hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
313eb4e0ad42fec810ed87057b72f1b4886e876c
| 2,272
|
py
|
Python
|
tests/algos/test_plas.py
|
jamartinh/d3rlpy
|
87f478451674ef769eb8ce74e3663c4d3b1c325d
|
[
"MIT"
] | null | null | null |
tests/algos/test_plas.py
|
jamartinh/d3rlpy
|
87f478451674ef769eb8ce74e3663c4d3b1c325d
|
[
"MIT"
] | 1
|
2020-11-17T22:35:50.000Z
|
2020-11-17T22:35:50.000Z
|
tests/algos/test_plas.py
|
jamartinh/d3rlpy
|
87f478451674ef769eb8ce74e3663c4d3b1c325d
|
[
"MIT"
] | null | null | null |
import pytest
from d3rlpy.algos.plas import PLAS, PLASWithPerturbation
from tests import performance_test
from .algo_test import algo_tester, algo_update_tester, algo_pendulum_tester
@pytest.mark.parametrize("observation_shape", [(100,), (4, 84, 84)])
@pytest.mark.parametrize("action_size", [2])
@pytest.mark.parametrize("q_func_factory", ["mean", "qr", "iqn", "fqf"])
@pytest.mark.parametrize("scaler", [None, "min_max"])
@pytest.mark.parametrize("action_scaler", [None, "min_max"])
@pytest.mark.parametrize("target_reduction_type", ["min", "none"])
def test_plas(
    observation_shape,
    action_size,
    q_func_factory,
    scaler,
    action_scaler,
    target_reduction_type,
):
    """Run the shared algo checks and one update pass for PLAS across
    all parametrized combinations of factory/scaler/reduction options."""
    config = dict(
        q_func_factory=q_func_factory,
        scaler=scaler,
        action_scaler=action_scaler,
        target_reduction_type=target_reduction_type,
    )
    algo = PLAS(**config)
    algo_tester(algo, observation_shape)
    algo_update_tester(algo, observation_shape, action_size)
@performance_test
@pytest.mark.parametrize("q_func_factory", ["mean", "qr", "iqn", "fqf"])
def test_plas_performance(q_func_factory):
    """Run the pendulum performance check for PLAS with a single trial."""
    algo_pendulum_tester(PLAS(q_func_factory=q_func_factory), n_trials=1)
@pytest.mark.parametrize("observation_shape", [(100,), (4, 84, 84)])
@pytest.mark.parametrize("action_size", [2])
@pytest.mark.parametrize("q_func_factory", ["mean", "qr", "iqn", "fqf"])
@pytest.mark.parametrize("scaler", [None, "min_max"])
@pytest.mark.parametrize("action_scaler", [None, "min_max"])
@pytest.mark.parametrize("target_reduction_type", ["min", "none"])
def test_plas_with_perturbation(
    observation_shape,
    action_size,
    q_func_factory,
    scaler,
    action_scaler,
    target_reduction_type,
):
    """Run the shared algo checks and one update pass for
    PLASWithPerturbation across all parametrized option combinations."""
    config = dict(
        q_func_factory=q_func_factory,
        scaler=scaler,
        action_scaler=action_scaler,
        target_reduction_type=target_reduction_type,
    )
    algo = PLASWithPerturbation(**config)
    algo_tester(algo, observation_shape)
    algo_update_tester(algo, observation_shape, action_size)
@performance_test
@pytest.mark.parametrize("q_func_factory", ["mean", "qr", "iqn", "fqf"])
def test_plas_with_perturbation_performance(q_func_factory):
    """Run the pendulum performance check for PLASWithPerturbation (one trial)."""
    algo_pendulum_tester(PLASWithPerturbation(q_func_factory=q_func_factory), n_trials=1)
| 33.411765
| 76
| 0.734155
| 292
| 2,272
| 5.35274
| 0.150685
| 0.051184
| 0.122841
| 0.069098
| 0.902111
| 0.852207
| 0.852207
| 0.852207
| 0.816379
| 0.816379
| 0
| 0.010638
| 0.131162
| 2,272
| 67
| 77
| 33.910448
| 0.781155
| 0
| 0
| 0.758621
| 0
| 0
| 0.12412
| 0.018486
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.068966
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
315a9f65380823c6b1d301af6a401001767d0bcd
| 1,146
|
py
|
Python
|
02-Learning_Path/02-03-Python/PYT-01-PythonCrashCourse/08-Functions/8.9-8.11-FunctionsList.py
|
nicode-io/The_Junior_Way
|
70fdfa8959c5f649f12264a043ddf296519f7508
|
[
"MIT"
] | 1
|
2021-08-08T20:40:31.000Z
|
2021-08-08T20:40:31.000Z
|
03-Cursus/03-06-Python_CrashCourse/08-Functions/8.9-8.11-FunctionsList.py
|
nicode-io/The_Junior_Way
|
70fdfa8959c5f649f12264a043ddf296519f7508
|
[
"MIT"
] | null | null | null |
03-Cursus/03-06-Python_CrashCourse/08-Functions/8.9-8.11-FunctionsList.py
|
nicode-io/The_Junior_Way
|
70fdfa8959c5f649f12264a043ddf296519f7508
|
[
"MIT"
] | null | null | null |
# Exercise 8.9 - print every message held in a list.
msg = ['hello you', 'are you fine', 'do you eat with us', 'have a nice evening']


def show_messages(msg_list):
    """Print each message contained in msg_list, one per line."""
    for entry in msg_list:
        print(entry)


show_messages(msg)
print('\n')

# Exercise 8.10 - move messages into an archive, emptying the source list.
msg = ['hello you', 'are you fine', 'do you eat with us', 'have a nice evening']
archived_messages = []


def send_messages(msg_list, archive_list):
    """Print each message, then move it from msg_list into archive_list.

    Messages are consumed from the end of msg_list, so they are shown
    (and archived) in reverse order and msg_list ends up empty.
    """
    while msg_list:
        current = msg_list.pop()
        print(current)
        archive_list.append(current)


send_messages(msg, archived_messages)
print(msg, archived_messages)
print('\n')

# Exercise 8.11 - same as 8.10, but pass a copy so the original list survives.
msg = ['hello you', 'are you fine', 'do you eat with us', 'have a nice evening']
archived_messages = []


def send_messages(msg_list, archive_list):
    """Print each message, then move it from msg_list into archive_list."""
    while msg_list:
        current = msg_list.pop()
        print(current)
        archive_list.append(current)


send_messages(msg[:], archived_messages)
print(msg, archived_messages)
print('\n')
| 27.95122
| 80
| 0.673647
| 171
| 1,146
| 4.315789
| 0.222222
| 0.094851
| 0.081301
| 0.130081
| 0.863144
| 0.863144
| 0.863144
| 0.863144
| 0.863144
| 0.863144
| 0
| 0.008791
| 0.205934
| 1,146
| 40
| 81
| 28.65
| 0.802198
| 0.172775
| 0
| 0.769231
| 0
| 0
| 0.199557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.115385
| false
| 0
| 0
| 0
| 0.115385
| 0.307692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
315d30138bd1a43463cf2341d43065c77c5cd9a7
| 24,709
|
py
|
Python
|
esper/controllers/device/command.py
|
pallavigopi/esper-cli
|
83c3536088031fd6a9e5e6e7ae8f18e3e82eeb78
|
[
"Apache-2.0"
] | 7
|
2019-05-17T06:56:37.000Z
|
2022-03-18T16:54:48.000Z
|
esper/controllers/device/command.py
|
pallavigopi/esper-cli
|
83c3536088031fd6a9e5e6e7ae8f18e3e82eeb78
|
[
"Apache-2.0"
] | 5
|
2019-07-29T17:55:33.000Z
|
2022-01-19T02:01:45.000Z
|
esper/controllers/device/command.py
|
pallavigopi/esper-cli
|
83c3536088031fd6a9e5e6e7ae8f18e3e82eeb78
|
[
"Apache-2.0"
] | 9
|
2019-08-22T06:15:39.000Z
|
2021-10-04T09:08:50.000Z
|
from cement import ex, Controller
from esperclient import CommandRequest
from esperclient.rest import ApiException
from esper.controllers.enums import OutputFormat, DeviceCommandEnum
from esper.ext.api_client import APIClient
from esper.ext.db_wrapper import DBWrapper
from esper.ext.utils import validate_creds_exists, parse_error_message
class DeviceCommand(Controller):
    """CLI controller that fires commands (install, reboot, wipe, ...) at a device
    through the Esper API.

    Every public action resolves the target device either from the ``-d/--device``
    option or from the active device stored in the local DB, runs the requested
    command, and renders the result as a plain table or JSON (``--json``).
    The previously copy-pasted lookup/run/render blocks are factored into the
    private helpers below; behavior and all user-visible strings are unchanged.
    """

    class Meta:
        label = 'device-command'
        # text displayed at the top of --help output
        description = 'Fire commands for device'
        # text displayed at the bottom of --help output
        epilog = 'Usage: espercli device-command'
        stacked_type = 'nested'
        stacked_on = 'base'

    def _command_basic_response(self, command, format=OutputFormat.TABULATED):
        """Reduce a command API object to its id/command/state fields.

        Returns a list of {TITLE, DETAILS} rows for tabulated output, or a
        plain dict for JSON output.
        """
        valid_keys = ['id', 'command', 'state']
        if format == OutputFormat.TABULATED:
            title = "TITLE"
            details = "DETAILS"
            renderable = [{title: k, details: v} for k, v in command.to_dict().items() if k in valid_keys]
        else:
            renderable = {k: v for k, v in command.to_dict().items() if k in valid_keys}
        return renderable

    def _api_handles(self):
        """Validate stored credentials and build the API clients.

        Returns a tuple ``(db, command_client, device_client, enterprise_id)``.
        """
        validate_creds_exists(self.app)
        db = DBWrapper(self.app.creds)
        command_client = APIClient(db.get_configure()).get_command_api_client()
        enterprise_id = db.get_enterprise_id()
        device_client = APIClient(db.get_configure()).get_device_api_client()
        return db, command_client, device_client, enterprise_id

    def _resolve_device_id(self, db, device_client, enterprise_id, log_tag):
        """Resolve the target device id.

        Uses the ``-d/--device`` option when given, otherwise falls back to the
        active device stored in the local DB. On failure the error is logged
        (prefixed with ``[log_tag]``), rendered, and None is returned.
        """
        if self.app.pargs.device:
            device_name = self.app.pargs.device
            kwargs = {'name': device_name}
            try:
                search_response = device_client.get_all_devices(enterprise_id, limit=1, offset=0, **kwargs)
                if not search_response.results or len(search_response.results) == 0:
                    self.app.log.debug(f'[{log_tag}] Device does not exist with name {device_name}')
                    self.app.render(f'Device does not exist with name {device_name}\n')
                    return None
                return search_response.results[0].id
            except ApiException as e:
                self.app.log.error(f"[{log_tag}] Failed to list devices: {e}")
                self.app.render(f"ERROR: {parse_error_message(self.app, e)}\n")
                return None
        device = db.get_device()
        if not device or not device.get('id'):
            self.app.log.debug(f'[{log_tag}] There is no active device.')
            self.app.render('There is no active device.\n')
            return None
        return device.get('id')

    def _render_command(self, response):
        """Render a command response as a plain table, or JSON when --json is set."""
        if not self.app.pargs.json:
            renderable = self._command_basic_response(response)
            self.app.render(renderable, format=OutputFormat.TABULATED.value, headers="keys", tablefmt="plain")
        else:
            renderable = self._command_basic_response(response, OutputFormat.JSON)
            self.app.render(renderable, format=OutputFormat.JSON.value)

    def _fire_and_render(self, command_client, enterprise_id, device_id, command_request, log_tag, command_label):
        """Run ``command_request`` on the device and render the outcome.

        API failures are logged and rendered; nothing is raised to the caller.
        """
        try:
            response = command_client.run_command(enterprise_id, device_id, command_request)
        except ApiException as e:
            self.app.log.error(f"[{log_tag}] Failed to fire the {command_label} command: {e}")
            self.app.render(f"ERROR: {parse_error_message(self.app, e)}\n")
            return
        self._render_command(response)

    @ex(
        help='Show command details',
        arguments=[
            (['command_id'],
             {'help': 'Device command id',
              'action': 'store'}),
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def show(self):
        """Show the details of a previously fired command."""
        command_id = self.app.pargs.command_id
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-show')
        if device_id is None:
            return
        try:
            response = command_client.get_command(command_id, device_id, enterprise_id)
        except ApiException as e:
            self.app.log.error(f"[device-command-show] Failed to show details of command: {e}")
            self.app.render(f"ERROR: {parse_error_message(self.app, e)}\n")
            return
        self._render_command(response)

    @ex(
        help='Install application version',
        arguments=[
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-V', '--version'],
             {'help': 'Application version id',
              'action': 'store',
              'dest': 'version'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def install(self):
        """Fire the INSTALL command for the given application version id."""
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-install')
        if device_id is None:
            return
        command_request = CommandRequest(command_args={"app_version": self.app.pargs.version},
                                         command=DeviceCommandEnum.INSTALL.name)
        self._fire_and_render(command_client, enterprise_id, device_id, command_request,
                              'device-command-install', 'install')

    @ex(
        help='Uninstall application version',
        arguments=[
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-V', '--version'],
             {'help': 'Application version id',
              'action': 'store',
              'dest': 'version'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def uninstall(self):
        """Fire the UNINSTALL command for the given application version id."""
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-uninstall')
        if device_id is None:
            return
        command_request = CommandRequest(command_args={"app_version": self.app.pargs.version},
                                         command=DeviceCommandEnum.UNINSTALL.name)
        self._fire_and_render(command_client, enterprise_id, device_id, command_request,
                              'device-command-uninstall', 'uninstall')

    @ex(
        help='Ping a device',
        arguments=[
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def ping(self):
        """Fire the UPDATE_HEARTBEAT command (ping) at the device."""
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-ping')
        if device_id is None:
            return
        command_request = CommandRequest(command=DeviceCommandEnum.UPDATE_HEARTBEAT.name)
        self._fire_and_render(command_client, enterprise_id, device_id, command_request,
                              'device-command-ping', 'ping')

    @ex(
        help='Lock a device',
        arguments=[
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def lock(self):
        """Fire the LOCK command at the device."""
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-lock')
        if device_id is None:
            return
        command_request = CommandRequest(command=DeviceCommandEnum.LOCK.name)
        self._fire_and_render(command_client, enterprise_id, device_id, command_request,
                              'device-command-lock', 'lock')

    @ex(
        help='Reboot a device',
        arguments=[
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def reboot(self):
        """Fire the REBOOT command at the device."""
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-reboot')
        if device_id is None:
            return
        command_request = CommandRequest(command=DeviceCommandEnum.REBOOT.name)
        self._fire_and_render(command_client, enterprise_id, device_id, command_request,
                              'device-command-reboot', 'reboot')

    @ex(
        help='Wipe a device',
        arguments=[
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-e', '--exstorage'],
             {'help': 'External storage',
              'action': 'store_true',
              'dest': 'external_storage'}),
            (['-f', '--frp'],
             {'help': 'Factory reset production',
              'action': 'store_true',
              'dest': 'frp'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def wipe(self):
        """Fire the WIPE command, optionally wiping external storage and FRP."""
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-wipe')
        if device_id is None:
            return
        external_storage = self.app.pargs.external_storage
        frp = self.app.pargs.frp
        # NOTE(review): 'store_true' flags normally default to False, so these
        # None checks look unreachable — kept for parity with the original.
        if external_storage is None:
            self.app.log.info('[device-command-wipe] External storage value is empty')
            self.app.render('External storage value is empty\n')
        if frp is None:
            self.app.log.info('[device-command-wipe] Factory reset production value is empty')
            self.app.render('Factory reset production value is empty\n')
        command_request = CommandRequest(command_args={"wipe_external_storage": external_storage, 'wipe_FRP': frp},
                                         command=DeviceCommandEnum.WIPE.name)
        self._fire_and_render(command_client, enterprise_id, device_id, command_request,
                              'device-command-wipe', 'wipe')

    @ex(
        help='Clear app data',
        arguments=[
            (['-d', '--device'],
             {'help': 'Device name',
              'action': 'store',
              'dest': 'device'}),
            (['-P', '--package-name'],
             {'help': 'Application package name',
              'action': 'store',
              'dest': 'package_name'}),
            (['-j', '--json'],
             {'help': 'Render result in Json format',
              'action': 'store_true',
              'dest': 'json'}),
        ]
    )
    def clear_app_data(self):
        """Fire the CLEAR_APP_DATA command for the given package name."""
        db, command_client, device_client, enterprise_id = self._api_handles()
        device_id = self._resolve_device_id(db, device_client, enterprise_id, 'device-command-clear-app-data')
        if device_id is None:
            return
        package_name = self.app.pargs.package_name
        if package_name is None:
            self.app.log.info('[device-command-clear-app-data] Package name is empty')
            self.app.render('Package name is empty\n')
            return
        command_request = CommandRequest(command_args={"package_name": package_name},
                                         command=DeviceCommandEnum.CLEAR_APP_DATA.name)
        self._fire_and_render(command_client, enterprise_id, device_id, command_request,
                              'device-command-clear-app-data', 'CLEAR_APP_DATA')
| 44.601083
| 120
| 0.576592
| 2,833
| 24,709
| 4.878221
| 0.054712
| 0.074964
| 0.047974
| 0.024313
| 0.879595
| 0.867728
| 0.85919
| 0.85919
| 0.856802
| 0.851447
| 0
| 0.001862
| 0.304626
| 24,709
| 553
| 121
| 44.681736
| 0.802468
| 0.003561
| 0
| 0.756539
| 0
| 0.002012
| 0.207775
| 0.053863
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018109
| false
| 0
| 0.014085
| 0
| 0.104628
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3183b65b842430d4bf3b0a4df9a527bcb91c49c2
| 22,167
|
py
|
Python
|
nixnet/_cprops.py
|
ni-ldp/nixnet-python
|
83f30c5b44098de0dc4828838e263b7be0866228
|
[
"MIT"
] | 16
|
2017-06-14T19:44:45.000Z
|
2022-02-06T15:14:52.000Z
|
nixnet/_cprops.py
|
ni-ldp/nixnet-python
|
83f30c5b44098de0dc4828838e263b7be0866228
|
[
"MIT"
] | 216
|
2017-06-15T16:41:10.000Z
|
2021-09-23T23:00:50.000Z
|
nixnet/_cprops.py
|
ni-ldp/nixnet-python
|
83f30c5b44098de0dc4828838e263b7be0866228
|
[
"MIT"
] | 23
|
2017-06-14T22:51:08.000Z
|
2022-03-03T03:04:40.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ctypes # type: ignore
import typing # NOQA: F401
from nixnet import _cfuncs
from nixnet import _ctypedefs
from nixnet import _errors
from nixnet import _funcs
def get_session_bool(ref, prop_id):
    # type: (int, int) -> bool
    """Read a session property stored as u8 and interpret it as a boolean."""
    return get_session_u8(ref, prop_id) != 0
def set_session_bool(ref, prop_id, value):
    # type: (int, int, bool) -> None
    """Write a boolean session property as a u8 (1 for truthy, 0 otherwise)."""
    set_session_u8(ref, prop_id, int(bool(value)))
def get_session_u8(ref, prop_id):
    # type: (int, int) -> int
    """Read an unsigned 8-bit property from an XNET session."""
    session_ref = _ctypedefs.nxSessionRef_t(ref)
    property_id = _ctypedefs.u32(prop_id)
    # NOTE(review): size is taken from bool8.BYTES (presumably 1, same as u8) -- confirm
    property_size = _ctypedefs.u32(_ctypedefs.bool8.BYTES)
    out_value = _ctypedefs.u8()
    status = _cfuncs.lib.nx_get_property(
        session_ref,
        property_id,
        property_size,
        ctypes.pointer(out_value))
    _errors.check_for_error(status.value)
    return out_value.value
def set_session_u8(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the u8 session property ``prop_id`` of ``ref``.

    Raises (via ``_errors.check_for_error``) if the driver reports an error.
    """
    ref_ctypes = _ctypedefs.nxSessionRef_t(ref)
    prop_id_ctypes = _ctypedefs.u32(prop_id)
    # Size by the value's own type, matching every other accessor in this
    # module.  (Was _ctypedefs.bool8.BYTES; both types are one byte.)
    prop_size_ctypes = _ctypedefs.u32(_ctypedefs.u8.BYTES)
    value_ctypes = _ctypedefs.u8(value)
    value_ctypes_ptr = ctypes.pointer(value_ctypes)
    result = _cfuncs.lib.nx_set_property(
        ref_ctypes,
        prop_id_ctypes,
        prop_size_ctypes,
        value_ctypes_ptr)
    _errors.check_for_error(result.value)
def get_session_u32(ref, prop_id):
    # type: (int, int) -> int
    """Return the u32 session property ``prop_id`` of session ``ref``."""
    out = _ctypedefs.u32()
    status = _cfuncs.lib.nx_get_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u32.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_session_u32(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the u32 session property ``prop_id`` of ``ref``."""
    out = _ctypedefs.u32(value)
    status = _cfuncs.lib.nx_set_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u32.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_session_u32_array(ref, prop_id):
    # type: (int, int) -> typing.Iterable[int]
    """Yield each element of the u32-array session property ``prop_id``."""
    size = _funcs.nx_get_property_size(ref, prop_id)
    count = size // _ctypedefs.u32.BYTES
    buf = (_ctypedefs.u32 * count)()  # type: ignore
    status = _cfuncs.lib.nx_get_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(size),
        buf)
    _errors.check_for_error(status.value)
    for item in buf:
        yield item.value
def set_session_u32_array(ref, prop_id, value):
    # type: (int, int, typing.List[int]) -> None
    """Write the list ``value`` to the u32-array session property ``prop_id``."""
    count = len(value)
    buf = (_ctypedefs.u32 * count)(*value)  # type: ignore
    status = _cfuncs.lib.nx_set_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(count * _ctypedefs.u32.BYTES),
        buf)
    _errors.check_for_error(status.value)
def get_session_u64(ref, prop_id):
    # type: (int, int) -> int
    """Return the u64 session property ``prop_id`` of session ``ref``."""
    out = _ctypedefs.u64()
    status = _cfuncs.lib.nx_get_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_session_u64(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the u64 session property ``prop_id`` of ``ref``."""
    out = _ctypedefs.u64(value)
    status = _cfuncs.lib.nx_set_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_session_f64(ref, prop_id):
    # type: (int, int) -> float
    """Return the f64 session property ``prop_id`` of session ``ref``."""
    out = _ctypedefs.f64()
    status = _cfuncs.lib.nx_get_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.f64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_session_f64(ref, prop_id, value):
    # type: (int, int, float) -> None
    """Write ``value`` to the f64 session property ``prop_id`` of ``ref``."""
    out = _ctypedefs.f64(value)
    status = _cfuncs.lib.nx_set_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.f64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_session_string(ref, prop_id):
    # type: (int, int) -> typing.Text
    """Return the string session property ``prop_id`` decoded as ASCII."""
    size = _funcs.nx_get_property_size(ref, prop_id)
    buf = ctypes.create_string_buffer(size)
    status = _cfuncs.lib.nx_get_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(size),
        buf)
    _errors.check_for_error(status.value)
    return buf.value.decode("ascii")
def set_session_string(ref, prop_id, value):
    # type: (int, int, typing.Text) -> None
    """Write ``value`` (ASCII-encoded) to the string session property."""
    raw = value.encode("ascii")
    status = _cfuncs.lib.nx_set_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(len(raw) * _ctypedefs.char.BYTES),
        ctypes.create_string_buffer(raw))
    _errors.check_for_error(status.value)
def get_session_string_array(ref, prop_id):
    # type: (int, int) -> typing.List[typing.Text]
    """Return the comma-delimited string property split into a list."""
    return get_session_string(ref, prop_id).split(",")
def get_session_ref(ref, prop_id):
    # type: (int, int) -> int
    """Return the session-reference property ``prop_id`` of session ``ref``."""
    out = _ctypedefs.nxSessionRef_t()
    status = _cfuncs.lib.nx_get_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.nxSessionRef_t.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_session_ref(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the session-reference property ``prop_id``."""
    out = _ctypedefs.nxSessionRef_t(value)
    status = _cfuncs.lib.nx_set_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.nxSessionRef_t.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_session_ref_array_len(ref, prop_id):
    # type: (int, int) -> int
    """Return how many refs the ref-array property ``prop_id`` holds."""
    size = _funcs.nx_get_property_size(ref, prop_id)
    return size // _ctypedefs.nxSessionRef_t.BYTES
def get_session_ref_array(ref, prop_id):
    # type: (int, int) -> typing.Iterable[int]
    """Yield each ref in the ref-array session property ``prop_id``."""
    size = _funcs.nx_get_property_size(ref, prop_id)
    count = size // _ctypedefs.nxSessionRef_t.BYTES
    buf = (_ctypedefs.nxSessionRef_t * count)()  # type: ignore
    status = _cfuncs.lib.nx_get_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(size),
        buf)
    _errors.check_for_error(status.value)
    for item in buf:
        yield item.value
def set_session_ref_array(ref, prop_id, value):
    # type: (int, int, typing.List[int]) -> None
    """Write the list ``value`` to the ref-array session property."""
    count = len(value)
    buf = (_ctypedefs.nxSessionRef_t * count)(*value)  # type: ignore
    status = _cfuncs.lib.nx_set_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(count * _ctypedefs.nxSessionRef_t.BYTES),
        buf)
    _errors.check_for_error(status.value)
def set_session_sub_u32(ref, sub, prop_id, value):
    # type: (int, int, int, int) -> None
    """Write ``value`` to u32 sub-property ``prop_id`` of subordinate ``sub``."""
    out = _ctypedefs.u32(value)
    status = _cfuncs.lib.nx_set_sub_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(sub),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u32.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def set_session_sub_f64(ref, sub, prop_id, value):
    # type: (int, int, int, float) -> None
    """Write ``value`` to f64 sub-property ``prop_id`` of subordinate ``sub``."""
    out = _ctypedefs.f64(value)
    status = _cfuncs.lib.nx_set_sub_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(sub),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.f64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def set_session_sub_string(ref, sub, prop_id, value):
    # type: (int, int, int, typing.Text) -> None
    """Write ``value`` (ASCII) to string sub-property ``prop_id`` of ``sub``."""
    raw = value.encode("ascii")
    status = _cfuncs.lib.nx_set_sub_property(
        _ctypedefs.nxSessionRef_t(ref),
        _ctypedefs.u32(sub),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(len(raw) * _ctypedefs.char.BYTES),
        ctypes.create_string_buffer(raw))
    _errors.check_for_error(status.value)
def get_database_bool(ref, prop_id):
    # type: (int, int) -> bool
    """Return the boolean database property ``prop_id`` of database ``ref``."""
    return get_database_u8(ref, prop_id) != 0
def set_database_bool(ref, prop_id, value):
    # type: (int, int, bool) -> None
    """Write ``value`` to the boolean database property ``prop_id`` as 0/1."""
    set_database_u8(ref, prop_id, int(bool(value)))
def get_database_u8(ref, prop_id):
    # type: (int, int) -> int
    """Return the u8 database property ``prop_id`` of database ``ref``.

    Raises (via ``_errors.check_for_error``) if the driver reports an error.
    """
    ref_ctypes = _ctypedefs.nxDatabaseRef_t(ref)
    prop_id_ctypes = _ctypedefs.u32(prop_id)
    # Size by the value's own type, matching every other accessor in this
    # module.  (Was _ctypedefs.bool8.BYTES; both types are one byte.)
    prop_size_ctypes = _ctypedefs.u32(_ctypedefs.u8.BYTES)
    value_ctypes = _ctypedefs.u8()
    value_ctypes_ptr = ctypes.pointer(value_ctypes)
    result = _cfuncs.lib.nxdb_get_property(
        ref_ctypes,
        prop_id_ctypes,
        prop_size_ctypes,
        value_ctypes_ptr)
    _errors.check_for_error(result.value)
    return value_ctypes.value
def set_database_u8(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the u8 database property ``prop_id`` of ``ref``.

    Raises (via ``_errors.check_for_error``) if the driver reports an error.
    """
    ref_ctypes = _ctypedefs.nxDatabaseRef_t(ref)
    prop_id_ctypes = _ctypedefs.u32(prop_id)
    # Size by the value's own type, matching every other accessor in this
    # module.  (Was _ctypedefs.bool8.BYTES; both types are one byte.)
    prop_size_ctypes = _ctypedefs.u32(_ctypedefs.u8.BYTES)
    value_ctypes = _ctypedefs.u8(value)
    value_ctypes_ptr = ctypes.pointer(value_ctypes)
    result = _cfuncs.lib.nxdb_set_property(
        ref_ctypes,
        prop_id_ctypes,
        prop_size_ctypes,
        value_ctypes_ptr)
    _errors.check_for_error(result.value)
def get_database_u8_array(ref, prop_id):
    # type: (int, int) -> typing.Iterable[int]
    """Yield each element of the u8-array database property ``prop_id``."""
    size = _funcs.nxdb_get_property_size(ref, prop_id)
    count = size // _ctypedefs.u8.BYTES
    buf = (_ctypedefs.u8 * count)()  # type: ignore
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(size),
        buf)
    _errors.check_for_error(status.value)
    for item in buf:
        yield item.value
def set_database_u8_array(ref, prop_id, value):
    # type: (int, int, typing.List[int]) -> None
    """Write the list ``value`` to the u8-array database property."""
    count = len(value)
    buf = (_ctypedefs.u8 * count)(*value)  # type: ignore
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(count * _ctypedefs.u8.BYTES),
        buf)
    _errors.check_for_error(status.value)
def get_database_u32(ref, prop_id):
    # type: (int, int) -> int
    """Return the u32 database property ``prop_id`` of database ``ref``."""
    out = _ctypedefs.u32()
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u32.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_database_u32(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the u32 database property ``prop_id`` of ``ref``."""
    out = _ctypedefs.u32(value)
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u32.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_database_u32_array(ref, prop_id):
    # type: (int, int) -> typing.Iterable[int]
    """Yield each element of the u32-array database property ``prop_id``."""
    size = _funcs.nxdb_get_property_size(ref, prop_id)
    count = size // _ctypedefs.u32.BYTES
    buf = (_ctypedefs.u32 * count)()  # type: ignore
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(size),
        buf)
    _errors.check_for_error(status.value)
    for item in buf:
        yield item.value
def set_database_u32_array(ref, prop_id, value):
    # type: (int, int, typing.List[int]) -> None
    """Write the list ``value`` to the u32-array database property."""
    count = len(value)
    buf = (_ctypedefs.u32 * count)(*value)  # type: ignore
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(count * _ctypedefs.u32.BYTES),
        buf)
    _errors.check_for_error(status.value)
def get_database_u64(ref, prop_id):
    # type: (int, int) -> int
    """Return the u64 database property ``prop_id`` of database ``ref``."""
    out = _ctypedefs.u64()
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_database_u64(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the u64 database property ``prop_id`` of ``ref``."""
    out = _ctypedefs.u64(value)
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.u64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_database_f64(ref, prop_id):
    # type: (int, int) -> float
    """Return the f64 database property ``prop_id`` of database ``ref``."""
    out = _ctypedefs.f64()
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.f64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_database_f64(ref, prop_id, value):
    # type: (int, int, float) -> None
    """Write ``value`` to the f64 database property ``prop_id`` of ``ref``."""
    out = _ctypedefs.f64(value)
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.f64.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_database_string(ref, prop_id):
    # type: (int, int) -> typing.Text
    """Return the string database property ``prop_id`` decoded as ASCII."""
    size = _funcs.nxdb_get_property_size(ref, prop_id)
    buf = ctypes.create_string_buffer(size)
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(size),
        buf)
    _errors.check_for_error(status.value)
    return buf.value.decode("ascii")
def set_database_string(ref, prop_id, value):
    # type: (int, int, typing.Text) -> None
    """Write ``value`` (ASCII-encoded) to the string database property."""
    raw = value.encode("ascii")
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(len(raw) * _ctypedefs.char.BYTES),
        ctypes.create_string_buffer(raw))
    _errors.check_for_error(status.value)
def get_database_ref(ref, prop_id):
    # type: (int, int) -> int
    """Return the database-reference property ``prop_id`` of database ``ref``."""
    out = _ctypedefs.nxDatabaseRef_t()
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.nxDatabaseRef_t.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
    return out.value
def set_database_ref(ref, prop_id, value):
    # type: (int, int, int) -> None
    """Write ``value`` to the database-reference property ``prop_id``."""
    out = _ctypedefs.nxDatabaseRef_t(value)
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(_ctypedefs.nxDatabaseRef_t.BYTES),
        ctypes.pointer(out))
    _errors.check_for_error(status.value)
def get_database_ref_array_len(ref, prop_id):
    # type: (int, int) -> int
    """Return how many refs the ref-array database property holds."""
    size = _funcs.nxdb_get_property_size(ref, prop_id)
    return size // _ctypedefs.nxDatabaseRef_t.BYTES
def get_database_ref_array(ref, prop_id):
    # type: (int, int) -> typing.Iterable[int]
    """Yield each ref in the ref-array database property ``prop_id``."""
    size = _funcs.nxdb_get_property_size(ref, prop_id)
    count = size // _ctypedefs.nxDatabaseRef_t.BYTES
    buf = (_ctypedefs.nxDatabaseRef_t * count)()  # type: ignore
    status = _cfuncs.lib.nxdb_get_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(size),
        buf)
    _errors.check_for_error(status.value)
    for item in buf:
        yield item.value
def set_database_ref_array(ref, prop_id, value):
    # type: (int, int, typing.List[int]) -> None
    """Write the list ``value`` to the ref-array database property."""
    count = len(value)
    buf = (_ctypedefs.nxDatabaseRef_t * count)(*value)  # type: ignore
    status = _cfuncs.lib.nxdb_set_property(
        _ctypedefs.nxDatabaseRef_t(ref),
        _ctypedefs.u32(prop_id),
        _ctypedefs.u32(count * _ctypedefs.nxDatabaseRef_t.BYTES),
        buf)
    _errors.check_for_error(status.value)
| 33.434389
| 84
| 0.711102
| 3,009
| 22,167
| 4.788302
| 0.024261
| 0.070378
| 0.055594
| 0.053928
| 0.979386
| 0.975708
| 0.963562
| 0.963562
| 0.96148
| 0.956483
| 0
| 0.016602
| 0.195696
| 22,167
| 662
| 85
| 33.484894
| 0.791519
| 0.071322
| 0
| 0.859649
| 0
| 0
| 0.001267
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08577
| false
| 0
| 0.017544
| 0.003899
| 0.136452
| 0.001949
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31abfe24e36b98516396fcc811a45925a37d565e
| 87,742
|
py
|
Python
|
web/transiq/restapi/data.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/restapi/data.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/restapi/data.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib.auth.models import User
from django.db.models import Q
from employee.models import Employee
from fileupload.models import PODFile
from fms.models import MobileAppVersions
from restapi.helper_api import check_booking_status, create_new_booking_status, update_booking_status
from restapi.models import UserCategory, EmployeeRoles, TaskDashboardFunctionalities, EmployeeRolesMapping, \
EmployeeRolesFunctionalityMapping, BookingStatuses, BookingStatusChain, BookingStatusesMapping
import pandas as pd
from datetime import datetime, timedelta
from restapi.utils import get_or_none
from team.models import ManualBooking, Invoice
def update_user_category():
    """Ensure the default user categories exist, creating any that are missing.

    Bug fix: the original 'broker' branch called ``filter`` (a no-op read)
    instead of ``create``, so the 'broker' category was never created.
    """
    for category in ('customer', 'employee', 'supplier', 'broker'):
        if not UserCategory.objects.filter(category=category).exists():
            UserCategory.objects.create(category=category)
def update_employee_roles():
    """Ensure every standard employee role row exists, creating missing ones."""
    roles = (
        'office_data_entry',
        'ops_executive',
        'accounts_payable',
        'accounts_receivable',
        'sales',
        'traffic',
        'city_head',
        'management',
        'tech',
    )
    for role in roles:
        # .exists() avoids fetching rows just to test for presence.
        if not EmployeeRoles.objects.filter(role=role).exists():
            EmployeeRoles.objects.create(role=role)
def update_td_functionalities():
    """Ensure every task-dashboard functionality row exists.

    Collapses twenty copy-pasted filter/create pairs into one data-driven loop.
    """
    functionalities = (
        'new_inquiry',
        'customer_inquiries',
        'open_inquiries',
        'my_inquiries',
        'pending_payments',
        'pending_lr',
        'in_transit',
        'invoice_confirmation',
        'delivered',
        'confirm_booking',
        'lr_generation',
        'pay_advance',
        'pay_balance',
        'send_invoice',
        'verify_pod',
        'raise_invoice',
        'confirm_invoice',
        'inward_entry',
        'process_payments',
        'reconcile',
    )
    for functionality in functionalities:
        # .exists() avoids fetching rows just to test for presence.
        if not TaskDashboardFunctionalities.objects.filter(
                functionality=functionality).exists():
            TaskDashboardFunctionalities.objects.create(
                functionality=functionality)
def update_employee_roles_mapping():
    """Load employee→role mappings from a spreadsheet, creating missing rows.

    Reads the hard-coded workbook, and for each row with an 'Emp ID' creates
    an active EmployeeRolesMapping for each of the 'Role 1'..'Role 3' columns
    that is non-empty, unless an identical active mapping already exists.
    Collapses the triplicated Role 1/2/3 handling into one loop.
    """
    # NOTE(review): absolute user-local path — works only on the author's
    # machine; consider making the path a parameter or a setting.
    df = pd.read_excel('/Users/aaho/Downloads/Employee Role Mapping.xlsx')
    df = df.fillna('')
    for _, row in df.iterrows():
        if not row['Emp ID']:
            continue
        employee = Employee.objects.get(id=row['Emp ID'])
        for column in ('Role 1', 'Role 2', 'Role 3'):
            if not row[column]:
                continue
            emp_role = EmployeeRoles.objects.get(role=row[column])
            if not EmployeeRolesMapping.objects.filter(
                    employee=employee, employee_role=emp_role,
                    employee_status='active').exists():
                EmployeeRolesMapping.objects.create(
                    employee=employee, employee_role=emp_role,
                    employee_status='active')
def update_employee_roles_functionalities_mapping():
    """Seed the role -> task-dashboard-functionality mapping table.

    Declares, as data, which dashboard functionalities each employee role may
    see and the caption displayed for each. Idempotent: existing
    (role, functionality, caption) rows are left untouched; missing ones are
    created via get_or_create.

    Raises:
        EmployeeRoles.DoesNotExist / TaskDashboardFunctionalities.DoesNotExist
        if a referenced role or functionality row is missing (same as the
        original up-front .get() lookups).
    """
    # Dashboard caption for each functionality (captions are consistent
    # across roles in the original data, so one lookup table suffices).
    captions = {
        'new_inquiry': 'New Inquiry',
        'customer_inquiries': 'Customer Inquiries',
        'open_inquiries': 'Open Inquiries',
        'my_inquiries': 'My Inquiries',
        'pending_payments': 'Pending Payment',
        'pending_lr': 'Pending LR',
        'in_transit': 'In Transit',
        'invoice_confirmation': 'Invoice Confirmation',
        'delivered': 'Delivered',
        'confirm_booking': 'New Booking',
        'lr_generation': 'Generate LR',
        'pay_advance': 'Pay Advance',
        'pay_balance': 'Pay Balance',
        'send_invoice': 'Send Invoice',
        'verify_pod': 'Verify PoD',
        'raise_invoice': 'Raise Invoice',
        'confirm_invoice': 'Confirm Invoice',
        'inward_entry': 'Inward Entry',
        'process_payments': 'Process Payment',
        'reconcile': 'Reconcile',
    }
    # Shared functionality sets, to keep the role table readable.
    sales_view = ['new_inquiry', 'customer_inquiries', 'my_inquiries',
                  'pending_payments', 'pending_lr', 'in_transit',
                  'invoice_confirmation', 'delivered']
    ops_actions = ['confirm_booking', 'lr_generation', 'pay_advance',
                   'pay_balance', 'send_invoice']
    accounts_actions = ['verify_pod', 'raise_invoice', 'confirm_invoice',
                        'inward_entry', 'process_payments', 'reconcile']
    # Functionalities each role is entitled to (mirrors the original
    # hand-written mapping exactly).
    role_functionalities = {
        'sales': sales_view,
        'traffic': ['open_inquiries', 'in_transit', 'pending_lr', 'delivered'],
        'ops_executive': ['delivered', 'in_transit', 'pending_lr'],
        'city_head': sales_view + ['open_inquiries'],
        # management and tech see everything.
        'management': sales_view + ['open_inquiries'] + ops_actions + accounts_actions,
        'tech': sales_view + ['open_inquiries'] + ops_actions + accounts_actions,
        'office_data_entry': ops_actions,
        'accounts_receivable': ['verify_pod', 'raise_invoice', 'send_invoice',
                                'confirm_invoice', 'inward_entry'],
        'accounts_payable': ['process_payments', 'reconcile'],
    }
    # Resolve every role and functionality up front, so a missing row raises
    # before any mapping is created (same fail-fast behavior as the original).
    roles = {name: EmployeeRoles.objects.get(role=name)
             for name in role_functionalities}
    functionalities = {name: TaskDashboardFunctionalities.objects.get(functionality=name)
                       for name in captions}
    for role_name, func_names in role_functionalities.items():
        for func_name in func_names:
            EmployeeRolesFunctionalityMapping.objects.get_or_create(
                employee_role=roles[role_name],
                td_functionality=functionalities[func_name],
                caption=captions[func_name])
def update_booking_statuses():
    """Seed/refresh the BookingStatuses table with per-status time limits.

    For each known status, creates the row if absent or bulk-updates its
    time_limit if present (queryset .update(), no model save signals — same
    as the original). Also removes stale zero-limit 'inward_followup' rows,
    which were superseded by 'inward_followup_completed'.

    time_limit is presumably in minutes (1440 = 24h) — TODO confirm against
    the BookingStatuses model.
    """
    time_limits = [
        ('confirmed', 1440),
        ('loaded', 1440),
        ('lr_generated', 1440),
        ('advance_paid', 1440),
        ('unloaded', 14400),
        ('pod_uploaded', 1440),
        ('pod_verified', 1440),
        ('invoice_raised', 1440),
        ('invoice_confirmed', 0),
        ('balance_paid', 0),
        ('party_invoice_sent', 4320),
        ('inward_followup_completed', 2880),
        ('complete', 0),
    ]
    for status, time_limit in time_limits:
        qs = BookingStatuses.objects.filter(status=status)
        if qs.exists():
            # Redundant status=... removed: filtering already pins the value.
            qs.update(time_limit=time_limit)
        else:
            BookingStatuses.objects.create(status=status, time_limit=time_limit)
    # Legacy status: only zero-limit rows were deleted originally; keep that.
    BookingStatuses.objects.filter(status='inward_followup', time_limit=0).delete()
def update_booking_status_chain():
    """Seed or refresh the BookingStatusChain table.

    Each row links one BookingStatuses entry to its predecessor and successor
    in both the primary and the secondary workflow chain.  Idempotent:
    missing rows are created, existing rows are updated in place with the
    canonical values below.

    Requires update_booking_statuses() to have run first; otherwise the
    BookingStatuses lookups raise DoesNotExist.
    """

    def _upsert_chain(status, level, primary_prev, primary_next,
                      secondary_prev, secondary_next):
        # Create-or-update a single chain row keyed on its booking_status.
        values = dict(
            booking_status=status,
            level=level,
            primary_preceded_booking_status=primary_prev,
            primary_succeeded_booking_status=primary_next,
            secondary_preceded_booking_status=secondary_prev,
            secondary_succeeded_booking_status=secondary_next,
        )
        existing = BookingStatusChain.objects.filter(booking_status=status)
        if not existing:
            BookingStatusChain.objects.create(**values)
        else:
            existing.update(**values)

    # Resolve every workflow status once up front.
    bs = {status: BookingStatuses.objects.get(status=status) for status in (
        'confirmed', 'loaded', 'lr_generated', 'advance_paid', 'unloaded',
        'pod_uploaded', 'pod_verified', 'invoice_raised', 'invoice_confirmed',
        'balance_paid', 'party_invoice_sent', 'inward_followup_completed',
        'complete')}

    # (status, level, primary_prev, primary_next, secondary_prev, secondary_next)
    # Transcribed verbatim from the original per-status if/else blocks.
    chain = [
        ('confirmed', 'primary', 'confirmed', 'loaded', 'confirmed', 'loaded'),
        ('loaded', 'primary', 'confirmed', 'lr_generated', 'confirmed', 'lr_generated'),
        ('lr_generated', 'primary', 'loaded', 'unloaded', 'loaded', 'advance_paid'),
        ('advance_paid', 'secondary', 'lr_generated', 'unloaded', 'lr_generated', 'unloaded'),
        ('unloaded', 'primary', 'lr_generated', 'invoice_raised', 'advance_paid', 'pod_uploaded'),
        ('pod_uploaded', 'secondary', 'unloaded', 'invoice_raised', 'unloaded', 'pod_verified'),
        ('pod_verified', 'secondary', 'unloaded', 'invoice_raised', 'pod_uploaded', 'invoice_raised'),
        ('invoice_raised', 'primary', 'unloaded', 'party_invoice_sent', 'pod_verified', 'party_invoice_sent'),
        ('party_invoice_sent', 'primary', 'invoice_raised', 'invoice_confirmed', 'invoice_raised', 'invoice_confirmed'),
        ('balance_paid', 'secondary', 'pod_uploaded', 'invoice_raised', 'pod_verified', 'invoice_raised'),
        ('invoice_confirmed', 'primary', 'party_invoice_sent', 'complete', 'party_invoice_sent', 'inward_followup_completed'),
        ('inward_followup_completed', 'secondary', 'invoice_confirmed', 'complete', 'invoice_confirmed', 'complete'),
        ('complete', 'primary', 'invoice_confirmed', 'complete', 'inward_followup_completed', 'complete'),
    ]
    for status, level, p_prev, p_next, s_prev, s_next in chain:
        _upsert_chain(bs[status], level, bs[p_prev], bs[p_next], bs[s_prev], bs[s_next])
def prepare_user_data():
    """Run every seed/sync step needed to bootstrap user and booking-status
    reference data, in dependency order (statuses before the status chain)."""
    steps = (
        update_user_category,
        update_employee_roles,
        update_td_functionalities,
        update_employee_roles_mapping,
        update_employee_roles_functionalities_mapping,
        update_booking_statuses,
        update_booking_status_chain,
    )
    for step in steps:
        step()
def update_mobile_app_version():
    """Register the current Android app versions (AE 1.6 and AO 1.8)."""
    for name, version in (('AE', '1.6'), ('AO', '1.8')):
        MobileAppVersions.objects.create(
            app_platform='android', app_name=name, app_version=version)
def remove_old_in_transit_data():
    """Advance bookings stuck at 'lr_generated' (never marked unloaded) that
    already have a verified, valid POD on file.

    For each such POD this ensures the 'unloaded', 'pod_uploaded' and
    'pod_verified' BSM statuses exist, re-activating existing ones.
    """
    lr_bookings = BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='lr_generated').exclude(
        deleted=True).values_list('manual_booking_id', flat=True)
    # Materialized as a set: the original list made the comprehension below
    # quadratic in the number of bookings.
    unloaded_bookings = set(BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='unloaded').exclude(
        deleted=True).values_list('manual_booking_id', flat=True))
    in_transit_bookings = [x for x in lr_bookings if x not in unloaded_bookings]
    pods_verified = PODFile.objects.filter(
        booking__id__in=in_transit_bookings, verified=True, is_valid=True)
    # NOTE(review): [0] raises IndexError if the user is missing -- presumably
    # intentional for a one-off maintenance script.
    user = User.objects.filter(username__iexact='raviaaho')[0]
    for pod_v in pods_verified:
        if not check_booking_status(pod_v.booking, 'unloaded'):
            create_new_booking_status(pod_v.booking, 'unloaded', user)
        if not check_booking_status(pod_v.booking, 'pod_uploaded'):
            create_new_booking_status(pod_v.booking, 'pod_uploaded', user)
        else:
            update_booking_status(pod_v.booking, 'pod_uploaded', 'in_progress', user)
        if not check_booking_status(pod_v.booking, 'pod_verified'):
            create_new_booking_status(pod_v.booking, 'pod_verified', user)
        else:
            update_booking_status(pod_v.booking, 'pod_verified', 'in_progress', user)
    # pods_unverified = PODFile.objects.filter(booking__id__in=in_transit_bookings, verified=False, is_valid=False)
    # pods_rejected = PODFile.objects.filter(booking__id__in=in_transit_bookings, verified=True, is_valid=False)
def pod_upload_data_sync_in_bsm():
    """Ensure a 'pod_uploaded' BSM status for every active booking whose POD
    is still unverified and that is already unloaded."""
    user = User.objects.filter(username__iexact='raviaaho')[0]
    candidates = ManualBooking.objects.filter(
        Q(pod_status__iexact='unverified')).exclude(
        Q(booking_status='cancelled') | Q(deleted=True)).order_by('id')
    for booking in candidates:
        unloaded = check_booking_status(booking, 'unloaded')
        pod_uploaded = check_booking_status(booking, 'pod_uploaded')
        if not (unloaded and not pod_uploaded):
            continue
        # Re-check immediately before mutating (mirrors the original flow).
        if check_booking_status(booking, 'pod_uploaded'):
            update_booking_status(booking, 'pod_uploaded', 'in_progress', user)
        else:
            create_new_booking_status(booking, 'pod_uploaded', user)
def pod_rejected_data_sync_in_bsm():
    """Revert the 'pod_uploaded' BSM status for active bookings whose POD was
    rejected and that are not yet POD-verified."""
    user = User.objects.filter(username__iexact='raviaaho')[0]
    rejected = ManualBooking.objects.filter(
        Q(pod_status__iexact='rejected')).exclude(
        Q(booking_status='cancelled') | Q(deleted=True)).order_by('id')
    for booking in rejected:
        pod_uploaded = check_booking_status(booking, 'pod_uploaded')
        pod_verified = check_booking_status(booking, 'pod_verified')
        if pod_verified:
            continue
        # Re-check immediately before mutating (mirrors the original flow).
        pod_uploaded = check_booking_status(booking, 'pod_uploaded')
        if pod_uploaded:
            update_booking_status(booking, 'pod_uploaded', 'reverted', user)
        else:
            create_new_booking_status(booking, 'pod_uploaded', user)
def pod_verified_data_sync_in_bsm():
    """Sync BSM statuses for bookings whose POD is completed (shipment within
    the last 100 days): ensure 'unloaded', 'pod_uploaded' and 'pod_verified'
    each exist, re-activating existing entries."""
    user = User.objects.filter(username__iexact='raviaaho')[0]
    for booking in ManualBooking.objects.filter(
            Q(pod_status__iexact='completed') & Q(
                shipment_date__gte=(datetime.now() - timedelta(days=100)).date())).exclude(
            Q(booking_status='cancelled') | Q(deleted=True)).order_by('id'):
        booking_unloaded = check_booking_status(booking, 'unloaded')
        booking_pod_uploaded = check_booking_status(booking, 'pod_uploaded')
        booking_pod_verified = check_booking_status(booking, 'pod_verified')
        if not booking_pod_verified and (booking_unloaded or booking_pod_uploaded):
            # Ensure/refresh each status in workflow order.  BUGFIX: the
            # original re-checked 'unloaded' but its else-branch updated
            # 'pod_uploaded' (an apparent copy/paste slip, also visible in the
            # misnamed booking_pod_uploaded variable); each status is now
            # created or updated consistently against itself.
            for status in ('unloaded', 'pod_uploaded', 'pod_verified'):
                if not check_booking_status(booking, status):
                    create_new_booking_status(booking, status, user)
                else:
                    update_booking_status(booking, status, 'in_progress', user)
def sync_invoice_status():
    """Mark every booking attached to an unpaid Invoice as
    'invoice_confirmed' and mirror the invoice workflow statuses
    (invoice_raised, party_invoice_sent, invoice_confirmed) into BSM."""
    user = User.objects.filter(username__iexact='raviaaho')[0]
    for invoice in Invoice.objects.filter(payment_received=False):
        for booking in invoice.bookings.all():
            booking.invoice_status = 'invoice_confirmed'
            booking.save()
            for status in ('invoice_raised', 'party_invoice_sent', 'invoice_confirmed'):
                if check_booking_status(booking, status):
                    update_booking_status(booking, status, 'in_progress', user)
                else:
                    create_new_booking_status(booking, status, user)
def pay_balance_data_sync_up():
    """Mark 'balance_paid' for POD-verified bookings whose outward payment is
    complete (or in excess) but which lack a balance_paid/complete status."""
    user = User.objects.filter(username__iexact='raviaaho')[0]
    pod_verified_bookings = BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='pod_verified').exclude(
        deleted=True).values_list('manual_booking_id', flat=True)
    # Set membership keeps the filter below linear instead of quadratic.
    balance_paid_bookings = set(BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__in=['balance_paid', 'complete']).exclude(
        deleted=True).values_list('manual_booking_id', flat=True))
    balance_not_paid_bookings = [x for x in pod_verified_bookings if x not in balance_paid_bookings]
    for booking_id in balance_not_paid_bookings:
        booking = get_or_none(ManualBooking, id=booking_id)
        if booking and booking.outward_payment_status in ('complete', 'excess'):
            if not check_booking_status(booking, 'balance_paid'):
                create_new_booking_status(booking, 'balance_paid', user)
            else:
                update_booking_status(booking, 'balance_paid', 'in_progress', user)
def raise_invoice_data_sync_up():
    """For POD-verified bookings with no 'invoice_raised' BSM row, mirror the
    invoice workflow from ManualBooking.invoice_status into BSM.

    The original repeated an identical body for each of the three
    invoice_status values ('invoice_raised', 'invoice_sent',
    'invoice_confirmed'); since invoice_status is a single value, one
    membership test is equivalent and removes the triplication.
    """
    user = User.objects.filter(username__iexact='raviaaho')[0]
    pod_verified_bookings = BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='pod_verified').exclude(
        deleted=True).values_list('manual_booking_id', flat=True)
    # Set membership keeps the filter below linear instead of quadratic.
    invoice_raised_bookings = set(BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='invoice_raised').exclude(
        deleted=True).values_list('manual_booking_id', flat=True))
    invoice_not_raised_bookings = [x for x in pod_verified_bookings if x not in invoice_raised_bookings]
    for booking_id in invoice_not_raised_bookings:  # renamed: `id` shadowed the builtin
        booking = get_or_none(ManualBooking, id=booking_id)
        if booking and booking.invoice_status in ('invoice_raised', 'invoice_sent', 'invoice_confirmed'):
            for status in ('invoice_raised', 'party_invoice_sent', 'invoice_confirmed'):
                if not check_booking_status(booking, status):
                    create_new_booking_status(booking, status, user)
                else:
                    update_booking_status(booking, status, 'in_progress', user)
def sent_invoice_data_sync_up():
    """For bookings raised in BSM but not yet 'party_invoice_sent', mirror
    the invoice workflow from ManualBooking.invoice_status into BSM.

    Note: 'invoice_raised' is only created when missing (never re-activated),
    matching the original behavior.
    """
    user = User.objects.filter(username__iexact='raviaaho')[0]
    invoice_raised_bookings = BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='invoice_raised').exclude(
        deleted=True).values_list('manual_booking_id', flat=True)
    # Set membership keeps the filter below linear instead of quadratic.
    party_invoice_sent_bookings = set(BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='party_invoice_sent').exclude(
        deleted=True).values_list('manual_booking_id', flat=True))
    invoice_not_sent_bookings = [x for x in invoice_raised_bookings if x not in party_invoice_sent_bookings]
    for booking_id in invoice_not_sent_bookings:  # renamed: `id` shadowed the builtin
        booking = get_or_none(ManualBooking, id=booking_id)
        if booking and booking.invoice_status in ('invoice_raised', 'invoice_sent', 'invoice_confirmed'):
            if not check_booking_status(booking, 'invoice_raised'):
                create_new_booking_status(booking, 'invoice_raised', user)
            if not check_booking_status(booking, 'party_invoice_sent'):
                create_new_booking_status(booking, 'party_invoice_sent', user)
            else:
                update_booking_status(booking, 'party_invoice_sent', 'in_progress', user)
            if not check_booking_status(booking, 'invoice_confirmed'):
                create_new_booking_status(booking, 'invoice_confirmed', user)
            else:
                update_booking_status(booking, 'invoice_confirmed', 'in_progress', user)
def confirm_invoice_data_sync_up():
    """For bookings sent-to-party in BSM but not yet 'invoice_confirmed',
    mirror the invoice workflow from ManualBooking.invoice_status into BSM.

    Note: 'invoice_raised' and 'party_invoice_sent' are only created when
    missing (never re-activated), matching the original behavior.
    """
    user = User.objects.filter(username__iexact='raviaaho')[0]
    party_invoice_sent_bookings = BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='party_invoice_sent').exclude(
        deleted=True).values_list('manual_booking_id', flat=True)
    # Set membership keeps the filter below linear instead of quadratic.
    invoice_confirmed_bookings = set(BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='invoice_confirmed').exclude(
        deleted=True).values_list('manual_booking_id', flat=True))
    invoice_not_confirmed_bookings = [x for x in party_invoice_sent_bookings if x not in invoice_confirmed_bookings]
    for booking_id in invoice_not_confirmed_bookings:  # renamed: `id` shadowed the builtin
        booking = get_or_none(ManualBooking, id=booking_id)
        if booking and booking.invoice_status in ('invoice_raised', 'invoice_sent', 'invoice_confirmed'):
            if not check_booking_status(booking, 'invoice_raised'):
                create_new_booking_status(booking, 'invoice_raised', user)
            if not check_booking_status(booking, 'party_invoice_sent'):
                create_new_booking_status(booking, 'party_invoice_sent', user)
            if not check_booking_status(booking, 'invoice_confirmed'):
                create_new_booking_status(booking, 'invoice_confirmed', user)
            else:
                update_booking_status(booking, 'invoice_confirmed', 'in_progress', user)
def inward_followup_completed_sync_up():
    """Mark 'inward_followup_completed' for invoice-confirmed bookings whose
    inward payment is fully received (or in excess)."""
    user = User.objects.filter(username__iexact='raviaaho')[0]
    invoice_confirmed_bookings = BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='invoice_confirmed').exclude(
        deleted=True).values_list('manual_booking_id', flat=True)
    # Set membership keeps the filter below linear instead of quadratic.
    complete_bookings = set(BookingStatusesMapping.objects.filter(
        booking_status_chain__booking_status__status__iexact='inward_followup_completed').exclude(
        deleted=True).values_list('manual_booking_id', flat=True))
    pending_payments_bookings = [x for x in invoice_confirmed_bookings if x not in complete_bookings]
    for booking_id in pending_payments_bookings:  # renamed: `id` shadowed the builtin
        booking = get_or_none(ManualBooking, id=booking_id)
        if booking and booking.inward_payment_status in ('full_received', 'excess'):
            if not check_booking_status(booking, 'inward_followup_completed'):
                create_new_booking_status(booking, 'inward_followup_completed', user)
            else:
                update_booking_status(booking, 'inward_followup_completed', 'in_progress', user)
def save_all_manual_bookings():
    """Re-save every non-cancelled ManualBooking with id >= 9231 so any
    save-time logic (signals, derived fields) runs again for those rows."""
    bookings = ManualBooking.objects.filter(id__gte=9231).exclude(booking_status='cancelled')
    for booking in bookings:
        booking.save()
| 76.697552
| 140
| 0.577044
| 7,171
| 87,742
| 6.678706
| 0.028587
| 0.074917
| 0.049485
| 0.062681
| 0.890109
| 0.830497
| 0.786545
| 0.746811
| 0.714384
| 0.692711
| 0
| 0.002077
| 0.363372
| 87,742
| 1,144
| 141
| 76.697552
| 0.855314
| 0.004901
| 0
| 0.715477
| 0
| 0
| 0.074213
| 0.002921
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018536
| false
| 0
| 0.010195
| 0
| 0.02873
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ecb38855a4a06ca96220fb07d0522c3e0dc14da
| 137
|
py
|
Python
|
RobotSimulation/PlanningCore/robot/robot.py
|
benbenlijie/BilliardRobot
|
13f72c045c69a0a5ffb8a3cfc3f90897519dabb1
|
[
"Apache-2.0"
] | null | null | null |
RobotSimulation/PlanningCore/robot/robot.py
|
benbenlijie/BilliardRobot
|
13f72c045c69a0a5ffb8a3cfc3f90897519dabb1
|
[
"Apache-2.0"
] | null | null | null |
RobotSimulation/PlanningCore/robot/robot.py
|
benbenlijie/BilliardRobot
|
13f72c045c69a0a5ffb8a3cfc3f90897519dabb1
|
[
"Apache-2.0"
] | null | null | null |
class Robot(object):
    """A robot described solely by its position."""

    def __init__(self, pos):
        # Stored as given; no defensive copy is made.
        self.pos = pos

    def __repr__(self):
        return 'Robot(pos={})'.format(self.pos)
| 22.833333
| 39
| 0.591241
| 19
| 137
| 3.842105
| 0.526316
| 0.287671
| 0.273973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.262774
| 137
| 6
| 39
| 22.833333
| 0.722772
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9edeb49bd3a67ec8793f5580994357f67f17b467
| 273
|
py
|
Python
|
lvl8.py
|
Tarrke/python-challenge
|
65f3b3d4635036c02f76faf6b47912261183117b
|
[
"MIT"
] | null | null | null |
lvl8.py
|
Tarrke/python-challenge
|
65f3b3d4635036c02f76faf6b47912261183117b
|
[
"MIT"
] | null | null | null |
lvl8.py
|
Tarrke/python-challenge
|
65f3b3d4635036c02f76faf6b47912261183117b
|
[
"MIT"
] | 1
|
2019-04-11T17:39:00.000Z
|
2019-04-11T17:39:00.000Z
|
import bz2
# Two bzip2-compressed byte strings (the 'BZh91AY&SY' prefix is the bzip2
# stream magic).  Presumably a Python Challenge level-8 solution (file
# metadata names this lvl8.py) -- the hidden username/password pair.
un = b'BZh91AY&SYA\xaf\x82\r\x00\x00\x01\x01\x80\x02\xc0\x02\x00 \x00!\x9ah3M\x07<]\xc9\x14\xe1BA\x06\xbe\x084'
pw = b'BZh91AY&SY\x94$|\x0e\x00\x00\x00\x81\x00\x03$ \x00!\x9ah3M\x13<]\xc9\x14\xe1BBP\x91\xf08'
# Print the decompressed plaintext of each blob.
print(bz2.decompress(un))
print(bz2.decompress(pw))
| 39
| 111
| 0.717949
| 55
| 273
| 3.563636
| 0.618182
| 0.122449
| 0.183673
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.269231
| 0.047619
| 273
| 7
| 112
| 39
| 0.484615
| 0
| 0
| 0
| 0
| 0.4
| 0.69708
| 0.689781
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.4
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
731c805724b83ac98479e07961f53fa125f9c35e
| 166
|
py
|
Python
|
dataduct/data_access/__init__.py
|
hillsdale18/ProjectX
|
4518d724eeb8ac73a6eae1d076d4846244e0944a
|
[
"Apache-2.0"
] | null | null | null |
dataduct/data_access/__init__.py
|
hillsdale18/ProjectX
|
4518d724eeb8ac73a6eae1d076d4846244e0944a
|
[
"Apache-2.0"
] | null | null | null |
dataduct/data_access/__init__.py
|
hillsdale18/ProjectX
|
4518d724eeb8ac73a6eae1d076d4846244e0944a
|
[
"Apache-2.0"
] | 1
|
2020-05-12T08:54:38.000Z
|
2020-05-12T08:54:38.000Z
|
from .connection import get_sql_config
from .connection import rds_connection
from .connection import get_redshift_config
from .connection import redshift_connection
| 33.2
| 43
| 0.879518
| 22
| 166
| 6.363636
| 0.363636
| 0.4
| 0.571429
| 0.328571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 166
| 4
| 44
| 41.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7336673ea1c889bfa7526fe3bddf91b1ba1aea0c
| 134
|
py
|
Python
|
pytorch/torch/nn/_functions/thnn/__init__.py
|
raghavnauhria/whatmt
|
c20483a437c82936cb0fb8080925e37b9c4bba87
|
[
"MIT"
] | 15
|
2019-08-10T02:36:38.000Z
|
2021-07-14T13:45:32.000Z
|
torch/nn/_functions/thnn/__init__.py
|
wxwoods/mctorch
|
7cd6eb51fdd01fa75ed9245039a4f145ba342de2
|
[
"BSD-3-Clause"
] | 7
|
2019-10-21T03:08:51.000Z
|
2022-03-11T23:54:28.000Z
|
pytorch/torch/nn/_functions/thnn/__init__.py
|
raghavnauhria/whatmt
|
c20483a437c82936cb0fb8080925e37b9c4bba87
|
[
"MIT"
] | 5
|
2019-09-27T02:41:40.000Z
|
2021-11-05T20:40:49.000Z
|
_all_functions = []
from .auto import * # noqa: F401
from .normalization import * # noqa: F401
from .sparse import * # noqa: F401
| 22.333333
| 42
| 0.679104
| 17
| 134
| 5.235294
| 0.529412
| 0.337079
| 0.47191
| 0.404494
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084906
| 0.208955
| 134
| 5
| 43
| 26.8
| 0.754717
| 0.238806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b40da8e283c71da438e124a58cb7cb2f576008b5
| 49
|
py
|
Python
|
corehq/apps/locations/exceptions.py
|
dslowikowski/commcare-hq
|
ad8885cf8dab69dc85cb64f37aeaf06106124797
|
[
"BSD-3-Clause"
] | 1
|
2015-02-10T23:26:39.000Z
|
2015-02-10T23:26:39.000Z
|
corehq/apps/locations/exceptions.py
|
SEL-Columbia/commcare-hq
|
992ee34a679c37f063f86200e6df5a197d5e3ff6
|
[
"BSD-3-Clause"
] | 1
|
2022-03-12T01:03:25.000Z
|
2022-03-12T01:03:25.000Z
|
corehq/apps/locations/exceptions.py
|
johan--/commcare-hq
|
86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd
|
[
"BSD-3-Clause"
] | null | null | null |
class LocationImportError(Exception):
    """Raised when a location import cannot be processed."""
| 9.8
| 37
| 0.755102
| 4
| 49
| 9.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183673
| 49
| 4
| 38
| 12.25
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b411673955c3db6ce5208eb34fd86c21bb126ab6
| 23,682
|
py
|
Python
|
produce_pos.py
|
QihanW/Skeleton-based-3D-body-model
|
ae4f4cd374fcf86c6cc6558a2c6726bd773f159b
|
[
"Apache-2.0"
] | 2
|
2020-05-22T13:20:40.000Z
|
2021-01-05T14:07:04.000Z
|
produce_pos.py
|
QihanW/Skeleton-based-3D-body-model
|
ae4f4cd374fcf86c6cc6558a2c6726bd773f159b
|
[
"Apache-2.0"
] | 1
|
2021-01-05T14:10:31.000Z
|
2021-01-05T14:10:31.000Z
|
produce_pos.py
|
QihanW/Skeleton-based-3D-body-model
|
ae4f4cd374fcf86c6cc6558a2c6726bd773f159b
|
[
"Apache-2.0"
] | null | null | null |
from sympy import *
from math import *
# Module-level accumulators shared by the routines below.  Populated as the
# script runs; *_tru/*_res hold ground-truth vs. reconstructed values and
# *_error/*_dis hold per-axis error and distance figures.
lines = []
angles = []
dep_tru = []
dep_res = []
index =[]
x = []
y = []
z = []
x_res = []
y_res = []
z_res = []
x_error=[]
x_dis=[]
y_error=[]
y_dis=[]
z_error=[]
z_dis=[]
def onepoint(d_ab, d_ac, dep_b, dep_a, angle, a_x, a_y, c_x, c_y):
    """Solve for the two candidate planar positions of point B.

    Given the distances |AB| (d_ab) and |AC| (d_ac), depression angles dep_b
    and dep_a, the included angle BAC, and the (x, y) coordinates of A and C,
    intersect the two circles centred on A and C whose squared radii are the
    horizontal projections of AB and BC.

    Returns a list of the two intersection points, each as [x, y] taken from
    sympy.solve (raises IndexError if fewer than two solutions exist).

    Removed from the original: an unused nested distance() helper, dead
    commented-out code, and the unused ra/rc locals -- whose math.sqrt calls
    could raise ValueError on negative radicands for no benefit.
    """
    # Vertical drops: B below A, and C below A.
    b_a = d_ab * sin(dep_b)
    # Law of cosines gives |BC| from |AB|, |AC| and the included angle.
    d_bc = sqrt(d_ab ** 2 + d_ac ** 2 - 2 * d_ab * d_ac * cos(angle))
    c_a = d_ac * sin(dep_a)
    # Squared horizontal radii of the circles around A and C.
    ab = d_ab ** 2 - b_a ** 2
    bc = d_bc ** 2 - (c_a + b_a) ** 2
    x = Symbol('x')
    y = Symbol('y')
    res = solve([(x - a_x) ** 2 + (y - a_y) ** 2 - ab,
                 (x - c_x) ** 2 + (y - c_y) ** 2 - bc], [x, y])
    return [list(res[0]), list(res[1])]
def onepoint2(d_ab, d_ac, dep_b, angle, a_x, a_y, a_z, c_x, c_y, c_z):
    """Variant of onepoint() that takes the z-coordinates of A and C instead
    of a second depression angle.

    Returns a list of the two circle-intersection points, each as [x, y]
    taken from sympy.solve.

    Removed from the original: an unused nested distance() helper and dead
    commented-out code.
    """
    # Vertical drop of B below A.
    b_a = d_ab * sin(dep_b)
    # Law of cosines gives |BC| from |AB|, |AC| and the included angle.
    d_bc = sqrt(d_ab ** 2 + d_ac ** 2 - 2 * d_ab * d_ac * cos(angle))
    # Vertical offset used for the BC circle.
    c_b = c_z - a_z + b_a
    # abs() keeps the squared radii non-negative (guards numeric error).
    ab = abs(d_ab ** 2 - b_a ** 2)
    bc = abs(d_bc ** 2 - c_b ** 2)
    x = Symbol('x')
    y = Symbol('y')
    res = solve([(x - a_x) ** 2 + (y - a_y) ** 2 - ab,
                 (x - c_x) ** 2 + (y - c_y) ** 2 - bc], [x, y])
    return [list(res[0]), list(res[1])]
def read_test():
    """Locate each selected test frame inside the full motion file.

    Reads the full depth file and the selected-test file, chunks both
    into 20-value frames, records every selected frame in the module
    list ``dep_tru``, and returns the index of each selected frame's
    first occurrence in the full file (frames with no match are
    silently skipped, as before).
    """
    def _load_floats(path):
        # Parse a whitespace-separated float file into a flat list.
        vals = []
        with open(path, 'r') as f:
            for line in f.readlines():
                for kk in (line.strip().split(" ")):
                    vals.append(float(kk))
        return vals

    def _chunk(vals, width):
        # Split a flat list into consecutive rows of ``width`` values.
        return [vals[i * width:(i + 1) * width] for i in range(int(len(vals) / width))]

    tmp_all = _chunk(_load_floats("data/allMotion05dep.txt"), 20)
    tmp_part = _chunk(_load_floats("data/test_selected05.txt"), 20)
    for row in tmp_part:
        dep_tru.append(row)
    # Map each distinct frame to its first position in the full file:
    # O(n) instead of the old O(n*m) linear scan, and no shadowing of
    # the builtin ``list``.
    first_pos = {}
    for i, row in enumerate(tmp_all):
        first_pos.setdefault(tuple(row), i)
    index = []
    for row in tmp_part:
        key = tuple(row)
        if key in first_pos:
            index.append(first_pos[key])
    return index
#read_test()
def read_xyd():
    """Populate the module-level data lists for the selected test frames.

    Loads bone lengths (19/frame), joint angles (18/frame), predicted
    depths (20/frame) and ground-truth x/y/z coordinates (20/frame),
    then filters lines/angles/x/y/z down to the frames selected by
    ``read_test()``.  All predicted-depth rows go into ``dep_res``
    unfiltered, matching the original behaviour.
    """
    index = read_test()

    def _load_floats(path):
        # Parse a whitespace-separated float file into a flat list.
        vals = []
        with open(path, 'r') as f:
            for line in f.readlines():
                for kk in (line.strip().split(" ")):
                    vals.append(float(kk))
        return vals

    def _chunk(vals, width):
        # Split a flat list into consecutive rows of ``width`` values.
        return [vals[i * width:(i + 1) * width] for i in range(int(len(vals) / width))]

    # Bone lengths, angles, and predicted depths.
    tmp_x = _chunk(_load_floats("data/linesallMotion10.txt"), 19)
    tmp_y = _chunk(_load_floats("data/allMotion05angles.txt"), 18)
    for row in _chunk(_load_floats("data/test_results05.txt"), 20):
        dep_res.append(row)
    for i in index:
        lines.append(tmp_x[i])
        angles.append(tmp_y[i])
    # Ground-truth coordinates.  BUGFIX: the original sized the z-chunk
    # loop with len(tmp2) (the y data) instead of the z data, which
    # silently mis-chunked z whenever the files differed in length.
    ax = _chunk(_load_floats("data/axies_allMotion05x.txt"), 20)
    ay = _chunk(_load_floats("data/axies_allMotion05y.txt"), 20)
    az = _chunk(_load_floats("data/axies_allMotion05z.txt"), 20)
    for i in index:
        x.append(ax[i])
        y.append(ay[i])
        z.append(az[i])
#read_xyd()
def _solve_joint(tag, li_ab, li_ac, dep_i, ang_i, a_i, c_i, t_i):
    """Reconstruct one joint over all 525 test frames and report errors.

    Args:
        tag:   integer label printed before the statistics (1..12).
        li_ab: index into ``lines`` of the bone A->B length.
        li_ac: index into ``lines`` of the bone A->C length.
        dep_i: index into ``dep_res`` of the predicted depth angle.
        ang_i: index into ``angles`` of the planar angle at A.
        a_i:   joint index of anchor A in x/y/z.
        c_i:   joint index of reference C in x/y/z.
        t_i:   joint index of the target joint being reconstructed.

    Side effects: appends the four averaged error measures to
    x_error / y_error / x_dis / y_dis and prints tag plus the four
    values, exactly as the original inline sections did.
    """
    x_t = []
    y_t = []
    for i in range(525):
        sol = onepoint2(lines[i][li_ab], lines[i][li_ac], dep_res[i][dep_i], angles[i][ang_i],
                        x[i][a_i], y[i][a_i], z[i][a_i], x[i][c_i], y[i][c_i], z[i][c_i])
        (x1, y1), (x2, y2) = sol
        # Keep whichever of the two circle intersections lies closer
        # (L1 distance) to the ground-truth joint.
        dis1 = abs(x[i][t_i] - x1) + abs(y[i][t_i] - y1)
        dis2 = abs(x[i][t_i] - x2) + abs(y[i][t_i] - y2)
        if dis1 < dis2:
            x_t.append(x1)
            y_t.append(y1)
        else:
            x_t.append(x2)
            y_t.append(y2)
    sum1 = 0  # relative x error
    sum2 = 0  # relative y error
    sum3 = 0  # absolute x distance
    sum4 = 0  # absolute y distance
    for i in range(525):
        sum1 = sum1 + abs(x_t[i] - x[i][t_i]) / abs(x[i][t_i])
        sum2 = sum2 + abs(y_t[i] - y[i][t_i]) / abs(y[i][t_i])
        sum3 = sum3 + abs(x_t[i] - x[i][t_i])
        sum4 = sum4 + abs(y_t[i] - y[i][t_i])
    x_error.append(sum1 / 525)
    y_error.append(sum2 / 525)
    x_dis.append(sum3 / 525)
    y_dis.append(sum4 / 525)
    print(tag)
    print(sum1 / 525)
    print(sum2 / 525)
    print(sum3 / 525)
    print(sum4 / 525)
def produce_pos_1():
    """Reconstruct all twelve limb joints and print per-joint errors.

    The original function repeated the same ~37-line section twelve
    times with different indices; the sections are now data in the
    table below (same order, same indices, same printed output).
    """
    read_xyd()
    # (tag, lines A-B, lines A-C, dep_res, angles, anchor A, ref C, target)
    joints = [
        (1, 3, 1, 7, 2, 0, 2, 7),        # right elbow
        (2, 4, 3, 9, 4, 7, 0, 9),        # right wrist
        (3, 5, 4, 11, 6, 9, 7, 11),      # right hand
        (4, 6, 2, 8, 3, 1, 2, 8),        # left elbow
        (5, 7, 6, 10, 5, 8, 1, 10),      # left wrist
        (6, 8, 7, 12, 7, 10, 8, 12),     # left hand
        (7, 12, 11, 13, 12, 4, 6, 13),   # left knee
        (8, 13, 12, 15, 14, 13, 4, 15),  # left ankle
        (9, 14, 13, 17, 16, 15, 13, 17), # left foot
        (10, 16, 15, 14, 13, 5, 6, 14),  # right knee
        (11, 17, 16, 16, 15, 14, 5, 16), # right ankle
        (12, 18, 17, 18, 17, 16, 14, 18),# right foot
    ]
    for tag, li_ab, li_ac, dep_i, ang_i, a_i, c_i, t_i in joints:
        _solve_joint(tag, li_ab, li_ac, dep_i, ang_i, a_i, c_i, t_i)
#produce_pos_1()
def produce_z():
    """Evaluate the depth (z) reconstruction error for twelve joints.

    For each joint the z offset is ``sin(depth_angle) * bone_length``;
    this compares the true and predicted depth angles and reports, per
    joint, the mean absolute z distance and the mean relative z error.
    The original repeated the same 10-line section twelve times; the
    (depth index, bone-length index) pairs are now a table, iterated in
    the original order with identical printed output.
    """
    read_xyd()
    # (index into dep_tru/dep_res, index into lines) per joint, in the
    # original section order: R elbow/wrist/hand, L elbow/wrist/hand,
    # L knee/ankle/foot, R knee/ankle/foot.
    pairs = [
        (7, 3), (9, 4), (11, 5),
        (8, 6), (10, 7), (12, 8),
        (13, 12), (15, 13), (17, 14),
        (14, 16), (16, 17), (18, 18),
    ]
    for dep_i, line_i in pairs:
        sum1 = 0  # absolute z distance accumulator
        sum2 = 0  # relative z error accumulator
        for i in range(525):
            # Hoisted the common |sin(true) - sin(pred)| subexpression.
            diff = abs(sin(dep_tru[i][dep_i]) - sin(dep_res[i][dep_i]))
            sum1 = sum1 + diff * lines[i][line_i]
            sum2 = sum2 + diff / abs(sin(dep_tru[i][dep_i]) * lines[i][line_i])
        z_dis.append(sum1 / 525)
        z_error.append(sum2 / 525)
        print(sum1 / 525)
        print(sum2 / 525)
produce_z()
| 28.498195
| 132
| 0.47192
| 4,111
| 23,682
| 2.619314
| 0.033812
| 0.015973
| 0.026189
| 0.04597
| 0.867385
| 0.838039
| 0.810643
| 0.797455
| 0.765137
| 0.748236
| 0
| 0.116146
| 0.32814
| 23,682
| 831
| 133
| 28.498195
| 0.560618
| 0.049785
| 0
| 0.767575
| 0
| 0
| 0.009894
| 0.009003
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011478
| false
| 0
| 0.002869
| 0
| 0.021521
| 0.120517
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b44637b410314fdbc0624b55abda0beba71f568a
| 178
|
py
|
Python
|
tokopedia/product/api/product_api.py
|
hexatester/tokopedia
|
20e46c3ec2c70de6b24460634b7c185ffdb15691
|
[
"MIT"
] | 5
|
2021-07-01T05:09:20.000Z
|
2022-03-06T10:53:07.000Z
|
tokopedia/product/api/product_api.py
|
hexatester/tokopedia
|
20e46c3ec2c70de6b24460634b7c185ffdb15691
|
[
"MIT"
] | null | null | null |
tokopedia/product/api/product_api.py
|
hexatester/tokopedia
|
20e46c3ec2c70de6b24460634b7c185ffdb15691
|
[
"MIT"
] | 1
|
2022-02-14T01:20:34.000Z
|
2022-02-14T01:20:34.000Z
|
from . import GetProductApi
from . import GetProductVariantApi
from . import CreateProductApi
class ProductApi(GetProductApi, GetProductVariantApi, CreateProductApi):
    """Facade combining the product API mixins.

    Aggregates GetProductApi, GetProductVariantApi and CreateProductApi
    into a single class; all behaviour comes from the mixin bases.
    """
    pass
| 22.25
| 72
| 0.825843
| 15
| 178
| 9.8
| 0.533333
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129213
| 178
| 7
| 73
| 25.428571
| 0.948387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b489f684242a7fd677552dbd157a6931b370ae47
| 21,997
|
py
|
Python
|
models/character_coherence.py
|
Jamin-Chen/11711_COMICS
|
1c378eae07644b7f9d2e9ef5893c7c95310c06fb
|
[
"MIT"
] | null | null | null |
models/character_coherence.py
|
Jamin-Chen/11711_COMICS
|
1c378eae07644b7f9d2e9ef5893c7c95310c06fb
|
[
"MIT"
] | null | null | null |
models/character_coherence.py
|
Jamin-Chen/11711_COMICS
|
1c378eae07644b7f9d2e9ef5893c7c95310c06fb
|
[
"MIT"
] | null | null | null |
import theano, cPickle, lasagne, random, csv, gzip, time, argparse, sys
import numpy as np
import h5py as h5
import theano.tensor as T
from collections import Counter
from preprocess.character_coherence_minibatching import *
from layers_and_utils import *
# compute accuracy over a fold
def validate(fold_name, fold_data, fold_file, val_batch_size=1024):
    """Score one fold: fraction of candidates whose argmax prediction
    matches the argmax label, reported as a formatted summary string."""
    num_right = 0.
    num_seen = 0.
    for begin in range(0, len(fold_data[0]), val_batch_size):
        stop = begin + val_batch_size
        # NOTE: the fold dictionary is (re)read for every megabatch
        # window, mirroring the original call pattern.
        minibatches = generate_minibatches_from_megabatch(fold_data, vdict, begin, stop, fold_dict=read_fold(fold_file, vdict), shuffle_candidates=True)
        for batch in minibatches:
            scores = pred_fn(*batch[1:-1])
            gold = np.argmax(batch[-1], axis=-1)
            guess = np.argmax(scores, axis=-1)
            for row in range(scores.shape[0]):
                num_seen += 1
                if guess[row] == gold[row]:
                    num_right += 1
    return 'fold %s: got %d out of %d correct for %f accuracy' % (fold_name, num_right, num_seen, num_right/num_seen)
'''NETWORK ASSEMBLY'''
def build_text_only_network(d_word, d_hidden, lr, eps=1e-6):
    """Assemble the text-only coherence network.

    Args:
        d_word: word-embedding dimensionality.
        d_hidden: hidden size of the context/answer LSTMs.
        lr: Adam learning rate.
        eps: unused in this builder (kept for signature parity).

    Returns:
        (train_fn, pred_fn, l_scores): theano train/predict functions
        and the final softmax score layer.
    """
    # input theano vars -- image inputs are declared but unused here
    # (hence on_unused_input='warn' below), keeping one batch format
    # across all model variants.
    in_context_fc7 = T.tensor3(name='context_images')
    in_context_bb = T.tensor4(name='context_bb')
    in_bbmask = T.tensor3(name='bounding_box_mask')
    in_context = T.itensor4(name='context')
    in_cmask = T.tensor4(name='context_mask')
    in_answer_fc7 = T.matrix(name='answer_images')
    in_answer_bb = T.tensor3(name='answer_bb')
    in_ans1 = T.itensor3(name='answers')
    in_amask1 = T.tensor3(name='answer_mask')
    in_ans2 = T.itensor3(name='answers')
    in_amask2 = T.tensor3(name='answer_mask')
    in_labels = T.imatrix(name='labels')
    # define network
    l_context = lasagne.layers.InputLayer(shape=(None, max_panels, max_boxes, max_words),
                                          input_var=in_context)
    l_ans1 = lasagne.layers.InputLayer(shape=(None, 2, max_words), input_var=in_ans1)
    l_ans2 = lasagne.layers.InputLayer(shape=(None, 2, max_words), input_var=in_ans2)
    l_cmask = lasagne.layers.InputLayer(shape=l_context.shape, input_var=in_cmask)
    l_amask1 = lasagne.layers.InputLayer(shape=l_ans1.shape, input_var=in_amask1)
    l_amask2 = lasagne.layers.InputLayer(shape=l_ans2.shape, input_var=in_amask2)
    l_bbmask = lasagne.layers.InputLayer(shape=(None, 3, max_boxes), input_var=in_bbmask)
    # contexts and answers should share embeddings (both answer
    # embedding layers reuse the context embedding matrix W).
    l_context_emb = lasagne.layers.EmbeddingLayer(l_context, len_voc,
                                                  d_word, name='word_emb')
    l_ans1_emb = lasagne.layers.EmbeddingLayer(l_ans1, len_voc,
                                               d_word, W=l_context_emb.W)
    l_ans2_emb = lasagne.layers.EmbeddingLayer(l_ans2, len_voc,
                                               d_word, W=l_context_emb.W)
    # Sum word embeddings per text box, run an LSTM over boxes within a
    # panel, then an LSTM over the 3 context panels.
    l_context_box_reps = SumAverageLayer([l_context_emb, l_cmask], compute_sum=True, num_dims=4)
    l_box_reshape = lasagne.layers.ReshapeLayer(l_context_box_reps, (-1, max_boxes, d_word))
    l_bbmask_reshape = lasagne.layers.ReshapeLayer(l_bbmask, (-1, max_boxes))
    l_box_lstm = lasagne.layers.LSTMLayer(l_box_reshape, num_units=d_word, mask_input=l_bbmask_reshape, only_return_final=True)
    l_context_panel_reps = lasagne.layers.ReshapeLayer(l_box_lstm, (-1, 3, d_word))
    l_context_final_reps = lasagne.layers.LSTMLayer(l_context_panel_reps, num_units=d_hidden, only_return_final=True)
    # Answer 1 encoder: its LSTM gates are tied to the box LSTM's
    # weights (every Gate reuses l_box_lstm parameters).
    l_ans1_reps = SumAverageLayer([l_ans1_emb, l_amask1], compute_sum=True, num_dims=3)
    l_ans1_panel_reps = lasagne.layers.LSTMLayer(l_ans1_reps, num_units=d_hidden, only_return_final=True,
                                                 ingate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_ingate,
                                                                            W_hid=l_box_lstm.W_hid_to_ingate,
                                                                            W_cell=l_box_lstm.W_cell_to_ingate,
                                                                            b=l_box_lstm.b_ingate),
                                                 outgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_outgate,
                                                                             W_hid=l_box_lstm.W_hid_to_outgate,
                                                                             W_cell=l_box_lstm.W_cell_to_outgate,
                                                                             b=l_box_lstm.b_outgate),
                                                 forgetgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_forgetgate,
                                                                                W_hid=l_box_lstm.W_hid_to_forgetgate,
                                                                                W_cell=l_box_lstm.W_cell_to_forgetgate,
                                                                                b=l_box_lstm.b_forgetgate),
                                                 cell=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_cell,
                                                                          W_hid=l_box_lstm.W_hid_to_cell,
                                                                          W_cell=None,
                                                                          b=l_box_lstm.b_cell) )
    # Answer 2 encoder: identical tying to the box LSTM, so both answers
    # are encoded by the same parameters.
    l_ans2_reps = SumAverageLayer([l_ans2_emb, l_amask2], compute_sum=True, num_dims=3)
    l_ans2_panel_reps = lasagne.layers.LSTMLayer(l_ans2_reps, num_units=d_hidden, only_return_final=True,
                                                 ingate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_ingate,
                                                                            W_hid=l_box_lstm.W_hid_to_ingate,
                                                                            W_cell=l_box_lstm.W_cell_to_ingate,
                                                                            b=l_box_lstm.b_ingate),
                                                 outgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_outgate,
                                                                             W_hid=l_box_lstm.W_hid_to_outgate,
                                                                             W_cell=l_box_lstm.W_cell_to_outgate,
                                                                             b=l_box_lstm.b_outgate),
                                                 forgetgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_forgetgate,
                                                                                W_hid=l_box_lstm.W_hid_to_forgetgate,
                                                                                W_cell=l_box_lstm.W_cell_to_forgetgate,
                                                                                b=l_box_lstm.b_forgetgate),
                                                 cell=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_cell,
                                                                          W_hid=l_box_lstm.W_hid_to_cell,
                                                                          W_cell=None,
                                                                          b=l_box_lstm.b_cell) )
    # Score each answer against the context, then softmax over the pair.
    l_scores1 = InnerProductLayer([l_context_final_reps, l_ans1_panel_reps], is_cc=True)
    l_scores2 = InnerProductLayer([l_context_final_reps, l_ans2_panel_reps], is_cc=True)
    l_scores = lasagne.layers.concat([l_scores1, l_scores2], axis=-1)
    l_scores = lasagne.layers.NonlinearityLayer(l_scores, nonlinearity=lasagne.nonlinearities.softmax)
    preds = lasagne.layers.get_output(l_scores)
    loss = T.mean(lasagne.objectives.categorical_crossentropy(preds, in_labels))
    all_params = lasagne.layers.get_all_params(l_scores, trainable=True)
    updates = lasagne.updates.adam(loss, all_params, learning_rate=lr)
    # Both functions accept the full input set; unused image inputs only
    # warn rather than error.
    train_fn = theano.function([in_context_fc7, in_context_bb, in_bbmask, in_context, in_cmask,
                                in_answer_fc7, in_answer_bb, in_ans1, in_amask1, in_ans2, in_amask2, in_labels],
                               loss, updates=updates, on_unused_input='warn')
    pred_fn = theano.function([in_context_fc7, in_context_bb, in_bbmask, in_context, in_cmask,
                               in_answer_fc7, in_answer_bb, in_ans1, in_amask1, in_ans2, in_amask2],
                              preds, on_unused_input='warn')
    return train_fn, pred_fn, l_scores
def build_image_only_network(d_word, d_hidden, lr, eps=1e-6):
    """Assemble the image-only coherence network.

    Context panels are represented by their fc7 features only; answers
    use text embeddings concatenated with the answer-panel fc7 feature.

    Args:
        d_word: word-embedding / projection dimensionality.
        d_hidden: hidden size of the context LSTM and answer projection.
        lr: Adam learning rate.
        eps: unused in this builder (kept for signature parity).

    Returns:
        (train_fn, pred_fn, l_scores).
    """
    # input theano vars -- text-context inputs are declared but unused
    # here (hence on_unused_input='warn' below).
    in_context_fc7 = T.tensor3(name='context_images')
    in_context_bb = T.tensor4(name='context_bb')
    in_bbmask = T.tensor3(name='bounding_box_mask')
    in_context = T.itensor4(name='context')
    in_cmask = T.tensor4(name='context_mask')
    in_answer_fc7 = T.matrix(name='answer_images')
    in_answer_bb = T.tensor3(name='answer_bb')
    in_ans1 = T.itensor3(name='answers')
    in_amask1 = T.tensor3(name='answer_mask')
    in_ans2 = T.itensor3(name='answers')
    in_amask2 = T.tensor3(name='answer_mask')
    in_labels = T.imatrix(name='labels')
    # define network -- fc7 features are 4096-d (VGG-style), 3 context panels
    l_context_fc7 = lasagne.layers.InputLayer(shape=(None, 3, 4096), input_var=in_context_fc7)
    l_answer_fc7 = lasagne.layers.InputLayer(shape=(None, 4096), input_var=in_answer_fc7)
    l_ans1 = lasagne.layers.InputLayer(shape=(None, 2, max_words), input_var=in_ans1)
    l_ans2 = lasagne.layers.InputLayer(shape=(None, 2, max_words), input_var=in_ans2)
    l_amask1 = lasagne.layers.InputLayer(shape=l_ans1.shape, input_var=in_amask1)
    l_amask2 = lasagne.layers.InputLayer(shape=l_ans2.shape, input_var=in_amask2)
    l_bbmask = lasagne.layers.InputLayer(shape=(None, 3, max_boxes), input_var=in_bbmask)
    # contexts and answers should share embeddings (ans2 reuses ans1's W)
    l_ans1_emb = lasagne.layers.EmbeddingLayer(l_ans1, len_voc,
                                               d_word)
    l_ans2_emb = lasagne.layers.EmbeddingLayer(l_ans2, len_voc,
                                               d_word, W=l_ans1_emb.W)
    # Project panel fc7 features down to d_word and run an LSTM over the
    # 3 context panels.
    l_context_proj = lasagne.layers.DenseLayer(l_context_fc7, num_units=d_word, nonlinearity=lasagne.nonlinearities.rectify, num_leading_axes=2)
    l_context_final_reps = lasagne.layers.LSTMLayer(l_context_proj, num_units=d_hidden, only_return_final=True)
    # Answer 1: sum word embeddings, LSTM-encode, concat with the answer
    # panel's fc7 feature, then project to d_hidden.
    l_ans1_reps = SumAverageLayer([l_ans1_emb, l_amask1], compute_sum=True, num_dims=3)
    l_ans1_panel_reps = lasagne.layers.LSTMLayer(l_ans1_reps, num_units=d_word, only_return_final=True)
    l_ans1_concat = MyConcatLayer([l_ans1_panel_reps, l_answer_fc7], axis=-1)
    l_ans1_proj = lasagne.layers.DenseLayer(l_ans1_concat, num_units=d_hidden, nonlinearity=lasagne.nonlinearities.rectify)
    # Answer 2: same pipeline, with every LSTM gate tied to answer 1's
    # LSTM so both answers share encoder parameters.
    l_ans2_reps = SumAverageLayer([l_ans2_emb, l_amask2], compute_sum=True, num_dims=3)
    l_ans2_panel_reps = lasagne.layers.LSTMLayer(l_ans2_reps, num_units=d_word, only_return_final=True,
                                                 ingate=lasagne.layers.Gate(W_in=l_ans1_panel_reps.W_in_to_ingate,
                                                                            W_hid=l_ans1_panel_reps.W_hid_to_ingate,
                                                                            W_cell=l_ans1_panel_reps.W_cell_to_ingate,
                                                                            b=l_ans1_panel_reps.b_ingate),
                                                 outgate=lasagne.layers.Gate(W_in=l_ans1_panel_reps.W_in_to_outgate,
                                                                             W_hid=l_ans1_panel_reps.W_hid_to_outgate,
                                                                             W_cell=l_ans1_panel_reps.W_cell_to_outgate,
                                                                             b=l_ans1_panel_reps.b_outgate),
                                                 forgetgate=lasagne.layers.Gate(W_in=l_ans1_panel_reps.W_in_to_forgetgate,
                                                                                W_hid=l_ans1_panel_reps.W_hid_to_forgetgate,
                                                                                W_cell=l_ans1_panel_reps.W_cell_to_forgetgate,
                                                                                b=l_ans1_panel_reps.b_forgetgate),
                                                 cell=lasagne.layers.Gate(W_in=l_ans1_panel_reps.W_in_to_cell,
                                                                          W_hid=l_ans1_panel_reps.W_hid_to_cell,
                                                                          W_cell=None,
                                                                          b=l_ans1_panel_reps.b_cell) )
    l_ans2_concat = MyConcatLayer([l_ans2_panel_reps, l_answer_fc7], axis=-1)
    # The projection weights are shared with answer 1's projection too.
    l_ans2_proj = lasagne.layers.DenseLayer(l_ans2_concat, num_units=d_hidden, nonlinearity=lasagne.nonlinearities.rectify,
                                            W=l_ans1_proj.W, b=l_ans1_proj.b)
    # Score each answer against the context, then softmax over the pair.
    l_scores1 = InnerProductLayer([l_context_final_reps, l_ans1_proj], is_cc=True)
    l_scores2 = InnerProductLayer([l_context_final_reps, l_ans2_proj], is_cc=True)
    l_scores = lasagne.layers.concat([l_scores1, l_scores2], axis=-1)
    l_scores = lasagne.layers.NonlinearityLayer(l_scores, nonlinearity=lasagne.nonlinearities.softmax)
    preds = lasagne.layers.get_output(l_scores)
    loss = T.mean(lasagne.objectives.categorical_crossentropy(preds, in_labels))
    all_params = lasagne.layers.get_all_params(l_scores, trainable=True)
    updates = lasagne.updates.adam(loss, all_params, learning_rate=lr)
    # Both functions accept the full input set; unused text-context
    # inputs only warn rather than error.
    train_fn = theano.function([in_context_fc7, in_context_bb, in_bbmask, in_context, in_cmask,
                                in_answer_fc7, in_answer_bb, in_ans1, in_amask1, in_ans2, in_amask2, in_labels],
                               loss, updates=updates, on_unused_input='warn')
    pred_fn = theano.function([in_context_fc7, in_context_bb, in_bbmask, in_context, in_cmask,
                               in_answer_fc7, in_answer_bb, in_ans1, in_amask1, in_ans2, in_amask2],
                              preds, on_unused_input='warn')
    return train_fn, pred_fn, l_scores
def build_image_text_network(d_word, d_hidden, lr, eps=1e-6):
    """Build the image+text character-coherence model.

    Encodes context panels by combining VGG fc7 features with LSTM-encoded
    text boxes, scores two candidate answers against the final context
    representation, and compiles Theano train/predict functions.

    Parameters
    ----------
    d_word : int
        word-embedding size; also the per-box / per-panel text rep size
    d_hidden : int
        hidden size of the context-level LSTM and answer projections
    lr : float
        Adam learning rate
    eps : float
        unused in this function; kept for signature parity with the other
        build_* functions

    Returns (train_fn, pred_fn, l_scores).

    NOTE(review): reads module-level globals max_panels, max_boxes,
    max_words and len_voc, which are assigned in __main__ before this is
    called — confirm call order if reusing this module as a library.
    """
    # input theano vars
    in_context_fc7 = T.tensor3(name='context_images')
    in_context_bb = T.tensor4(name='context_bb')
    in_bbmask = T.tensor3(name='bounding_box_mask')
    in_context = T.itensor4(name='context')
    in_cmask = T.tensor4(name='context_mask')
    in_answer_fc7 = T.matrix(name='answer_images')
    in_answer_bb = T.tensor3(name='answer_bb')
    in_ans1 = T.itensor3(name='answers')
    in_amask1 = T.tensor3(name='answer_mask')
    in_ans2 = T.itensor3(name='answers')
    in_amask2 = T.tensor3(name='answer_mask')
    in_labels = T.imatrix(name='labels')
    # define network
    l_context_fc7 = lasagne.layers.InputLayer(shape=(None, 3, 4096), input_var=in_context_fc7)
    l_answer_fc7 = lasagne.layers.InputLayer(shape=(None, 4096), input_var=in_answer_fc7)
    l_context = lasagne.layers.InputLayer(shape=(None, max_panels, max_boxes, max_words),
        input_var=in_context)
    l_ans1 = lasagne.layers.InputLayer(shape=(None, 2, max_words), input_var=in_ans1)
    l_ans2 = lasagne.layers.InputLayer(shape=(None, 2, max_words), input_var=in_ans2)
    l_cmask = lasagne.layers.InputLayer(shape=l_context.shape, input_var=in_cmask)
    l_amask1 = lasagne.layers.InputLayer(shape=l_ans1.shape, input_var=in_amask1)
    l_amask2 = lasagne.layers.InputLayer(shape=l_ans2.shape, input_var=in_amask2)
    l_bbmask = lasagne.layers.InputLayer(shape=(None, 3, max_boxes), input_var=in_bbmask)
    # contexts and answers should share embeddings
    l_context_emb = lasagne.layers.EmbeddingLayer(l_context, len_voc,
        d_word, name='word_emb')
    l_ans1_emb = lasagne.layers.EmbeddingLayer(l_ans1, len_voc,
        d_word, W=l_context_emb.W)
    l_ans2_emb = lasagne.layers.EmbeddingLayer(l_ans2, len_voc,
        d_word, W=l_context_emb.W)
    # sum word embeddings within each text box (masked), then run an LSTM
    # over the boxes of each panel to get a per-panel text representation
    l_context_box_reps = SumAverageLayer([l_context_emb, l_cmask], compute_sum=True, num_dims=4)
    l_box_reshape = lasagne.layers.ReshapeLayer(l_context_box_reps, (-1, max_boxes, d_word))
    l_bbmask_reshape = lasagne.layers.ReshapeLayer(l_bbmask, (-1, max_boxes))
    l_box_lstm = lasagne.layers.LSTMLayer(l_box_reshape, num_units=d_word, mask_input=l_bbmask_reshape, only_return_final=True)
    l_context_panel_reps = lasagne.layers.ReshapeLayer(l_box_lstm, (-1, 3, d_word))
    # fuse text reps with fc7 image features, project, then LSTM over panels
    l_context_concat = MyConcatLayer([l_context_panel_reps, l_context_fc7], axis=-1)
    l_context_proj = lasagne.layers.DenseLayer(l_context_concat, num_units=d_word, nonlinearity=lasagne.nonlinearities.rectify, num_leading_axes=2)
    l_context_final_reps = lasagne.layers.LSTMLayer(l_context_proj, num_units=d_hidden, only_return_final=True)
    # candidate answer 1: same pipeline, with the answer LSTM sharing ALL
    # gate parameters with the context box LSTM (tied weights)
    l_ans1_reps = SumAverageLayer([l_ans1_emb, l_amask1], compute_sum=True, num_dims=3)
    l_ans1_panel_reps = lasagne.layers.LSTMLayer(l_ans1_reps, num_units=d_word, only_return_final=True,
        ingate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_ingate,
            W_hid=l_box_lstm.W_hid_to_ingate,
            W_cell=l_box_lstm.W_cell_to_ingate,
            b=l_box_lstm.b_ingate),
        outgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_outgate,
            W_hid=l_box_lstm.W_hid_to_outgate,
            W_cell=l_box_lstm.W_cell_to_outgate,
            b=l_box_lstm.b_outgate),
        forgetgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_forgetgate,
            W_hid=l_box_lstm.W_hid_to_forgetgate,
            W_cell=l_box_lstm.W_cell_to_forgetgate,
            b=l_box_lstm.b_forgetgate),
        cell=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_cell,
            W_hid=l_box_lstm.W_hid_to_cell,
            W_cell=None,
            b=l_box_lstm.b_cell) )
    l_ans1_concat = MyConcatLayer([l_ans1_panel_reps, l_answer_fc7], axis=-1)
    # answer projection shares weights with the context projection
    l_ans1_proj = lasagne.layers.DenseLayer(l_ans1_concat, num_units=d_hidden, nonlinearity=lasagne.nonlinearities.rectify,
        W=l_context_proj.W, b=l_context_proj.b)
    # candidate answer 2: identical tied-weight pipeline
    l_ans2_reps = SumAverageLayer([l_ans2_emb, l_amask2], compute_sum=True, num_dims=3)
    l_ans2_panel_reps = lasagne.layers.LSTMLayer(l_ans2_reps, num_units=d_word, only_return_final=True,
        ingate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_ingate,
            W_hid=l_box_lstm.W_hid_to_ingate,
            W_cell=l_box_lstm.W_cell_to_ingate,
            b=l_box_lstm.b_ingate),
        outgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_outgate,
            W_hid=l_box_lstm.W_hid_to_outgate,
            W_cell=l_box_lstm.W_cell_to_outgate,
            b=l_box_lstm.b_outgate),
        forgetgate=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_forgetgate,
            W_hid=l_box_lstm.W_hid_to_forgetgate,
            W_cell=l_box_lstm.W_cell_to_forgetgate,
            b=l_box_lstm.b_forgetgate),
        cell=lasagne.layers.Gate(W_in=l_box_lstm.W_in_to_cell,
            W_hid=l_box_lstm.W_hid_to_cell,
            W_cell=None,
            b=l_box_lstm.b_cell) )
    l_ans2_concat = MyConcatLayer([l_ans2_panel_reps, l_answer_fc7], axis=-1)
    l_ans2_proj = lasagne.layers.DenseLayer(l_ans2_concat, num_units=d_hidden, nonlinearity=lasagne.nonlinearities.rectify,
        W=l_context_proj.W, b=l_context_proj.b)
    # inner-product score of each candidate against the context, softmax over the pair
    l_scores1 = InnerProductLayer([l_context_final_reps, l_ans1_proj], is_cc=True)
    l_scores2 = InnerProductLayer([l_context_final_reps, l_ans2_proj], is_cc=True)
    l_scores = lasagne.layers.concat([l_scores1, l_scores2], axis=-1)
    l_scores = lasagne.layers.NonlinearityLayer(l_scores, nonlinearity=lasagne.nonlinearities.softmax)
    preds = lasagne.layers.get_output(l_scores)
    loss = T.mean(lasagne.objectives.categorical_crossentropy(preds, in_labels))
    all_params = lasagne.layers.get_all_params(l_scores, trainable=True)
    updates = lasagne.updates.adam(loss, all_params, learning_rate=lr)
    # bb inputs are accepted but unused by this variant, hence on_unused_input='warn'
    train_fn = theano.function([in_context_fc7, in_context_bb, in_bbmask, in_context, in_cmask,
        in_answer_fc7, in_answer_bb, in_ans1, in_amask1, in_ans2, in_amask2, in_labels],
        loss, updates=updates, on_unused_input='warn')
    pred_fn = theano.function([in_context_fc7, in_context_bb, in_bbmask, in_context, in_cmask,
        in_answer_fc7, in_answer_bb, in_ans1, in_amask1, in_ans2, in_amask2],
        preds, on_unused_input='warn')
    return train_fn, pred_fn, l_scores
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='character coherence models')
parser.add_argument('-data', default='data/comics.h5')
parser.add_argument('-vocab', default='data/comics_vocab.p')
parser.add_argument('-model', default='image_only',
help='image_text, image_only, or text_only')
parser.add_argument('-vgg_feats', default='data/vgg_features.h5')
parser.add_argument('-d_word', default=256, type=int)
parser.add_argument('-d_hidden', default=256, type=int)
parser.add_argument('-lr', default=0.001, type=float)
parser.add_argument('-n_epochs', default=10, type=int)
parser.add_argument('-megabatch_size', default=512, type=int)
parser.add_argument('-batch_size', default=64, type=int)
args = parser.parse_args()
print 'loading data...'
vdict, rvdict = cPickle.load(open(args.vocab, 'rb'))
comics_data = h5.File(args.data, 'r')
all_vggs = h5.File(args.vgg_feats, 'r')
train_data = load_hdf5(comics_data['train'], all_vggs['train'])
dev_data = load_hdf5(comics_data['dev'], all_vggs['dev'])
test_data = load_hdf5(comics_data['test'], all_vggs['test'])
print 'training %s model for character_coherence with d_word=%d, d_hidden=%d' %\
(args.model, args.d_word, args.d_hidden)
log = open('logs/%s_%s_%ddword_%ddhidden.log' % (args.model, 'character_coherence', \
args.d_word, args.d_hidden), 'w')
# predefined parameters
total_pages, max_panels, max_boxes, max_words = comics_data['train']['words'].shape
len_voc = len(vdict)
log = open('logs/%s_%s_%ddword_%ddhidden.log' % (args.model, 'character_coherence', \
args.d_word, args.d_hidden), 'w')
dev_fold = 'folds/%s_%s.csv' % ('char_coherence', 'dev')
test_fold = 'folds/%s_%s.csv' % ('char_coherence', 'test')
build_dict = {'image_text': build_image_text_network,
'image_only': build_image_only_network,
'text_only': build_text_only_network}
build_fn = build_dict[args.model]
print 'compiling'
train_fn, pred_fn, final_layer = build_fn(args.d_word, args.d_hidden, args.lr)
print 'done compiling'
# generate train minibatches
train_batches = [(x, x + args.megabatch_size) for x in range(0, total_pages, args.megabatch_size)]
print 'training...'
for epoch in range(args.n_epochs):
epoch_loss = 0.
start_time = time.time()
for start, end in train_batches:
for batch in generate_minibatches_from_megabatch(train_data, vdict,
start, end, context_size=3, shuffle_candidates=True):
batch_loss = train_fn(*batch[1:])
epoch_loss += batch_loss
epoch_log = 'done with epoch %d in %d seconds, loss is %f' % \
(epoch, time.time() - start_time, epoch_loss / len(train_batches))
log.write(epoch_log + '\n')
print epoch_log
dev_val = validate('dev', dev_data, dev_fold)
test_val = validate('test', test_data, test_fold)
log.write(dev_val + '\n')
log.write(test_val + '\n\n')
print dev_val
print test_val
log.flush()
| 55.829949
| 151
| 0.670273
| 3,224
| 21,997
| 4.169975
| 0.077233
| 0.08316
| 0.038084
| 0.029456
| 0.838962
| 0.830482
| 0.815903
| 0.803109
| 0.789423
| 0.765248
| 0
| 0.019257
| 0.228031
| 21,997
| 393
| 152
| 55.97201
| 0.772452
| 0.014138
| 0
| 0.66358
| 0
| 0
| 0.050252
| 0.002956
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.021605
| null | null | 0.024691
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81edb614de12175b3d3d6b961e28995f0a9363e7
| 1,837
|
py
|
Python
|
python/miind/include.py
|
dekamps/miind
|
4b321c62c2bd27eb0d5d8336a16a9e840ba63856
|
[
"MIT"
] | 13
|
2015-09-15T17:28:25.000Z
|
2022-03-22T20:26:47.000Z
|
python/miind/include.py
|
dekamps/miind
|
4b321c62c2bd27eb0d5d8336a16a9e840ba63856
|
[
"MIT"
] | 41
|
2015-08-25T07:50:55.000Z
|
2022-03-21T16:20:37.000Z
|
python/miind/include.py
|
dekamps/miind
|
4b321c62c2bd27eb0d5d8336a16a9e840ba63856
|
[
"MIT"
] | 9
|
2015-09-14T20:52:07.000Z
|
2022-03-08T12:18:18.000Z
|
# C++ preprocessor lines emitted at the top of generated MIIND simulation
# sources. `includes` is used for standalone executables; `lib_includes`
# additionally pulls in the Python C API and TVB-bridge headers for the
# shared-library build.
includes = [
    '#include <boost/timer/timer.hpp>',
    '#include <GeomLib.hpp>',
    '#include <TwoDLib.hpp>',
    '#include <MPILib/include/MPINetworkCode.hpp>',
    '#include <MPILib/include/RateAlgorithmCode.hpp>',
    '#include <MPILib/include/SimulationRunParameter.hpp>',
    '#include <MPILib/include/report/handler/RootReportHandler.hpp>',
    '#include <MPILib/include/WilsonCowanAlgorithm.hpp>',
    '#include <MPILib/include/PersistantAlgorithm.hpp>',
    '#include <MPILib/include/DelayAlgorithmCode.hpp>',
    '#include <MPILib/include/RateFunctorCode.hpp>',
    '#include <TwoDLib/GridReport.hpp>',
    '#include <thread>',
    '#include <TwoDLib/MeshAlgorithmCustom.hpp>',
    '#include <TwoDLib/GridAlgorithm.hpp>',
    '#include <TwoDLib/GridJumpAlgorithmCode.hpp>',
    '#include <TwoDLib/GridSomaDendriteAlgorithmCode.hpp>',
    '#include <MPILib/include/report/handler/MinimalReportHandler.hpp>',
]

lib_includes = [
    '#define PY_SSIZE_T_CLEAN',
    '#include <Python.h>',
    '#include <boost/timer/timer.hpp>',
    '#include <thread>',
    '#include <GeomLib.hpp>',
    '#include <TwoDLib.hpp>',
    '#include <MPILib/include/MPINetworkCode.hpp>',
    '#include <MPILib/include/RateAlgorithmCode.hpp>',
    '#include <MPILib/include/SimulationRunParameter.hpp>',
    '#include <MPILib/include/report/handler/RootReportHandler.hpp>',
    # the trailing '\n' below is preserved from the original source
    '#include <MPILib/include/report/handler/MinimalReportHandler.hpp>\n',
    '#include <MPILib/include/WilsonCowanAlgorithm.hpp>',
    '#include <TwoDLib/GridReport.hpp>',
    '#include <MPILib/include/PersistantAlgorithm.hpp>',
    '#include <TwoDLib/GridAlgorithm.hpp>',
    '#include <TwoDLib/MeshAlgorithmCustom.hpp>',
    '#include <TwoDLib/GridJumpAlgorithmCode.hpp>',
    '#include <TwoDLib/GridSomaDendriteAlgorithmCode.hpp>',
    '#include <MPILib/include/DelayAlgorithmCode.hpp>',
    '#include <MPILib/include/RateFunctorCode.hpp>',
    '#include <MPILib/include/MiindTvbModelAbstract.hpp>',
    '#include <MPILib/include/utilities/Exception.hpp>',
]
| 459.25
| 999
| 0.778443
| 187
| 1,837
| 7.625668
| 0.197861
| 0.231417
| 0.280505
| 0.306452
| 0.89972
| 0.89972
| 0.743338
| 0.630435
| 0.579944
| 0.579944
| 0
| 0
| 0.034839
| 1,837
| 3
| 1,000
| 612.333333
| 0.804287
| 0
| 0
| 0
| 0
| 0
| 0.908547
| 0.661949
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
5edaa9d844b6ba6eb5f23af772f8f58edc304ac3
| 19,800
|
py
|
Python
|
tests/test_TwoPoolStrategy.py
|
speraxdev/USDs
|
8ff2dfaf2173fadacf49619473d681707fc8507c
|
[
"MIT"
] | null | null | null |
tests/test_TwoPoolStrategy.py
|
speraxdev/USDs
|
8ff2dfaf2173fadacf49619473d681707fc8507c
|
[
"MIT"
] | null | null | null |
tests/test_TwoPoolStrategy.py
|
speraxdev/USDs
|
8ff2dfaf2173fadacf49619473d681707fc8507c
|
[
"MIT"
] | null | null | null |
import pytest
import json
import time
import brownie
@pytest.fixture(scope="module", autouse=True)
def invalid_collateral(usdt):
    # Module-scoped autouse fixture exposing the USDT token address.
    # NOTE(review): despite the name, this returns usdt.address, which the
    # tests below treat as a SUPPORTED collateral — confirm intent.
    return usdt.address
def user(accounts):
    # Returns the throwaway test account (index 9).
    # NOTE(review): shaped like a fixture but has no @pytest.fixture
    # decorator, and the name does not start with test_, so pytest neither
    # registers nor runs it — dead code unless decorated; confirm intent.
    return accounts[9]
def test_collect_interest_pTokens(sperax, usdt,owner_l2):
    """Donated 2CRV LP tokens are collected as interest without touching principal.

    NOTE(review): this body is identical to test_collect_interest defined
    later in this module — consider deduplicating.
    """
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    # manually get some LP tokens (2CRV) and transfer them to strategy_proxy;
    # strategy_proxy will mistake these LP tokens (after being converted back
    # to collateral) as earned interest
    amount = int(1000000000)
    curvePool = brownie.interface.ICurve2Pool('0x7f90122BF0700F9E7e1F688fe926940E8839F353')
    amounts = [0, amount]
    usdt.approve(curvePool.address, amount, {'from': owner_l2})
    curvePool.add_liquidity(amounts, 0, {'from': owner_l2})
    lpToken = brownie.interface.IERC20('0x7f90122bf0700f9e7e1f688fe926940e8839f353')
    lpToken.transfer(
        strategy_proxy, lpToken.balanceOf(owner_l2), {'from': owner_l2})
    interest = strategy_proxy.checkInterestEarned(
        usdt.address, {'from': vault_proxy.address})
    assert interest > 0
    # nothing was deposited through the vault, so no principal is allocated
    assert strategy_proxy.allocatedAmt(usdt.address) == 0
    strategy_proxy.collectInterest(
        vault_proxy.address,
        usdt.address,
        {'from': vault_proxy.address}
    )
    # interest collection must not change the allocated principal
    assert strategy_proxy.allocatedAmt(usdt.address) == 0
    assert usdt.balanceOf(vault_proxy.address) > 0
def test_total_pTokens_withdraw(sperax, weth, usdt, wbtc, owner_l2, accounts):
    """Withdraw part of a donated LP position, then collect the rest as interest.

    Smoke test only: it asserts nothing and merely checks the calls do not
    revert. The weth/wbtc fixtures are requested but unused.
    """
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1]
    # manually get some LP tokens (2CRV) and transfer them to strategy_proxy;
    # the strategy treats them (once converted back to collateral) as earned
    # interest
    amount = int(1000000000)
    curvePool = brownie.interface.ICurve2Pool('0x7f90122BF0700F9E7e1F688fe926940E8839F353')
    amounts = [0, amount]
    usdt.approve(curvePool.address, amount, {'from': owner_l2})
    curvePool.add_liquidity(amounts, 0, {'from': owner_l2})
    lpToken = brownie.interface.IERC20('0x7f90122bf0700f9e7e1f688fe926940e8839f353')
    lpToken.transfer(
        strategy_proxy, lpToken.balanceOf(owner_l2), {'from': owner_l2})
    # smoke-call checkBalance; the result was previously bound to an unused
    # local (`checkbalance`) — keep the call, drop the dead binding
    strategy_proxy.checkBalance(usdt, {'from': vault_proxy.address})
    # withdraw 1/10 of the previous deposit (dead `txn =` bindings removed)
    strategy_proxy.withdraw(
        accounts[9],
        usdt.address,
        (amount/10),
        {'from': vault_proxy.address}
    )
    strategy_proxy.collectInterest(
        accounts[8],
        usdt,
        {'from': vault_proxy.address}
    )
def test_withdraw(sperax, usdt, owner_l2, accounts):
    """Withdraw guards: no balance, zero recipient, non-vault caller, over-withdraw."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(1000000000)
    # withdraw before any deposit: must revert for lack of LP balance
    with brownie.reverts("Insufficient 2CRV balance"):
        txn = strategy_proxy.withdraw(
            accounts[9],
            usdt.address,
            amount,
            {'from': vault_proxy.address}
        )
    # testing the validity of recipient
    zero_address = "0x0000000000000000000000000000000000000000"
    with brownie.reverts("Invalid recipient"):
        txn = strategy_proxy.withdraw(
            zero_address,
            usdt.address,
            (amount),
            {'from': vault_proxy.address}
        )
    # usdt deposit
    txn = usdt.transfer(strategy_proxy.address,
        amount, {'from': owner_l2})
    assert txn.return_value == True
    txn = strategy_proxy.deposit(
        usdt.address,
        amount,
        {'from': vault_proxy.address}
    )
    assert txn.events['Deposit']['_asset'] == usdt.address
    assert txn.events['Deposit']['_amount'] == amount
    # withdraw 1/10 of the previous deposit
    txn = strategy_proxy.withdraw(
        accounts[9],
        usdt.address,
        (amount/10),
        {'from': vault_proxy.address}
    )
    # only the vault may call withdraw
    with brownie.reverts("Caller is not the Vault"):
        strategy_proxy.withdraw(
            accounts[9],
            usdt.address,
            (amount/10),
            {'from': owner_l2.address}
        )
    # cannot withdraw more than was deposited
    with brownie.reverts("Insufficient 2CRV balance"):
        strategy_proxy.withdraw(
            accounts[9],
            usdt.address,
            (amount + 1),
            {'from': vault_proxy.address}
        )
def test_check_balance(sperax, weth, usdt):
    """checkBalance succeeds for supported collateral and reverts otherwise."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1]
    zero_address = "0x0000000000000000000000000000000000000000"
    balance = strategy_proxy.checkBalance(usdt.address, {'from': vault_proxy.address})
    with brownie.reverts("Unsupported collateral"):
        balance = strategy_proxy.checkBalance(
            weth, {'from': vault_proxy.address})
    # NOTE(review): the call above reverts, so `balance` still holds the
    # usdt result from the first call — this assertion checks a stale value
    # and only passes when the usdt balance happens to be 0; confirm intent.
    assert balance == 0
    with brownie.reverts("Unsupported collateral"):
        strategy_proxy.checkBalance(
            zero_address, {'from': vault_proxy.address})
def test__safe_approve_all_tokens(sperax, owner_l2):
    """Owner can refresh all token approvals without reverting (smoke test)."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1]
    strategy_proxy.safeApproveAllTokens(
        {'from': owner_l2.address}
    )
def test_collect_reward_token(sperax):
    """Vault can trigger reward-token collection without reverting (smoke test)."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1]
    txn = strategy_proxy.collectRewardToken(
        {'from': vault_proxy.address})
def test_set_reward_Token_Address(sperax, usdt, owner_l2):
    """Owner can repoint the reward token address (here to USDT)."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    txn = strategy_proxy.setRewardTokenAddress(
        usdt.address,
        {'from': owner_l2.address})
def test_set_reward_liquidation_threshold(sperax, owner_l2):
    """Owner sets the reward liquidation threshold: non-zero, then zero."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    threshold = int(10)
    txn = strategy_proxy.setRewardLiquidationThreshold(
        threshold,
        {'from': owner_l2.address})
    low_threshold = int(0)
    # BUGFIX: the second call previously re-sent `threshold`, so the
    # zero-threshold path was never exercised; send `low_threshold`.
    txn = strategy_proxy.setRewardLiquidationThreshold(
        low_threshold,
        {'from': owner_l2.address})
def test_set_interest_liquidation_threshold(sperax, owner_l2):
    """Owner sets the interest liquidation threshold, first to 10 then to 0."""
    spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, \
        strategy_proxies, buybacks, bancor = sperax
    strategy_proxy = strategy_proxies[1]
    # exercise both a non-zero and a zero threshold
    for new_threshold in (int(10), int(0)):
        txn = strategy_proxy.setInterestLiquidationThreshold(
            new_threshold,
            {'from': owner_l2.address})
def test_set_PToken_address(sperax, usdt, owner_l2):
    """Set a pToken mapping once; a second set for the same asset reverts.

    NOTE(review): a function with this exact name is redefined later in this
    module and shadows this one at import time, so pytest never collects
    this version — rename one of the two.
    """
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    reward_address = '0x11cdb42b0eb46d95f990bedd4695a6e3fa034978'
    txn = strategy_proxy.setPTokenAddress(
        usdt.address,
        reward_address,
        {'from': owner_l2.address})
    # same asset again — must revert even with an identical pToken address
    reward_address2 = '0x11cdb42b0eb46d95f990bedd4695a6e3fa034978'
    with brownie.reverts("pToken already set"):
        strategy_proxy.setPTokenAddress(
            usdt.address,
            reward_address2,
            {'from': owner_l2.address})
def test_set_Reward_Token_zero_address_asset(sperax, owner_l2):
    """setPTokenAddress rejects the zero address for both asset and pToken."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    zero_address = "0x0000000000000000000000000000000000000000"
    with brownie.reverts("Invalid addresses"):
        strategy_proxy.setPTokenAddress(
            zero_address,
            zero_address,
            {'from': owner_l2.address}
        )
def test_set_PToken_address(sperax, usdt, owner_l2):
    """Re-setting an already-mapped pToken reverts.

    NOTE(review): this redefines (and shadows) an earlier function of the
    same name in this module; also ptoken_address and ptoken_address2 are
    identical, so the two reverting calls are duplicates.
    """
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    ptoken_address2 = '0x11cdb42b0eb46d95f990bedd4695a6e3fa034978'
    with brownie.reverts("pToken already set"):
        strategy_proxy.setPTokenAddress(
            usdt.address,
            ptoken_address2,
            {'from': owner_l2.address})
    ptoken_address = '0x11cdb42b0eb46d95f990bedd4695a6e3fa034978'
    with brownie.reverts("pToken already set"):
        strategy_proxy.setPTokenAddress(
            usdt.address,
            ptoken_address,
            {'from': owner_l2.address})
def test_remove_PToken(sperax, owner_l2):
    """Owner removes the pToken at index 0; an out-of-range index reverts."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    low_index = int(0)
    txn = strategy_proxy.removePToken(
        low_index,
        {'from': owner_l2.address}
    )
    print("removed PToken:", txn.events['PTokenRemoved']['_pToken'])
    print("removed asset:", txn.events['PTokenRemoved']['_asset'])
    # index far past the end of the pToken list must revert
    high_index = int(9999999999)
    with brownie.reverts("Invalid index"):
        strategy_proxy.removePToken(
            high_index,
            {'from': owner_l2.address}
        )
def test_remove_PToken2(sperax, owner_l2):
    """Owner removes the pToken at index 1 (no assertions; smoke test)."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    low_index = int(1)
    txn = strategy_proxy.removePToken(
        low_index,
        {'from': owner_l2.address}
    )
def test_remove_PToken_assets(sperax, owner_l2, accounts):
    """Remove the pToken at index 1 and print the emitted event fields."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    low_index = int(1)
    txn = strategy_proxy.removePToken(
        low_index,
        {'from': owner_l2.address}
    )
    print("removed PToken:", txn.events['PTokenRemoved']['_pToken'])
    print("removed asset:", txn.events['PTokenRemoved']['_asset'])
def test_deposit(sperax, usdt, accounts, owner_l2):
    """Vault deposits transferred USDT and the strategy reports a balance."""
    (
        spa,
        usds_proxy,
        core_proxy,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(9999)
    # fund the strategy first; deposit() pulls from the strategy's own balance
    txn = usdt.transfer(strategy_proxy.address, amount, {'from': owner_l2})
    assert txn.return_value == True
    txn = strategy_proxy.deposit(
        usdt.address,
        amount,
        {'from': vault_proxy.address}
    )
    assert txn.events['Deposit']['_asset'] == usdt.address
    assert txn.events['Deposit']['_amount'] == amount
    balance = strategy_proxy.checkBalance(usdt, {'from': vault_proxy.address})
    assert balance > 0
def test_deposit_invalid_amount(sperax, usdt):
    """Depositing a zero amount must revert."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(0)
    with brownie.reverts("Must deposit something"):
        txn = strategy_proxy.deposit(
            usdt.address,
            amount,
            {'from': vault_proxy.address}
        )
def test_deposit_invalid_assets(sperax,weth, invalid_collateral):
    """Depositing an unsupported asset (WETH) must revert.

    NOTE(review): the invalid_collateral fixture is requested but never
    used in the body — confirm intent.
    """
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(9999)
    with brownie.reverts("Unsupported collateral"):
        strategy_proxy.deposit(
            weth,
            amount,
            {'from': vault_proxy.address}
        )
def test_withdraw_invalid_assets(sperax, weth, accounts):
    """Withdrawing an unsupported asset (WETH) must revert."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(9999)
    with brownie.reverts("Unsupported collateral"):
        txn = strategy_proxy.withdraw(
            accounts[8],
            weth,
            (amount/10),
            {'from': vault_proxy.address})
def test_withdraw_invalid_amount(sperax, usdt, accounts):
    """Withdrawing a zero amount must revert."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(0)
    with brownie.reverts("Invalid amount"):
        txn = strategy_proxy.withdraw(
            accounts[8],
            usdt.address,
            (amount),
            {'from': vault_proxy.address}
        )
def test_collect_interest_invalid(sperax, usdt, weth, accounts, owner_l2):
    """Interest-collection guard paths, then a deposit to set up later tests."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(1000000000)
    # testing invalid cases
    zero_address = "0x0000000000000000000000000000000000000000"
    txn = strategy_proxy.supportsCollateral(
        weth,
        {'from': vault_proxy.address}
    )
    with brownie.reverts("Unsupported collateral"):
        txn = strategy_proxy.collectInterest(
            accounts[8],
            weth,
            {'from': vault_proxy.address}
        )
    with brownie.reverts("Invalid recipient"):
        txn = strategy_proxy.collectInterest(
            zero_address,
            usdt.address,
            {'from': vault_proxy.address}
        )
    with brownie.reverts("Unsupported collateral"):
        strategy_proxy.checkInterestEarned(
            weth, {'from': vault_proxy.address})
    # no interest has accrued yet, so a valid-looking collect still reverts
    # (revert string unchecked here)
    with brownie.reverts():
        strategy_proxy.collectInterest(
            accounts[8],
            usdt.address,
            {'from': vault_proxy.address}
        )
    # fund and deposit so subsequent tests have allocated collateral
    txn = usdt.transfer(strategy_proxy.address, amount, {'from': owner_l2})
    assert txn.return_value == True
    txn = strategy_proxy.deposit(
        usdt.address,
        amount,
        {'from': vault_proxy.address}
    )
    assert txn.events['Deposit']['_asset'] == usdt.address
    assert txn.events['Deposit']['_amount'] == amount
    print("Amount Deposited: ", amount)
def test_collect_interest_zero_interest(sperax, usdt, accounts):
    """Collecting when no interest has accrued must revert."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    interest = strategy_proxy.checkInterestEarned(
        usdt.address, {'from': vault_proxy.address})
    assert interest == 0
    with brownie.reverts("No interest earned"):
        strategy_proxy.collectInterest(
            vault_proxy.address,
            usdt.address,
            {'from': vault_proxy.address}
        )
def test_collect_interest(sperax, usdt, accounts, owner_l2):
    """Donated 2CRV LP tokens are collected as interest without touching principal.

    NOTE(review): this body is identical to test_collect_interest_pTokens
    earlier in this module — consider deduplicating.
    """
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    # manually get some LP tokens (2CRV) and transfer them to strategy_proxy;
    # strategy_proxy will mistake these LP tokens (after being converted back
    # to collateral) as earned interest
    amount = int(1000000000)
    curvePool = brownie.interface.ICurve2Pool('0x7f90122BF0700F9E7e1F688fe926940E8839F353')
    amounts = [0, amount]
    usdt.approve(curvePool.address, amount, {'from': owner_l2})
    curvePool.add_liquidity(amounts, 0, {'from': owner_l2})
    lpToken = brownie.interface.IERC20('0x7f90122bf0700f9e7e1f688fe926940e8839f353')
    lpToken.transfer(
        strategy_proxy, lpToken.balanceOf(owner_l2), {'from': owner_l2})
    interest = strategy_proxy.checkInterestEarned(
        usdt.address, {'from': vault_proxy.address})
    assert interest > 0
    assert strategy_proxy.allocatedAmt(usdt.address) == 0
    strategy_proxy.collectInterest(
        vault_proxy.address,
        usdt.address,
        {'from': vault_proxy.address}
    )
    # interest collection must not change the allocated principal
    assert strategy_proxy.allocatedAmt(usdt.address) == 0
    assert usdt.balanceOf(vault_proxy.address) > 0
def test_withdraw_to_vault_invalid_amount(sperax, usdt, owner_l2):
    """withdrawToVault with a zero amount must revert."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(0)
    with brownie.reverts("Invalid amount"):
        txn = strategy_proxy.withdrawToVault(
            usdt.address,
            (amount),
            {'from': owner_l2.address}
        )
def test_withdraw_to_vault_invalid_assets(sperax, weth, owner_l2):
    """withdrawToVault with an unsupported asset (WETH) must revert."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(10005)
    with brownie.reverts("Unsupported collateral"):
        strategy_proxy.withdrawToVault(
            weth,
            (amount),
            {'from': owner_l2.address})
def test_withdraw_to_vault(sperax, usdt, owner_l2):
    """Owner withdraws half a deposit to the vault; over-withdrawal reverts."""
    (
        spa,
        usds_proxy,
        vault_core_tools,
        vault_proxy,
        oracle_proxy,
        strategy_proxies,
        buybacks,
        bancor
    ) = sperax
    strategy_proxy = strategy_proxies[1];
    amount = int(1000000)
    # fund the strategy, then deposit through the vault
    txn = usdt.transfer(strategy_proxy.address,
        amount, {'from': owner_l2})
    assert txn.return_value == True
    txn = strategy_proxy.deposit(
        usdt.address,
        amount,
        {'from': vault_proxy.address}
    )
    assert txn.events['Deposit']['_asset'] == usdt.address
    # owner (not vault) pulls half of the position back to the vault
    txn = strategy_proxy.withdrawToVault(
        usdt.address,
        (amount/2),
        {'from': owner_l2.address}
    )
    # more than the remaining LP balance must revert
    with brownie.reverts("Insufficient 2CRV balance"):
        strategy_proxy.withdrawToVault(
            usdt.address,
            (amount + 10000),
            {'from': owner_l2.address}
        )
| 27.086183
| 100
| 0.61202
| 1,953
| 19,800
| 5.956989
| 0.076805
| 0.105037
| 0.089393
| 0.055957
| 0.882757
| 0.86952
| 0.831013
| 0.792333
| 0.735345
| 0.703455
| 0
| 0.048774
| 0.285505
| 19,800
| 730
| 101
| 27.123288
| 0.773592
| 0.045657
| 0
| 0.785489
| 0
| 0
| 0.08136
| 0.031146
| 0
| 0
| 0.031146
| 0
| 0.0347
| 1
| 0.044164
| false
| 0
| 0.006309
| 0.003155
| 0.053628
| 0.007886
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f1bf12f3f0208b5f10f1a7e757d387fc47150c4
| 7,612
|
py
|
Python
|
tests/integration/local_group_test.py
|
nordic-institute/X-Road-Security-Server-toolkit
|
1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb
|
[
"MIT"
] | 7
|
2020-11-01T19:50:11.000Z
|
2022-01-18T17:45:19.000Z
|
tests/integration/local_group_test.py
|
nordic-institute/X-Road-Security-Server-toolkit
|
1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb
|
[
"MIT"
] | 24
|
2020-11-09T08:09:10.000Z
|
2021-06-16T07:22:14.000Z
|
tests/integration/local_group_test.py
|
nordic-institute/X-Road-Security-Server-toolkit
|
1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb
|
[
"MIT"
] | 1
|
2021-04-27T14:39:48.000Z
|
2021-04-27T14:39:48.000Z
|
from tests.util.test_util import get_client
from xrdsst.main import XRDSSTTest
from xrdsst.core.conf_keys import ConfKeysSecServerClients
from xrdsst.controllers.local_group import LocalGroupController, LocalGroupListMapper
class LocalGroupTest:
    def __init__(self, end_to_end_tests):
        # Shared end-to-end harness carrying the parsed toolkit configuration
        # (self.test.config) used by every step below.
        self.test = end_to_end_tests
    def step_add_local_group(self):
        """Add each configured local group and verify exactly one exists per client."""
        with XRDSSTTest() as app:
            local_group_controller = LocalGroupController()
            local_group_controller.app = app
            # ssn indexes the security server, used to find the matching client
            ssn = 0
            for security_server_conf in self.test.config["security_server"]:
                configuration = local_group_controller.create_api_config(security_server_conf, self.test.config)
                for client_conf in security_server_conf["clients"]:
                    # only subsystem clients can own local groups
                    if ConfKeysSecServerClients.CONF_KEY_SS_CLIENT_SUBSYSTEM_CODE in client_conf:
                        for local_group_conf in client_conf[ConfKeysSecServerClients.CONF_KEY_LOCAL_GROUPS]:
                            found_client = get_client(self.test.config, client_conf, ssn)
                            local_group_controller.remote_add_local_group(configuration, security_server_conf,
                                                                          client_conf, local_group_conf)
                            assert len(found_client) > 0
                            client_local_groups = local_group_controller.remote_list_local_groups(configuration, found_client[0]["id"])
                            assert len(client_local_groups) == 1
                ssn = ssn + 1
    def list_local_groups(self):
        """List local groups per client and check the rendered table output.

        NOTE(review): unlike its siblings this method is not named step_* —
        confirm how the end-to-end driver discovers it.
        """
        with XRDSSTTest() as app:
            local_group_controller = LocalGroupController()
            local_group_controller.app = app
            ssn = 0
            for security_server_conf in self.test.config["security_server"]:
                configuration = local_group_controller.create_api_config(security_server_conf, self.test.config)
                for client_conf in security_server_conf["clients"]:
                    if ConfKeysSecServerClients.CONF_KEY_SS_CLIENT_SUBSYSTEM_CODE in client_conf:
                        found_client = get_client(self.test.config, client_conf, ssn)
                        client_local_groups = local_group_controller.remote_list_local_groups(configuration, found_client[0]["id"])
                        assert len(client_local_groups) == 1
                        # every expected column header must appear in the rendered output
                        for header in LocalGroupListMapper.headers():
                            assert header in local_group_controller.app._last_rendered[0][0]
                        assert len(local_group_controller.app._last_rendered[0]) == 2
                ssn = ssn + 1
    def step_add_local_group_member(self):
        """Add the configured member to each local group and verify membership."""
        with XRDSSTTest() as app:
            local_group_controller = LocalGroupController()
            local_group_controller.app = app
            ssn = 0
            for security_server_conf in self.test.config["security_server"]:
                configuration = local_group_controller.create_api_config(security_server_conf, self.test.config)
                for client_conf in security_server_conf["clients"]:
                    if ConfKeysSecServerClients.CONF_KEY_SS_CLIENT_SUBSYSTEM_CODE in client_conf:
                        for local_group_conf in client_conf[ConfKeysSecServerClients.CONF_KEY_LOCAL_GROUPS]:
                            local_group_controller.remote_add_local_group_member(configuration, security_server_conf,
                                                                                 client_conf, local_group_conf)
                            found_client = get_client(self.test.config, client_conf, ssn)
                            client_local_groups = local_group_controller.remote_list_local_groups(configuration, found_client[0]["id"])
                            # exactly one group with exactly one member expected
                            assert len(client_local_groups) == 1
                            assert len(client_local_groups[0].members) == 1
                ssn = ssn + 1
def step_delete_local_group_member(self):
    """Delete the previously added member from each local group."""
    with XRDSSTTest() as app:
        controller = LocalGroupController()
        controller.app = app
        ssn = 0
        for security_server_conf in self.test.config["security_server"]:
            configuration = controller.create_api_config(security_server_conf, self.test.config)
            for client_conf in security_server_conf["clients"]:
                if ConfKeysSecServerClients.CONF_KEY_SS_CLIENT_SUBSYSTEM_CODE in client_conf:
                    for local_group_conf in client_conf[ConfKeysSecServerClients.CONF_KEY_LOCAL_GROUPS]:
                        found_client = get_client(self.test.config, client_conf, ssn)
                        groups_before = controller.remote_list_local_groups(
                            configuration, found_client[0]["id"])
                        # precondition: one group with exactly one member
                        assert len(groups_before) == 1
                        assert len(groups_before[0].members) == 1
                        group_ids = [groups_before[0].id]
                        member_id = groups_before[0].members[0].id
                        controller.remote_delete_local_group_member(configuration, group_ids, member_id)
                        groups_after = controller.remote_list_local_groups(
                            configuration, found_client[0]["id"])
                        # the member is gone, the group itself remains
                        assert len(groups_after[0].members) == 0
            ssn = ssn + 1
def step_delete_local_group(self):
    """Delete each client's local group and verify none remain."""
    with XRDSSTTest() as app:
        controller = LocalGroupController()
        controller.app = app
        ssn = 0
        for security_server_conf in self.test.config["security_server"]:
            configuration = controller.create_api_config(security_server_conf, self.test.config)
            for client_conf in security_server_conf["clients"]:
                if ConfKeysSecServerClients.CONF_KEY_SS_CLIENT_SUBSYSTEM_CODE in client_conf:
                    for local_group_conf in client_conf[ConfKeysSecServerClients.CONF_KEY_LOCAL_GROUPS]:
                        found_client = get_client(self.test.config, client_conf, ssn)
                        groups_before = controller.remote_list_local_groups(
                            configuration, found_client[0]["id"])
                        assert len(groups_before) == 1
                        group_ids = [groups_before[0].id]
                        controller.remote_delete_local_group(configuration, group_ids)
                        groups_after = controller.remote_list_local_groups(
                            configuration, found_client[0]["id"])
                        # deletion removed the only group
                        assert len(groups_after) == 0
            ssn = ssn + 1
def test_run_configuration(self):
    """Run the local-group scenario steps in their required order."""
    self.step_add_local_group()
    self.list_local_groups()
    self.step_add_local_group_member()
    self.step_delete_local_group_member()
    self.step_delete_local_group()
| 61.886179
| 140
| 0.600499
| 790
| 7,612
| 5.360759
| 0.08481
| 0.120425
| 0.132231
| 0.067532
| 0.892562
| 0.874144
| 0.867769
| 0.820071
| 0.794097
| 0.749705
| 0
| 0.00758
| 0.341435
| 7,612
| 122
| 141
| 62.393443
| 0.837223
| 0
| 0
| 0.72381
| 0
| 0
| 0.01629
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.066667
| false
| 0
| 0.038095
| 0
| 0.114286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
488bad838a7c088a1588e8dc0bde319e551e6ce7
| 19,826
|
py
|
Python
|
r3det/core/bbox/coder/delta_xywha_hbbox_coder.py
|
SJTU-Thinklab-Det/r3det-pytorch
|
aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd
|
[
"Apache-2.0"
] | 42
|
2021-12-09T10:02:35.000Z
|
2022-03-30T08:40:20.000Z
|
r3det/core/bbox/coder/delta_xywha_hbbox_coder.py
|
SJTU-Thinklab-Det/r3det-pytorch
|
aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd
|
[
"Apache-2.0"
] | 13
|
2021-12-14T01:47:32.000Z
|
2022-03-30T08:01:17.000Z
|
r3det/core/bbox/coder/delta_xywha_hbbox_coder.py
|
SJTU-Thinklab-Det/r3det-pytorch
|
aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd
|
[
"Apache-2.0"
] | 5
|
2021-12-14T09:57:29.000Z
|
2022-03-03T12:25:54.000Z
|
import mmcv
import numpy as np
import torch
from mmdet.core.bbox.builder import BBOX_CODERS
from mmdet.core.bbox.coder.base_bbox_coder import BaseBBoxCoder
pi = np.pi
@BBOX_CODERS.register_module()
class DeltaXYWHAHBBoxCoder(BaseBBoxCoder):
    """Delta XYWHA HBBox coder.

    This coder encodes bbox (x1, y1, x2, y2) into delta (dx, dy, dw, dh, da)
    and decodes delta (dx, dy, dw, dh, da) back to original bbox
    (cx, cy, w, h, a).

    Args:
        target_means (Sequence[float]): Denormalizing means of target for
            delta coordinates.
        target_stds (Sequence[float]): Denormalizing standard deviation of
            target for delta coordinates.
        angle_range (str): Angle encoding scheme, one of ``'v1'``, ``'v2'``
            or ``'v3'``; selects which ``bbox2delta_*``/``delta2bbox_*``
            pair :meth:`encode` and :meth:`decode` dispatch to.
            Default ``'v1'``.
        clip_border (bool, optional): Whether clip the objects outside the
            border of the image. Defaults to True.
        add_ctr_clamp (bool): Whether to add center clamp, when added, the
            predicted box is clamped is its center is too far away from
            the original anchor's center. Only used by YOLOF. Default False.
        ctr_clamp (int): the maximum pixel shift to clamp. Only used by YOLOF.
            Default 32.
    """

    def __init__(self,
                 target_means=(0., 0., 0., 0., 0.),
                 target_stds=(1., 1., 1., 1., 1.),
                 angle_range='v1',
                 clip_border=True,
                 add_ctr_clamp=False,
                 ctr_clamp=32):
        # BUG FIX: the original called ``super(BaseBBoxCoder, self).__init__()``,
        # which starts the MRO lookup *after* BaseBBoxCoder and therefore
        # skips ``BaseBBoxCoder.__init__`` entirely.  The zero-argument form
        # initializes the direct base class as intended.
        super().__init__()
        self.means = target_means
        self.stds = target_stds
        self.angle_range = angle_range
        self.clip_border = clip_border
        self.add_ctr_clamp = add_ctr_clamp
        self.ctr_clamp = ctr_clamp

    def encode(self, bboxes, gt_bboxes):
        """Get box regression transformation deltas that can be used to
        transform the ``bboxes`` into the ``gt_bboxes``.

        Args:
            bboxes (torch.Tensor): Source boxes, e.g., object proposals.
            gt_bboxes (torch.Tensor): Target of the transformation, e.g.,
                ground-truth boxes.

        Returns:
            torch.Tensor: Box transformation deltas

        Raises:
            NotImplementedError: If ``self.angle_range`` is not one of
                'v1', 'v2', 'v3'.
        """
        assert bboxes.size(0) == gt_bboxes.size(0)
        assert bboxes.size(-1) == 4
        assert gt_bboxes.size(-1) == 5
        if self.angle_range == 'v1':
            return bbox2delta_v1(bboxes, gt_bboxes, self.means, self.stds)
        elif self.angle_range == 'v2':
            return bbox2delta_v2(bboxes, gt_bboxes, self.means, self.stds)
        elif self.angle_range == 'v3':
            return bbox2delta_v3(bboxes, gt_bboxes, self.means, self.stds)
        else:
            # Name the offending value instead of raising a bare error.
            raise NotImplementedError(
                f'angle_range {self.angle_range!r} is not supported')

    def decode(self,
               bboxes,
               pred_bboxes,
               max_shape=None,
               wh_ratio_clip=16 / 1000):
        """Apply transformation `pred_bboxes` to `boxes`.

        Args:
            bboxes (torch.Tensor): Basic boxes. Shape (B, N, 4) or (N, 4)
            pred_bboxes (torch.Tensor): Encoded offsets with respect to each
                roi. Has shape (B, N, num_classes * 5) or (B, N, 5) or
                (N, num_classes * 5) or (N, 5). Note N = num_anchors * W * H
                when rois is a grid of anchors.Offset encoding follows [1]_.
            max_shape (Sequence[int] or torch.Tensor or Sequence[
                Sequence[int]],optional): Maximum bounds for boxes, specifies
                (H, W, C) or (H, W). If bboxes shape is (B, N, 5), then
                the max_shape should be a Sequence[Sequence[int]]
                and the length of max_shape should also be B.
            wh_ratio_clip (float, optional): The allowed ratio between
                width and height.

        Returns:
            torch.Tensor: Decoded boxes.

        Raises:
            NotImplementedError: If ``self.angle_range`` is not one of
                'v1', 'v2', 'v3'.
        """
        assert pred_bboxes.size(0) == bboxes.size(0)
        if pred_bboxes.ndim == 3:
            assert pred_bboxes.size(1) == bboxes.size(1)
        assert bboxes.size(-1) == 4
        assert pred_bboxes.size(-1) == 5
        if self.angle_range == 'v1':
            return delta2bbox_v1(bboxes, pred_bboxes, self.means, self.stds,
                                 wh_ratio_clip, self.add_ctr_clamp,
                                 self.ctr_clamp)
        elif self.angle_range == 'v2':
            return delta2bbox_v2(bboxes, pred_bboxes, self.means, self.stds,
                                 wh_ratio_clip, self.add_ctr_clamp,
                                 self.ctr_clamp)
        elif self.angle_range == 'v3':
            return delta2bbox_v3(bboxes, pred_bboxes, self.means, self.stds,
                                 wh_ratio_clip, self.add_ctr_clamp,
                                 self.ctr_clamp)
        else:
            raise NotImplementedError(
                f'angle_range {self.angle_range!r} is not supported')
@mmcv.jit(coderize=True)
def bbox2delta_v1(proposals,
                  gt,
                  means=(0., 0., 0., 0., 0.),
                  stds=(1., 1., 1., 1., 1.)):
    """Compute (dx, dy, dw, dh, da) deltas of horizontal ``proposals``
    w.r.t. rotated ``gt`` boxes; inverse of :func:`delta2bbox_v1`.

    The angle delta is taken directly from the gt angle (no wrapping).

    Args:
        proposals (torch.Tensor): Boxes to be transformed, shape (N, ..., 4)
        gt (torch.Tensor): Gt bboxes to be used as base, shape (N, ..., 5)
        means (Sequence[float]): Denormalizing means for delta coordinates
        stds (Sequence[float]): Denormalizing standard deviation for delta
            coordinates.

    Returns:
        Tensor: deltas with shape (N, 5), where columns represent dx, dy,
            dw, dh, da.
    """
    proposals = proposals.float()
    gt = gt.float()
    # Proposal center and size from its corner representation.
    cx = (proposals[..., 0] + proposals[..., 2]) * 0.5
    cy = (proposals[..., 1] + proposals[..., 3]) * 0.5
    w = proposals[..., 2] - proposals[..., 0]
    h = proposals[..., 3] - proposals[..., 1]
    gx, gy, gw, gh, ga = gt.unbind(dim=-1)
    deltas = torch.stack([
        (gx - cx) / w,
        (gy - cy) / h,
        torch.log(gw / w),
        torch.log(gh / h),
        ga,
    ], dim=-1)
    # Normalize in place on the freshly stacked tensor.
    deltas = deltas.sub_(deltas.new_tensor(means).unsqueeze(0)) \
                   .div_(deltas.new_tensor(stds).unsqueeze(0))
    return deltas
@mmcv.jit(coderize=True)
def delta2bbox_v1(rois,
                  deltas,
                  means=(0., 0., 0., 0., 0.),
                  stds=(1., 1., 1., 1., 1.),
                  wh_ratio_clip=16 / 1000,
                  add_ctr_clamp=False,
                  ctr_clamp=32):
    """Apply (dx, dy, dw, dh, da) deltas to horizontal base boxes;
    inverse of :func:`bbox2delta_v1`.  The decoded angle is the raw da.

    Args:
        rois (torch.Tensor): Boxes to be transformed. Has shape (N, 4).
        deltas (torch.Tensor): Encoded offsets relative to each roi.
            Has shape (N, num_classes * 5) or (N, 5). Note
            N = num_base_anchors * W * H, when rois is a grid of
            anchors. Offset encoding follows [1]_.
        means (Sequence[float]): Denormalizing means for delta coordinates.
            Default (0., 0., 0., 0., 0.).
        stds (Sequence[float]): Denormalizing standard deviation for delta
            coordinates. Default (1., 1., 1., 1., 1.).
        wh_ratio_clip (float): Maximum aspect ratio for boxes. Default
            16 / 1000.
        add_ctr_clamp (bool): Whether to add center clamp, when added, the
            predicted box is clamped is its center is too far away from
            the original anchor's center. Only used by YOLOF. Default False.
        ctr_clamp (int): the maximum pixel shift to clamp. Only used by YOLOF.
            Default 32.

    Returns:
        Tensor: Boxes with shape (N, num_classes * 5) or (N, 5), where 5
            represent cx, cy, w, h, a.
    """
    groups = deltas.size(-1) // 5  # one 5-tuple per class
    mean_t = deltas.new_tensor(means).view(1, -1).repeat(1, groups)
    std_t = deltas.new_tensor(stds).view(1, -1).repeat(1, groups)
    denorm = deltas * std_t + mean_t
    dx = denorm[..., 0::5]
    dy = denorm[..., 1::5]
    dw = denorm[..., 2::5]
    dh = denorm[..., 3::5]
    da = denorm[..., 4::5]
    # Roi centers/sizes, broadcast to the per-class delta layout.
    px = ((rois[..., 0] + rois[..., 2]) * 0.5).unsqueeze(-1).expand_as(dx)
    py = ((rois[..., 1] + rois[..., 3]) * 0.5).unsqueeze(-1).expand_as(dy)
    pw = (rois[..., 2] - rois[..., 0]).unsqueeze(-1).expand_as(dw)
    ph = (rois[..., 3] - rois[..., 1]).unsqueeze(-1).expand_as(dh)
    x_shift = pw * dx
    y_shift = ph * dy
    max_ratio = np.abs(np.log(wh_ratio_clip))
    if add_ctr_clamp:
        # YOLOF-style clamping: bound the center shift in pixels and only
        # cap the upper side of the size deltas.
        x_shift = torch.clamp(x_shift, max=ctr_clamp, min=-ctr_clamp)
        y_shift = torch.clamp(y_shift, max=ctr_clamp, min=-ctr_clamp)
        dw = torch.clamp(dw, max=max_ratio)
        dh = torch.clamp(dh, max=max_ratio)
    else:
        dw = dw.clamp(min=-max_ratio, max=max_ratio)
        dh = dh.clamp(min=-max_ratio, max=max_ratio)
    # exp() scales width/height, shifts move the center, angle passes through.
    decoded = torch.stack(
        [px + x_shift, py + y_shift, pw * dw.exp(), ph * dh.exp(), da],
        dim=-1)
    return decoded.view_as(deltas)
@mmcv.jit(coderize=True)
def bbox2delta_v2(proposals,
                  gt,
                  means=(0., 0., 0., 0., 0.),
                  stds=(1., 1., 1., 1., 1.)):
    """Compute (dx, dy, dw, dh, da) deltas of horizontal ``proposals``
    w.r.t. rotated ``gt`` boxes; inverse of :func:`delta2bbox_v2`.

    Unlike v1, the angle delta is normalized by pi.

    Args:
        proposals (torch.Tensor): Boxes to be transformed, shape (N, ..., 4)
        gt (torch.Tensor): Gt bboxes to be used as base, shape (N, ..., 5)
        means (Sequence[float]): Denormalizing means for delta coordinates
        stds (Sequence[float]): Denormalizing standard deviation for delta
            coordinates.

    Returns:
        Tensor: deltas with shape (N, 5), where columns represent dx, dy,
            dw, dh, da.
    """
    proposals = proposals.float()
    gt = gt.float()
    # Proposal center and size from its corner representation.
    cx = (proposals[..., 0] + proposals[..., 2]) * 0.5
    cy = (proposals[..., 1] + proposals[..., 3]) * 0.5
    w = proposals[..., 2] - proposals[..., 0]
    h = proposals[..., 3] - proposals[..., 1]
    gx, gy, gw, gh, ga = gt.unbind(dim=-1)
    deltas = torch.stack([
        (gx - cx) / w,
        (gy - cy) / h,
        torch.log(gw / w),
        torch.log(gh / h),
        ga / pi,  # angle target scaled to units of pi
    ], dim=-1)
    # Normalize in place on the freshly stacked tensor.
    deltas = deltas.sub_(deltas.new_tensor(means).unsqueeze(0)) \
                   .div_(deltas.new_tensor(stds).unsqueeze(0))
    return deltas
@mmcv.jit(coderize=True)
def delta2bbox_v2(rois,
                  deltas,
                  means=(0., 0., 0., 0., 0.),
                  stds=(1., 1., 1., 1., 1.),
                  wh_ratio_clip=16 / 1000,
                  add_ctr_clamp=False,
                  ctr_clamp=32):
    """Apply (dx, dy, dw, dh, da) deltas to horizontal base boxes;
    inverse of :func:`bbox2delta_v2`.  The angle delta is scaled back by
    pi and the decoded angle is wrapped into [-pi/4, 3*pi/4).

    Args:
        rois (torch.Tensor): Boxes to be transformed. Has shape (N, 4).
        deltas (torch.Tensor): Encoded offsets relative to each roi.
            Has shape (N, num_classes * 5) or (N, 5). Note
            N = num_base_anchors * W * H, when rois is a grid of
            anchors. Offset encoding follows [1]_.
        means (Sequence[float]): Denormalizing means for delta coordinates.
            Default (0., 0., 0., 0., 0.).
        stds (Sequence[float]): Denormalizing standard deviation for delta
            coordinates. Default (1., 1., 1., 1., 1.).
        wh_ratio_clip (float): Maximum aspect ratio for boxes. Default
            16 / 1000.
        add_ctr_clamp (bool): Whether to add center clamp, when added, the
            predicted box is clamped is its center is too far away from
            the original anchor's center. Only used by YOLOF. Default False.
        ctr_clamp (int): the maximum pixel shift to clamp. Only used by YOLOF.
            Default 32.

    Returns:
        Tensor: Boxes with shape (N, num_classes * 5) or (N, 5), where 5
            represent cx, cy, w, h, a.
    """
    groups = deltas.size(-1) // 5  # one 5-tuple per class
    mean_t = deltas.new_tensor(means).view(1, -1).repeat(1, groups)
    std_t = deltas.new_tensor(stds).view(1, -1).repeat(1, groups)
    denorm = deltas * std_t + mean_t
    dx = denorm[..., 0::5]
    dy = denorm[..., 1::5]
    dw = denorm[..., 2::5]
    dh = denorm[..., 3::5]
    da = denorm[..., 4::5] * pi  # undo the /pi applied at encode time
    # Roi centers/sizes, broadcast to the per-class delta layout.
    px = ((rois[..., 0] + rois[..., 2]) * 0.5).unsqueeze(-1).expand_as(dx)
    py = ((rois[..., 1] + rois[..., 3]) * 0.5).unsqueeze(-1).expand_as(dy)
    pw = (rois[..., 2] - rois[..., 0]).unsqueeze(-1).expand_as(dw)
    ph = (rois[..., 3] - rois[..., 1]).unsqueeze(-1).expand_as(dh)
    x_shift = pw * dx
    y_shift = ph * dy
    max_ratio = np.abs(np.log(wh_ratio_clip))
    if add_ctr_clamp:
        # YOLOF-style clamping: bound the center shift in pixels and only
        # cap the upper side of the size deltas.
        x_shift = torch.clamp(x_shift, max=ctr_clamp, min=-ctr_clamp)
        y_shift = torch.clamp(y_shift, max=ctr_clamp, min=-ctr_clamp)
        dw = torch.clamp(dw, max=max_ratio)
        dh = torch.clamp(dh, max=max_ratio)
    else:
        dw = dw.clamp(min=-max_ratio, max=max_ratio)
        dh = dh.clamp(min=-max_ratio, max=max_ratio)
    # Wrap the decoded angle into [-pi/4, 3*pi/4).
    ga = (da + pi / 4) % pi - pi / 4
    decoded = torch.stack(
        [px + x_shift, py + y_shift, pw * dw.exp(), ph * dh.exp(), ga],
        dim=-1)
    return decoded.view_as(deltas)
@mmcv.jit(coderize=True)
def bbox2delta_v3(proposals,
                  gt,
                  means=(0., 0., 0., 0., 0.),
                  stds=(1., 1., 1., 1., 1.)):
    """Compute (dx, dy, dw, dh, dtheta) deltas of horizontal ``proposals``
    w.r.t. rotated ``gt`` boxes; inverse of :func:`delta2bbox_v3`.

    The gt angle and the angle rotated by pi/2 (with w/h swapped) describe
    the same box; this version picks whichever wrapped angle has the
    smaller magnitude, then scales it by 1/(2*pi).

    Args:
        proposals (torch.Tensor): Boxes to be transformed, shape (N, ..., 4)
        gt (torch.Tensor): Gt bboxes to be used as base, shape (N, ..., 5)
        means (Sequence[float]): Denormalizing means for delta coordinates
        stds (Sequence[float]): Denormalizing standard deviation for delta
            coordinates.

    Returns:
        Tensor: deltas with shape (N, 5), where columns represent dx, dy,
            dw, dh, da.
    """
    proposals = proposals.float()
    gt = gt.float()
    # Proposal center and size from its corner representation.
    cx = (proposals[..., 0] + proposals[..., 2]) * 0.5
    cy = (proposals[..., 1] + proposals[..., 3]) * 0.5
    w = proposals[..., 2] - proposals[..., 0]
    h = proposals[..., 3] - proposals[..., 1]
    gx, gy, gw, gh, gtheta = gt.unbind(dim=-1)
    # Two equivalent angle candidates, both wrapped into [-pi/2, pi/2).
    theta1 = gtheta
    theta1 = (theta1 + pi / 2) % pi - pi / 2
    theta2 = gtheta + pi / 2
    theta2 = (theta2 + pi / 2) % pi - pi / 2
    prefer_first = torch.abs(theta1) < torch.abs(theta2)
    # Swap w/h whenever the rotated candidate is chosen.
    gw_reg = torch.where(prefer_first, gw, gh)
    gh_reg = torch.where(prefer_first, gh, gw)
    dtheta = torch.where(prefer_first, theta1, theta2)
    dtheta /= 2 * pi
    deltas = torch.stack([
        (gx - cx) / w,
        (gy - cy) / h,
        torch.log(gw_reg / w),
        torch.log(gh_reg / h),
        dtheta,
    ], dim=-1)
    # Normalize in place on the freshly stacked tensor.
    deltas = deltas.sub_(deltas.new_tensor(means).unsqueeze(0)) \
                   .div_(deltas.new_tensor(stds).unsqueeze(0))
    return deltas
@mmcv.jit(coderize=True)
def delta2bbox_v3(rois,
                  deltas,
                  means=(0., 0., 0., 0., 0.),
                  stds=(1., 1., 1., 1., 1.),
                  wh_ratio_clip=16 / 1000,
                  add_ctr_clamp=False,
                  ctr_clamp=32):
    """Apply (dx, dy, dw, dh, dtheta) deltas to horizontal base boxes;
    inverse of :func:`bbox2delta_v3`.  The angle delta is scaled back by
    2*pi and the decoded box is regularized so that w >= h, rotating the
    angle by pi/2 when w and h are swapped.

    Args:
        rois (torch.Tensor): Boxes to be transformed. Has shape (N, 4).
        deltas (torch.Tensor): Encoded offsets relative to each roi.
            Has shape (N, num_classes * 5) or (N, 5). Note
            N = num_base_anchors * W * H, when rois is a grid of
            anchors. Offset encoding follows [1]_.
        means (Sequence[float]): Denormalizing means for delta coordinates.
            Default (0., 0., 0., 0., 0.).
        stds (Sequence[float]): Denormalizing standard deviation for delta
            coordinates. Default (1., 1., 1., 1., 1.).
        wh_ratio_clip (float): Maximum aspect ratio for boxes. Default
            16 / 1000.
        add_ctr_clamp (bool): Whether to add center clamp, when added, the
            predicted box is clamped is its center is too far away from
            the original anchor's center. Only used by YOLOF. Default False.
        ctr_clamp (int): the maximum pixel shift to clamp. Only used by YOLOF.
            Default 32.

    Returns:
        Tensor: Boxes with shape (N, num_classes * 5) or (N, 5), where 5
            represent cx, cy, w, h, a.
    """
    groups = deltas.size(-1) // 5  # one 5-tuple per class
    mean_t = deltas.new_tensor(means).view(1, -1).repeat(1, groups)
    std_t = deltas.new_tensor(stds).view(1, -1).repeat(1, groups)
    denorm = deltas * std_t + mean_t
    dx = denorm[..., 0::5]
    dy = denorm[..., 1::5]
    dw = denorm[..., 2::5]
    dh = denorm[..., 3::5]
    da = denorm[..., 4::5] * 2 * pi  # undo the /(2*pi) applied at encode time
    # Roi centers/sizes, broadcast to the per-class delta layout.
    px = ((rois[..., 0] + rois[..., 2]) * 0.5).unsqueeze(-1).expand_as(dx)
    py = ((rois[..., 1] + rois[..., 3]) * 0.5).unsqueeze(-1).expand_as(dy)
    pw = (rois[..., 2] - rois[..., 0]).unsqueeze(-1).expand_as(dw)
    ph = (rois[..., 3] - rois[..., 1]).unsqueeze(-1).expand_as(dh)
    x_shift = pw * dx
    y_shift = ph * dy
    max_ratio = np.abs(np.log(wh_ratio_clip))
    if add_ctr_clamp:
        # YOLOF-style clamping: bound the center shift in pixels and only
        # cap the upper side of the size deltas.
        x_shift = torch.clamp(x_shift, max=ctr_clamp, min=-ctr_clamp)
        y_shift = torch.clamp(y_shift, max=ctr_clamp, min=-ctr_clamp)
        dw = torch.clamp(dw, max=max_ratio)
        dh = torch.clamp(dh, max=max_ratio)
    else:
        dw = dw.clamp(min=-max_ratio, max=max_ratio)
        dh = dh.clamp(min=-max_ratio, max=max_ratio)
    gx = px + x_shift
    gy = py + y_shift
    gw = pw * dw.exp()
    gh = ph * dh.exp()
    # Wrap the angle, then enforce the long-side convention (w >= h):
    # if h would exceed w, swap them and rotate the angle by pi/2.
    gtheta = (da + pi / 2) % pi - pi / 2
    wide = gw > gh
    w_reg = torch.where(wide, gw, gh)
    h_reg = torch.where(wide, gh, gw)
    theta_reg = torch.where(wide, gtheta, gtheta + pi / 2)
    theta_reg = (theta_reg + pi / 2) % pi - pi / 2
    return torch.stack([gx, gy, w_reg, h_reg, theta_reg],
                       dim=-1).view_as(deltas)
| 39.891348
| 79
| 0.566377
| 2,775
| 19,826
| 3.943423
| 0.087928
| 0.008407
| 0.008224
| 0.007311
| 0.834872
| 0.807548
| 0.783697
| 0.766974
| 0.766974
| 0.766974
| 0
| 0.0344
| 0.303541
| 19,826
| 496
| 80
| 39.971774
| 0.758111
| 0.407092
| 0
| 0.735294
| 0
| 0
| 0.001275
| 0
| 0
| 0
| 0
| 0
| 0.025735
| 1
| 0.033088
| false
| 0
| 0.018382
| 0
| 0.099265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4893faeb21e782e04d2309e272555dcd76587b1b
| 15,700
|
py
|
Python
|
tests/test_neighbor.py
|
minionatwork/sonic-swss
|
544a45c906a73738c951f373f0b2b9e4133586ee
|
[
"Apache-2.0"
] | 132
|
2016-03-10T00:48:36.000Z
|
2022-03-10T14:09:50.000Z
|
tests/test_neighbor.py
|
minionatwork/sonic-swss
|
544a45c906a73738c951f373f0b2b9e4133586ee
|
[
"Apache-2.0"
] | 1,644
|
2016-03-15T19:53:10.000Z
|
2022-03-31T20:51:06.000Z
|
tests/test_neighbor.py
|
minionatwork/sonic-swss
|
544a45c906a73738c951f373f0b2b9e4133586ee
|
[
"Apache-2.0"
] | 432
|
2016-03-09T18:10:28.000Z
|
2022-03-30T14:39:15.000Z
|
import time
import json
import pytest
from swsscommon import swsscommon
class TestNeighbor(object):
def setup_db(self, dvs):
self.pdb = swsscommon.DBConnector(0, dvs.redis_sock, 0)
self.adb = swsscommon.DBConnector(1, dvs.redis_sock, 0)
self.cdb = swsscommon.DBConnector(4, dvs.redis_sock, 0)
def set_admin_status(self, interface, status):
tbl = swsscommon.Table(self.cdb, "PORT")
fvs = swsscommon.FieldValuePairs([("admin_status", status)])
tbl.set(interface, fvs)
time.sleep(1)
def create_vrf(self, vrf_name):
tbl = swsscommon.Table(self.cdb, "VRF")
fvs = swsscommon.FieldValuePairs([('empty', 'empty')])
tbl.set(vrf_name, fvs)
time.sleep(1)
def remove_vrf(self, vrf_name):
tbl = swsscommon.Table(self.cdb, "VRF")
tbl._del(vrf_name)
time.sleep(1)
def create_l3_intf(self, interface, vrf_name):
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_ROUTER_INTERFACE")
initial_entries = set(tbl.getKeys())
tbl = swsscommon.Table(self.cdb, "INTERFACE")
if len(vrf_name) == 0:
fvs = swsscommon.FieldValuePairs([("NULL", "NULL")])
else:
fvs = swsscommon.FieldValuePairs([("vrf_name", vrf_name)])
tbl.set(interface, fvs)
time.sleep(1)
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_ROUTER_INTERFACE")
current_entries = set(tbl.getKeys())
assert len(current_entries - initial_entries) == 1
return list(current_entries - initial_entries)[0]
def remove_l3_intf(self, interface):
tbl = swsscommon.Table(self.cdb, "INTERFACE")
tbl._del(interface)
time.sleep(1)
def add_ip_address(self, interface, ip):
tbl = swsscommon.Table(self.cdb, "INTERFACE")
fvs = swsscommon.FieldValuePairs([("NULL", "NULL")])
tbl.set(interface + "|" + ip, fvs)
time.sleep(2) # IPv6 netlink message needs longer time
def remove_ip_address(self, interface, ip):
tbl = swsscommon.Table(self.cdb, "INTERFACE")
tbl._del(interface + "|" + ip)
time.sleep(1)
def add_neighbor(self, interface, ip, mac):
tbl = swsscommon.Table(self.cdb, "NEIGH")
fvs = swsscommon.FieldValuePairs([("neigh", mac)])
tbl.set(interface + "|" + ip, fvs)
time.sleep(1)
def remove_neighbor(self, interface, ip):
tbl = swsscommon.Table(self.cdb, "NEIGH")
tbl._del(interface + "|" + ip)
time.sleep(1)
def test_NeighborAddRemoveIpv6(self, dvs, testlog):
self.setup_db(dvs)
# bring up interface
# NOTE: For IPv6, only when the interface is up will the netlink message
# get generated.
self.set_admin_status("Ethernet8", "up")
# create interface and get rif_oid
rif_oid = self.create_l3_intf("Ethernet8", "")
# assign IP to interface
self.add_ip_address("Ethernet8", "2000::1/64")
# add neighbor
self.add_neighbor("Ethernet8", "2000::2", "00:01:02:03:04:05")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:Ethernet8")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 1
assert intf_entries[0] == "2000::2"
(status, fvs) = tbl.get(intf_entries[0])
assert status == True
assert len(fvs) == 2
for fv in fvs:
if fv[0] == "neigh":
assert fv[1] == "00:01:02:03:04:05"
elif fv[0] == "family":
assert fv[1] == "IPv6"
else:
assert False
# check ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 1
route = json.loads(intf_entries[0])
assert route["ip"] == "2000::2"
assert route["rif"] == rif_oid
(status, fvs) = tbl.get(intf_entries[0])
assert status == True
for fv in fvs:
if fv[0] == "SAI_NEIGHBOR_ENTRY_ATTR_DST_MAC_ADDRESS":
assert fv[1] == "00:01:02:03:04:05"
# remove neighbor
self.remove_neighbor("Ethernet8", "2000::2")
# remove IP from interface
self.remove_ip_address("Ethernet8", "2000::1/64")
# remove interface
self.remove_l3_intf("Ethernet8")
# bring down interface
self.set_admin_status("Ethernet8", "down")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:Ethernet8")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 0
# check ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 0
def test_NeighborAddRemoveIpv4(self, dvs, testlog):
self.setup_db(dvs)
# bring up interface
self.set_admin_status("Ethernet8", "up")
# create interface and get rif_oid
rif_oid = self.create_l3_intf("Ethernet8", "")
# assign IP to interface
self.add_ip_address("Ethernet8", "10.0.0.1/24")
# add neighbor
self.add_neighbor("Ethernet8", "10.0.0.2", "00:01:02:03:04:05")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:Ethernet8")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 1
assert intf_entries[0] == "10.0.0.2"
(status, fvs) = tbl.get(intf_entries[0])
assert status == True
assert len(fvs) == 2
for fv in fvs:
if fv[0] == "neigh":
assert fv[1] == "00:01:02:03:04:05"
elif fv[0] == "family":
assert fv[1] == "IPv4"
else:
assert False
# check ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 1
route = json.loads(intf_entries[0])
assert route["ip"] == "10.0.0.2"
assert route["rif"] == rif_oid
(status, fvs) = tbl.get(intf_entries[0])
assert status == True
for fv in fvs:
if fv[0] == "SAI_NEIGHBOR_ENTRY_ATTR_DST_MAC_ADDRESS":
assert fv[1] == "00:01:02:03:04:05"
# remove neighbor
self.remove_neighbor("Ethernet8", "10.0.0.2")
# remove IP from interface
self.remove_ip_address("Ethernet8", "10.0.0.1/24")
# remove interface
self.remove_l3_intf("Ethernet8")
# bring down interface
self.set_admin_status("Ethernet8", "down")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:Ethernet8")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 0
# check ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
intf_entries = tbl.getKeys()
assert len(intf_entries) == 0
def test_NeighborAddRemoveIpv6WithVrf(self, dvs, testlog):
self.setup_db(dvs)
for i in [0, 4]:
# record ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
old_neigh_entries = set(tbl.getKeys())
intf_name = "Ethernet" + str(i)
vrf_name = "Vrf_" + str(i)
# bring up interface
self.set_admin_status(intf_name, "up")
# create vrf
self.create_vrf(vrf_name)
# create interface and get rif_oid
rif_oid = self.create_l3_intf(intf_name, vrf_name)
# assign IP to interface
self.add_ip_address(intf_name, "2000::1/64")
# add neighbor
self.add_neighbor(intf_name, "2000::2", "00:01:02:03:04:05")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:" + intf_name)
neigh_entries = tbl.getKeys()
assert len(neigh_entries) == 1
assert neigh_entries[0] == "2000::2"
(status, fvs) = tbl.get(neigh_entries[0])
assert status == True
assert len(fvs) == 2
for fv in fvs:
if fv[0] == "neigh":
assert fv[1] == "00:01:02:03:04:05"
elif fv[0] == "family":
assert fv[1] == "IPv6"
else:
assert False
# check ASIC neighbor interface database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
current_neigh_entries = set(tbl.getKeys())
neigh_entries = list(current_neigh_entries - old_neigh_entries)
assert len(neigh_entries) == 1
route = json.loads(neigh_entries[0])
assert route["ip"] == "2000::2"
assert route["rif"] == rif_oid
(status, fvs) = tbl.get(neigh_entries[0])
assert status == True
for fv in fvs:
if fv[0] == "SAI_NEIGHBOR_ENTRY_ATTR_DST_MAC_ADDRESS":
assert fv[1] == "00:01:02:03:04:05"
for i in [0, 4]:
# record ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
old_neigh_entries_cnt = len(tbl.getKeys())
intf_name = "Ethernet" + str(i)
vrf_name = "Vrf_" + str(i)
# remove neighbor
self.remove_neighbor(intf_name, "2000::2")
# remove IP from interface
self.remove_ip_address(intf_name, "2000::1/64")
# remove interface
self.remove_l3_intf(intf_name)
# remove vrf
self.remove_vrf(vrf_name)
# bring down interface
self.set_admin_status(intf_name, "down")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:" + intf_name)
intf_entries = tbl.getKeys()
assert len(intf_entries) == 0
# check ASIC neighbor interface database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
current_neigh_entries_cnt = len(tbl.getKeys())
dec_neigh_entries_cnt = (old_neigh_entries_cnt - current_neigh_entries_cnt)
assert dec_neigh_entries_cnt == 1
def test_NeighborAddRemoveIpv4WithVrf(self, dvs, testlog):
self.setup_db(dvs)
for i in [0, 4]:
# record ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
old_neigh_entries = set(tbl.getKeys())
intf_name = "Ethernet" + str(i)
vrf_name = "Vrf_" + str(i)
# bring up interface
self.set_admin_status(intf_name, "up")
# create vrf
self.create_vrf(vrf_name)
# create interface and get rif_oid
rif_oid = self.create_l3_intf(intf_name, vrf_name)
# assign IP to interface
self.add_ip_address(intf_name, "10.0.0.1/24")
# add neighbor
self.add_neighbor(intf_name, "10.0.0.2", "00:01:02:03:04:05")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:" + intf_name)
neigh_entries = tbl.getKeys()
assert len(neigh_entries) == 1
assert neigh_entries[0] == "10.0.0.2"
(status, fvs) = tbl.get(neigh_entries[0])
assert status == True
assert len(fvs) == 2
for fv in fvs:
if fv[0] == "neigh":
assert fv[1] == "00:01:02:03:04:05"
elif fv[0] == "family":
assert fv[1] == "IPv4"
else:
assert False
# check ASIC neighbor interface database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
current_neigh_entries = set(tbl.getKeys())
neigh_entries = list(current_neigh_entries - old_neigh_entries)
assert len(neigh_entries) == 1
route = json.loads(neigh_entries[0])
assert route["ip"] == "10.0.0.2"
assert route["rif"] == rif_oid
(status, fvs) = tbl.get(neigh_entries[0])
assert status == True
for fv in fvs:
if fv[0] == "SAI_NEIGHBOR_ENTRY_ATTR_DST_MAC_ADDRESS":
assert fv[1] == "00:01:02:03:04:05"
for i in [0, 4]:
# record ASIC neighbor database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
old_neigh_entries_cnt = len(tbl.getKeys())
intf_name = "Ethernet" + str(i)
vrf_name = "Vrf_" + str(i)
# remove neighbor
self.remove_neighbor(intf_name, "10.0.0.2")
# remove IP from interface
self.remove_ip_address(intf_name, "10.0.0.1/24")
# remove interface
self.remove_l3_intf(intf_name)
# remove vrf
self.remove_vrf(vrf_name)
# bring down interface
self.set_admin_status(intf_name, "down")
# check application database
tbl = swsscommon.Table(self.pdb, "NEIGH_TABLE:" + intf_name)
intf_entries = tbl.getKeys()
assert len(intf_entries) == 0
# check ASIC neighbor interface database
tbl = swsscommon.Table(self.adb, "ASIC_STATE:SAI_OBJECT_TYPE_NEIGHBOR_ENTRY")
current_neigh_entries_cnt = len(tbl.getKeys())
dec_neigh_entries_cnt = (old_neigh_entries_cnt - current_neigh_entries_cnt)
assert dec_neigh_entries_cnt == 1
def test_FlushResolveNeighborIpv6(self, dvs, testlog):
    """Push an IPv6 entry into the neighbor-resolve table and check nbrmgrd stays up."""
    appl_db = swsscommon.DBConnector(swsscommon.APPL_DB, dvs.redis_sock, 0)
    resolve_tbl = swsscommon.ProducerStateTable(appl_db, swsscommon.APP_NEIGH_RESOLVE_TABLE_NAME)
    resolve_tbl.set("Vlan2:2000:1::1", swsscommon.FieldValuePairs([("mac", "52:54:00:25:06:E9")]))
    # Give nbrmgrd time to consume the request before probing its state.
    time.sleep(2)
    _, output = dvs.runcmd(['sh', '-c', "supervisorctl status nbrmgrd | awk '{print $2}'"])
    assert output == "RUNNING\n"
def test_FlushResolveNeighborIpv4(self, dvs, testlog):
    """Push an IPv4 entry into the neighbor-resolve table and check nbrmgrd stays up."""
    appl_db = swsscommon.DBConnector(swsscommon.APPL_DB, dvs.redis_sock, 0)
    resolve_tbl = swsscommon.ProducerStateTable(appl_db, swsscommon.APP_NEIGH_RESOLVE_TABLE_NAME)
    resolve_tbl.set("Vlan2:192.168.10.1", swsscommon.FieldValuePairs([("mac", "52:54:00:25:06:E9")]))
    # Give nbrmgrd time to consume the request before probing its state.
    time.sleep(2)
    _, output = dvs.runcmd(['sh', '-c', "supervisorctl status nbrmgrd | awk '{print $2}'"])
    assert output == "RUNNING\n"
# Flaky-retry workaround: keep an always-passing test at the end of the module
# so a flaky failure on the final real test does not trigger module tear-down
# before the retry runs.
def test_nonflaky_dummy():
    """Intentionally empty always-pass test."""
    pass
| 37.292162
| 105
| 0.57121
| 1,906
| 15,700
| 4.505247
| 0.089192
| 0.050309
| 0.064982
| 0.079422
| 0.876325
| 0.855596
| 0.840922
| 0.825783
| 0.807267
| 0.798183
| 0
| 0.042218
| 0.316561
| 15,700
| 420
| 106
| 37.380952
| 0.758062
| 0.100318
| 0
| 0.773234
| 0
| 0
| 0.13736
| 0.059957
| 0
| 0
| 0
| 0
| 0.219331
| 1
| 0.063197
| false
| 0.003717
| 0.01487
| 0
| 0.085502
| 0.007435
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48a4e7351908351ed05638f402b4fff6a5965de2
| 47,872
|
py
|
Python
|
accelbyte_py_sdk/api/lobby/wrappers/_notification.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/lobby/wrappers/_notification.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/lobby/wrappers/_notification.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import HeaderStr
from ....core import get_namespace as get_services_namespace
from ....core import run_request
from ....core import run_request_async
from ....core import same_doc_as
from ..models import ModelBulkUsersFreeFormNotificationRequestV1
from ..models import ModelCreateTemplateRequest
from ..models import ModelCreateTopicRequest
from ..models import ModelCreateTopicRequestV1
from ..models import ModelFreeFormNotificationRequest
from ..models import ModelFreeFormNotificationRequestV1
from ..models import ModelGetAllNotificationTemplateSlugResp
from ..models import ModelGetAllNotificationTopicsResponse
from ..models import ModelLocalization
from ..models import ModelNotificationTemplateResponse
from ..models import ModelNotificationTopicResponse
from ..models import ModelNotificationTopicResponseV1
from ..models import ModelNotificationWithTemplateRequest
from ..models import ModelNotificationWithTemplateRequestV1
from ..models import ModelTemplateLocalization
from ..models import ModelTemplateLocalizationResponse
from ..models import ModelTemplateResponse
from ..models import ModelTopicByNamespacesResponse
from ..models import ModelUpdateTemplateRequest
from ..models import ModelUpdateTopicRequest
from ..models import RestapiErrorResponseBody
from ..models import RestapiErrorResponseV1
from ..operations.notification import CreateNotificationTemplateV1Admin
from ..operations.notification import CreateNotificationTopicV1Admin
from ..operations.notification import CreateTemplate
from ..operations.notification import CreateTopic
from ..operations.notification import DeleteNotificationTemplateSlugV1Admin
from ..operations.notification import DeleteNotificationTopicV1Admin
from ..operations.notification import DeleteTemplateLocalization
from ..operations.notification import DeleteTemplateLocalizationV1Admin
from ..operations.notification import DeleteTemplateSlug
from ..operations.notification import DeleteTopicByTopicName
from ..operations.notification import FreeFormNotification
from ..operations.notification import FreeFormNotificationByUserID
from ..operations.notification import GetAllNotificationTemplatesV1Admin
from ..operations.notification import GetAllNotificationTopicsV1Admin
from ..operations.notification import GetGameTemplate
from ..operations.notification import GetLocalizationTemplate
from ..operations.notification import GetNotificationTopicV1Admin
from ..operations.notification import GetSingleTemplateLocalizationV1Admin
from ..operations.notification import GetSlugTemplate
from ..operations.notification import GetTemplateSlugLocalizationsTemplateV1Admin
from ..operations.notification import GetTopicByNamespace
from ..operations.notification import GetTopicByTopicName
from ..operations.notification import NotificationWithTemplate
from ..operations.notification import NotificationWithTemplateByUserID
from ..operations.notification import PublishTemplate
from ..operations.notification import PublishTemplateLocalizationV1Admin
from ..operations.notification import SendMultipleUsersFreeformNotificationV1Admin
from ..operations.notification import SendPartyFreeformNotificationV1Admin
from ..operations.notification import SendPartyTemplatedNotificationV1Admin
from ..operations.notification import SendSpecificUserFreeformNotificationV1Admin
from ..operations.notification import SendSpecificUserTemplatedNotificationV1Admin
from ..operations.notification import SendUsersFreeformNotificationV1Admin
from ..operations.notification import SendUsersTemplatedNotificationV1Admin
from ..operations.notification import UpdateLocalizationTemplate
from ..operations.notification import UpdateNotificationTopicV1Admin
from ..operations.notification import UpdateTemplateLocalizationV1Admin
from ..operations.notification import UpdateTopicByTopicName
@same_doc_as(CreateNotificationTemplateV1Admin)
def create_notification_template_v1_admin(body: ModelCreateTemplateRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        CreateNotificationTemplateV1Admin.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(CreateNotificationTemplateV1Admin)
async def create_notification_template_v1_admin_async(body: ModelCreateTemplateRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of create_notification_template_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        CreateNotificationTemplateV1Admin.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(CreateNotificationTopicV1Admin)
def create_notification_topic_v1_admin(body: ModelCreateTopicRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        CreateNotificationTopicV1Admin.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(CreateNotificationTopicV1Admin)
async def create_notification_topic_v1_admin_async(body: ModelCreateTopicRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of create_notification_topic_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        CreateNotificationTopicV1Admin.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(CreateTemplate)
def create_template(body: ModelCreateTemplateRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        CreateTemplate.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(CreateTemplate)
async def create_template_async(body: ModelCreateTemplateRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of create_template.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        CreateTemplate.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(CreateTopic)
def create_topic(body: ModelCreateTopicRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        CreateTopic.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(CreateTopic)
async def create_topic_async(body: ModelCreateTopicRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of create_topic.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        CreateTopic.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteNotificationTemplateSlugV1Admin)
def delete_notification_template_slug_v1_admin(template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        DeleteNotificationTemplateSlugV1Admin.create(template_slug=template_slug, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(DeleteNotificationTemplateSlugV1Admin)
async def delete_notification_template_slug_v1_admin_async(template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of delete_notification_template_slug_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        DeleteNotificationTemplateSlugV1Admin.create(template_slug=template_slug, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteNotificationTopicV1Admin)
def delete_notification_topic_v1_admin(topic_name: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        DeleteNotificationTopicV1Admin.create(topic_name=topic_name, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(DeleteNotificationTopicV1Admin)
async def delete_notification_topic_v1_admin_async(topic_name: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of delete_notification_topic_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        DeleteNotificationTopicV1Admin.create(topic_name=topic_name, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteTemplateLocalization)
def delete_template_localization(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        DeleteTemplateLocalization.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(DeleteTemplateLocalization)
async def delete_template_localization_async(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of delete_template_localization.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        DeleteTemplateLocalization.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteTemplateLocalizationV1Admin)
def delete_template_localization_v1_admin(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        DeleteTemplateLocalizationV1Admin.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(DeleteTemplateLocalizationV1Admin)
async def delete_template_localization_v1_admin_async(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of delete_template_localization_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        DeleteTemplateLocalizationV1Admin.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteTemplateSlug)
def delete_template_slug(template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        DeleteTemplateSlug.create(template_slug=template_slug, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(DeleteTemplateSlug)
async def delete_template_slug_async(template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of delete_template_slug.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        DeleteTemplateSlug.create(template_slug=template_slug, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteTopicByTopicName)
def delete_topic_by_topic_name(topic: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        DeleteTopicByTopicName.create(topic=topic, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(DeleteTopicByTopicName)
async def delete_topic_by_topic_name_async(topic: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of delete_topic_by_topic_name.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        DeleteTopicByTopicName.create(topic=topic, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(FreeFormNotification)
def free_form_notification(body: ModelFreeFormNotificationRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        FreeFormNotification.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(FreeFormNotification)
async def free_form_notification_async(body: ModelFreeFormNotificationRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of free_form_notification.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        FreeFormNotification.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(FreeFormNotificationByUserID)
def free_form_notification_by_user_id(body: ModelFreeFormNotificationRequest, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        FreeFormNotificationByUserID.create(body=body, user_id=user_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(FreeFormNotificationByUserID)
async def free_form_notification_by_user_id_async(body: ModelFreeFormNotificationRequest, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of free_form_notification_by_user_id.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        FreeFormNotificationByUserID.create(body=body, user_id=user_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetAllNotificationTemplatesV1Admin)
def get_all_notification_templates_v1_admin(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetAllNotificationTemplatesV1Admin.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetAllNotificationTemplatesV1Admin)
async def get_all_notification_templates_v1_admin_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_all_notification_templates_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetAllNotificationTemplatesV1Admin.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetAllNotificationTopicsV1Admin)
def get_all_notification_topics_v1_admin(after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetAllNotificationTopicsV1Admin.create(
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetAllNotificationTopicsV1Admin)
async def get_all_notification_topics_v1_admin_async(after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_all_notification_topics_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetAllNotificationTopicsV1Admin.create(
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetGameTemplate)
def get_game_template(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetGameTemplate.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetGameTemplate)
async def get_game_template_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_game_template.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetGameTemplate.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetLocalizationTemplate)
def get_localization_template(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetLocalizationTemplate.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetLocalizationTemplate)
async def get_localization_template_async(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_localization_template.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetLocalizationTemplate.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetNotificationTopicV1Admin)
def get_notification_topic_v1_admin(topic_name: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetNotificationTopicV1Admin.create(topic_name=topic_name, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetNotificationTopicV1Admin)
async def get_notification_topic_v1_admin_async(topic_name: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_notification_topic_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetNotificationTopicV1Admin.create(topic_name=topic_name, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetSingleTemplateLocalizationV1Admin)
def get_single_template_localization_v1_admin(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetSingleTemplateLocalizationV1Admin.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetSingleTemplateLocalizationV1Admin)
async def get_single_template_localization_v1_admin_async(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_single_template_localization_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetSingleTemplateLocalizationV1Admin.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetSlugTemplate)
def get_slug_template(template_slug: str, after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetSlugTemplate.create(
            template_slug=template_slug,
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetSlugTemplate)
async def get_slug_template_async(template_slug: str, after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_slug_template.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetSlugTemplate.create(
            template_slug=template_slug,
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetTemplateSlugLocalizationsTemplateV1Admin)
def get_template_slug_localizations_template_v1_admin(template_slug: str, after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetTemplateSlugLocalizationsTemplateV1Admin.create(
            template_slug=template_slug,
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetTemplateSlugLocalizationsTemplateV1Admin)
async def get_template_slug_localizations_template_v1_admin_async(template_slug: str, after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_template_slug_localizations_template_v1_admin.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetTemplateSlugLocalizationsTemplateV1Admin.create(
            template_slug=template_slug,
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetTopicByNamespace)
def get_topic_by_namespace(after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetTopicByNamespace.create(
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetTopicByNamespace)
async def get_topic_by_namespace_async(after: Optional[str] = None, before: Optional[str] = None, limit: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_topic_by_namespace.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetTopicByNamespace.create(
            after=after,
            before=before,
            limit=limit,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetTopicByTopicName)
def get_topic_by_topic_name(topic: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        GetTopicByTopicName.create(topic=topic, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(GetTopicByTopicName)
async def get_topic_by_topic_name_async(topic: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of get_topic_by_topic_name.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        GetTopicByTopicName.create(topic=topic, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(NotificationWithTemplate)
def notification_with_template(body: ModelNotificationWithTemplateRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        NotificationWithTemplate.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(NotificationWithTemplate)
async def notification_with_template_async(body: ModelNotificationWithTemplateRequest, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of notification_with_template.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        NotificationWithTemplate.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(NotificationWithTemplateByUserID)
def notification_with_template_by_user_id(body: ModelNotificationWithTemplateRequest, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        NotificationWithTemplateByUserID.create(body=body, user_id=user_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(NotificationWithTemplateByUserID)
async def notification_with_template_by_user_id_async(body: ModelNotificationWithTemplateRequest, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of notification_with_template_by_user_id.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        NotificationWithTemplateByUserID.create(body=body, user_id=user_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(PublishTemplate)
def publish_template(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Fall back to the SDK-configured service namespace when none is supplied.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return run_request(
        PublishTemplate.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )


@same_doc_as(PublishTemplate)
async def publish_template_async(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # Async variant of publish_template.
    if namespace is None:
        ns, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
        namespace = ns
    return await run_request_async(
        PublishTemplate.create(
            template_language=template_language,
            template_slug=template_slug,
            namespace=namespace,
        ),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(PublishTemplateLocalizationV1Admin)
def publish_template_localization_v1_admin(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Publish a template localization via the V1 admin endpoint (sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = PublishTemplateLocalizationV1Admin.create(template_language=template_language, template_slug=template_slug, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(PublishTemplateLocalizationV1Admin)
async def publish_template_localization_v1_admin_async(template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Publish a template localization via the V1 admin endpoint (async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = PublishTemplateLocalizationV1Admin.create(template_language=template_language, template_slug=template_slug, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendMultipleUsersFreeformNotificationV1Admin)
def send_multiple_users_freeform_notification_v1_admin(body: ModelBulkUsersFreeFormNotificationRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to multiple users (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendMultipleUsersFreeformNotificationV1Admin.create(body=body, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendMultipleUsersFreeformNotificationV1Admin)
async def send_multiple_users_freeform_notification_v1_admin_async(body: ModelBulkUsersFreeFormNotificationRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to multiple users (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendMultipleUsersFreeformNotificationV1Admin.create(body=body, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendPartyFreeformNotificationV1Admin)
def send_party_freeform_notification_v1_admin(body: ModelFreeFormNotificationRequestV1, party_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to a party (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendPartyFreeformNotificationV1Admin.create(body=body, party_id=party_id, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendPartyFreeformNotificationV1Admin)
async def send_party_freeform_notification_v1_admin_async(body: ModelFreeFormNotificationRequestV1, party_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to a party (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendPartyFreeformNotificationV1Admin.create(body=body, party_id=party_id, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendPartyTemplatedNotificationV1Admin)
def send_party_templated_notification_v1_admin(body: ModelNotificationWithTemplateRequestV1, party_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a templated notification to a party (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendPartyTemplatedNotificationV1Admin.create(body=body, party_id=party_id, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendPartyTemplatedNotificationV1Admin)
async def send_party_templated_notification_v1_admin_async(body: ModelNotificationWithTemplateRequestV1, party_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a templated notification to a party (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendPartyTemplatedNotificationV1Admin.create(body=body, party_id=party_id, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendSpecificUserFreeformNotificationV1Admin)
def send_specific_user_freeform_notification_v1_admin(body: ModelFreeFormNotificationRequestV1, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to one user (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendSpecificUserFreeformNotificationV1Admin.create(body=body, user_id=user_id, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendSpecificUserFreeformNotificationV1Admin)
async def send_specific_user_freeform_notification_v1_admin_async(body: ModelFreeFormNotificationRequestV1, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to one user (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendSpecificUserFreeformNotificationV1Admin.create(body=body, user_id=user_id, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendSpecificUserTemplatedNotificationV1Admin)
def send_specific_user_templated_notification_v1_admin(body: ModelNotificationWithTemplateRequestV1, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a templated notification to one user (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendSpecificUserTemplatedNotificationV1Admin.create(body=body, user_id=user_id, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendSpecificUserTemplatedNotificationV1Admin)
async def send_specific_user_templated_notification_v1_admin_async(body: ModelNotificationWithTemplateRequestV1, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a templated notification to one user (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendSpecificUserTemplatedNotificationV1Admin.create(body=body, user_id=user_id, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendUsersFreeformNotificationV1Admin)
def send_users_freeform_notification_v1_admin(body: ModelFreeFormNotificationRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to users (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendUsersFreeformNotificationV1Admin.create(body=body, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendUsersFreeformNotificationV1Admin)
async def send_users_freeform_notification_v1_admin_async(body: ModelFreeFormNotificationRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a freeform notification to users (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendUsersFreeformNotificationV1Admin.create(body=body, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendUsersTemplatedNotificationV1Admin)
def send_users_templated_notification_v1_admin(body: ModelNotificationWithTemplateRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a templated notification to users (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendUsersTemplatedNotificationV1Admin.create(body=body, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(SendUsersTemplatedNotificationV1Admin)
async def send_users_templated_notification_v1_admin_async(body: ModelNotificationWithTemplateRequestV1, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Send a templated notification to users (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = SendUsersTemplatedNotificationV1Admin.create(body=body, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateLocalizationTemplate)
def update_localization_template(body: ModelUpdateTemplateRequest, template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a template localization (sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateLocalizationTemplate.create(body=body, template_language=template_language, template_slug=template_slug, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateLocalizationTemplate)
async def update_localization_template_async(body: ModelUpdateTemplateRequest, template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a template localization (async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateLocalizationTemplate.create(body=body, template_language=template_language, template_slug=template_slug, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateNotificationTopicV1Admin)
def update_notification_topic_v1_admin(body: ModelUpdateTopicRequest, topic_name: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a notification topic (V1 admin, sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateNotificationTopicV1Admin.create(body=body, topic_name=topic_name, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateNotificationTopicV1Admin)
async def update_notification_topic_v1_admin_async(body: ModelUpdateTopicRequest, topic_name: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a notification topic (V1 admin, async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateNotificationTopicV1Admin.create(body=body, topic_name=topic_name, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateTemplateLocalizationV1Admin)
def update_template_localization_v1_admin(body: ModelUpdateTemplateRequest, template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a template localization via the V1 admin endpoint (sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateTemplateLocalizationV1Admin.create(body=body, template_language=template_language, template_slug=template_slug, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateTemplateLocalizationV1Admin)
async def update_template_localization_v1_admin_async(body: ModelUpdateTemplateRequest, template_language: str, template_slug: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a template localization via the V1 admin endpoint (async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateTemplateLocalizationV1Admin.create(body=body, template_language=template_language, template_slug=template_slug, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateTopicByTopicName)
def update_topic_by_topic_name(body: ModelUpdateTopicRequest, topic: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a topic identified by its name (sync wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateTopicByTopicName.create(body=body, topic=topic, namespace=namespace)
    return run_request(req, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateTopicByTopicName)
async def update_topic_by_topic_name_async(body: ModelUpdateTopicRequest, topic: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a topic identified by its name (async wrapper)."""
    if namespace is None:
        namespace, err = get_services_namespace()
        if err:
            return None, err
    req = UpdateTopicByTopicName.create(body=body, topic=topic, namespace=namespace)
    return await run_request_async(req, additional_headers=x_additional_headers, **kwargs)
| 43.283906
| 272
| 0.744068
| 5,126
| 47,872
| 6.698595
| 0.03531
| 0.109911
| 0.077584
| 0.051723
| 0.868742
| 0.861578
| 0.848938
| 0.827708
| 0.814136
| 0.814136
| 0
| 0.004203
| 0.174925
| 47,872
| 1,105
| 273
| 43.323077
| 0.865132
| 0.016001
| 0
| 0.771214
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039742
| false
| 0
| 0.069817
| 0
| 0.268528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
48f079cd027eb4db02acec5cf9fabe50c962342d
| 3,780
|
py
|
Python
|
src/pdr/tests/test_LRO.py
|
MillionConcepts/clementine-conversion
|
e4781c4c700e862d4718f2cca1904991d4c2d161
|
[
"BSD-3-Clause"
] | 1
|
2022-02-14T09:22:46.000Z
|
2022-02-14T09:22:46.000Z
|
src/pdr/tests/test_LRO.py
|
MillionConcepts/clementine-conversion
|
e4781c4c700e862d4718f2cca1904991d4c2d161
|
[
"BSD-3-Clause"
] | null | null | null |
src/pdr/tests/test_LRO.py
|
MillionConcepts/clementine-conversion
|
e4781c4c700e862d4718f2cca1904991d4c2d161
|
[
"BSD-3-Clause"
] | null | null | null |
""" Test performance for LRO data. """
import unittest
import pdr
# Cameras
class TestLymanAlpha(unittest.TestCase):
    """Exercises pdr against LRO LAMP (Lyman Alpha) products fetched over HTTP.

    NOTE(review): these tests download remote files, so they are slow and
    network-dependent.
    """
    def setUp(self):
        # No shared fixture needed; each test fetches its own product.
        pass
    def test_lyman_alpha_gdr_1(self):
        """GDR data-quality image: verify raster shape and label key count."""
        url = "http://pds-imaging.jpl.nasa.gov/data/lro/lamp/gdr/LROLAM_2001/DATA/DATA_QUALITY/LAMP_80n_240mpp_long_dqual_01.img"
        data = pdr.open(pdr.get(url))
        self.assertEqual(data.IMAGE.shape[0], 2501)
        self.assertEqual(data.IMAGE.shape[1], 2501)
        self.assertEqual(len(data.LABEL), 41)
    def test_lyman_alpha_edr_1(self):
        """EDR FITS product: assertions are commented out (placeholder only).

        NOTE(review): only the URL is defined; presumably awaiting format
        support — confirm before enabling the commented assertions.
        """
        url = "http://pds-imaging.jpl.nasa.gov/data/lro/lamp/edr/LROLAM_0007/DATA/2011082/LAMP_ENG_0322531705_02.fit"
        # data = pdr.open(pdr.get(url))
        # self.assertEqual(data.IMAGE.shape[0],2501)
        # self.assertEqual(data.IMAGE.shape[1],2501)
        # self.assertEqual(len(data.LABEL),41)
    def test_lyman_alpha_rdr_1(self):
        """RDR FITS product: assertions are commented out (placeholder only)."""
        url = "http://pds-imaging.jpl.nasa.gov/data/lro/lamp/rdr/LROLAM_1010/DATA/2011352/LAMP_SCI_0345885974_03.fit"
        # data = pdr.open(pdr.get(url))
        # self.assertEqual(data.IMAGE.shape[0],2501)
        # self.assertEqual(data.IMAGE.shape[1],2501)
        # self.assertEqual(len(data.LABEL),41)
# NOTE(review): running the suite at import time is a module-level side effect;
# consider guarding with `if __name__ == "__main__":` if this file is imported.
suite = unittest.TestLoader().loadTestsFromTestCase(TestLymanAlpha)
unittest.TextTestRunner(verbosity=2).run(suite)
class TestLROC(unittest.TestCase):
    """Exercises pdr against LRO LROC camera products fetched over HTTP.

    NOTE(review): network-dependent; NAC tests download very large files.
    """
    def setUp(self):
        # No shared fixture needed; each test fetches its own product.
        pass
    def test_lroc_esm_nac_1(self): # Large file (252Mb)
        """CDR ESM NAC image: verify raster shape and label key count."""
        url = "http://lroc.sese.asu.edu/data/LRO-L-LROC-3-CDR-V1.0/LROLRC_1015/DATA/ESM/2013092/NAC/M1119524889RC.IMG"
        data = pdr.open(pdr.get(url))
        self.assertEqual(data.IMAGE.shape[0], 52224)
        self.assertEqual(data.IMAGE.shape[1], 2532)
        self.assertEqual(len(data.LABEL), 57)
    def test_lroc_esm_wac_1(self):
        """CDR ESM WAC image: verify raster shape and label key count."""
        url = "http://lroc.sese.asu.edu/data/LRO-L-LROC-3-CDR-V1.0/LROLRC_1015/DATA/ESM/2013092/WAC/M1119570719MC.IMG"
        data = pdr.open(pdr.get(url))
        self.assertEqual(data.IMAGE.shape[0], 3080)
        self.assertEqual(data.IMAGE.shape[1], 1024)
        self.assertEqual(len(data.LABEL), 59)
    def test_lroc_sci_nac_1(self): # Large file (252Mb)
        """EDR SCI NAC image: open only; assertions are commented out."""
        url = "http://lroc.sese.asu.edu/data/LRO-L-LROC-2-EDR-V1.0/LROLRC_0010/DATA/SCI/2012019/NAC/M181639328RE.IMG"
        data = pdr.open(pdr.get(url))
        # self.assertEqual(data.IMAGE.shape[0],2501)
        # self.assertEqual(data.IMAGE.shape[1],2501)
        # self.assertEqual(len(data.LABEL),41)
    def test_lroc_sci_wac_1(self):
        """EDR SCI WAC image: open only; assertions are commented out."""
        url = "http://lroc.sese.asu.edu/data/LRO-L-LROC-2-EDR-V1.0/LROLRC_0010/DATA/SCI/2012019/WAC/M181648212CE.IMG"
        data = pdr.open(pdr.get(url))
        # self.assertEqual(data.IMAGE.shape[0],2501)
        # self.assertEqual(data.IMAGE.shape[1],2501)
        # self.assertEqual(len(data.LABEL),41)
    def test_lroc_bdr_nac_roi_1(self):
        """RDR BDR NAC ROI image: open only; assertions are commented out."""
        url = "http://lroc.sese.asu.edu/data/LRO-L-LROC-5-RDR-V1.0/LROLRC_2001/DATA/BDR/NAC_ROI/FLMSTEEDHIA/NAC_ROI_FLMSTEEDHIA_E023S3168_20M.IMG"
        data = pdr.open(pdr.get(url))
        # self.assertEqual(data.IMAGE.shape[0],2501)
        # self.assertEqual(data.IMAGE.shape[1],2501)
        # self.assertEqual(len(data.LABEL),41)
    def test_lroc_bdr_wac_roi_1(self):
        """RDR BDR WAC ROI image: open only; assertions are commented out."""
        url = "http://lroc.sese.asu.edu/data/LRO-L-LROC-5-RDR-V1.0/LROLRC_2001/DATA/BDR/WAC_ROI/FARSIDE_DUSK/WAC_ROI_FARSIDE_DUSK_P900S0000_100M.IMG"
        data = pdr.open(pdr.get(url))
        # self.assertEqual(data.IMAGE.shape[0],2501)
        # self.assertEqual(data.IMAGE.shape[1],2501)
        # self.assertEqual(len(data.LABEL),41)
# NOTE(review): executes at import time (see note above TestLymanAlpha's runner
# if both blocks are reviewed together); a `__main__` guard would avoid the
# side effect on import.
suite = unittest.TestLoader().loadTestsFromTestCase(TestLROC)
unittest.TextTestRunner(verbosity=2).run(suite)
| 42.954545
| 150
| 0.664286
| 560
| 3,780
| 4.357143
| 0.191071
| 0.165984
| 0.140164
| 0.177049
| 0.80082
| 0.778689
| 0.721311
| 0.721311
| 0.689344
| 0.689344
| 0
| 0.100486
| 0.183862
| 3,780
| 87
| 151
| 43.448276
| 0.690438
| 0.231746
| 0
| 0.282609
| 0
| 0.195652
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0.195652
| 1
| 0.23913
| false
| 0.043478
| 0.043478
| 0
| 0.326087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b16dbb8311928e69149235f737f6d4a556e3e81
| 208
|
py
|
Python
|
Opt_HC_CG/__init__.py
|
Carlosrlpzi/Opt_HC_CG
|
762a313ed16a7096ad3807c54a2d5968bac33bbf
|
[
"MIT"
] | null | null | null |
Opt_HC_CG/__init__.py
|
Carlosrlpzi/Opt_HC_CG
|
762a313ed16a7096ad3807c54a2d5968bac33bbf
|
[
"MIT"
] | null | null | null |
Opt_HC_CG/__init__.py
|
Carlosrlpzi/Opt_HC_CG
|
762a313ed16a7096ad3807c54a2d5968bac33bbf
|
[
"MIT"
] | null | null | null |
from Opt_HC_CG.hill import best_solution
from Opt_HC_CG.hill import distance_matrix
from Opt_HC_CG.grad_conj import cgm
from Opt_HC_CG.grad_conj import its_simetric
from Opt_HC_CG.grad_conj import is_pos_def
| 34.666667
| 44
| 0.879808
| 43
| 208
| 3.837209
| 0.418605
| 0.212121
| 0.272727
| 0.333333
| 0.709091
| 0.709091
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 208
| 5
| 45
| 41.6
| 0.87766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d2b3cba012531fd75634a32a014ff1a76afb7f7f
| 1,121
|
py
|
Python
|
tests/threadwrapper_test.py
|
csurfer/pypette
|
4e0bfcc56d36d7fb56d381ffcd6e5e58cb9b3ca1
|
[
"MIT"
] | 286
|
2017-10-28T10:08:42.000Z
|
2022-02-24T06:55:08.000Z
|
tests/threadwrapper_test.py
|
csurfer/pypette
|
4e0bfcc56d36d7fb56d381ffcd6e5e58cb9b3ca1
|
[
"MIT"
] | 14
|
2017-10-28T20:29:38.000Z
|
2021-09-13T16:14:12.000Z
|
tests/threadwrapper_test.py
|
csurfer/pypette
|
4e0bfcc56d36d7fb56d381ffcd6e5e58cb9b3ca1
|
[
"MIT"
] | 13
|
2017-10-29T03:17:19.000Z
|
2022-02-21T14:53:06.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Unit tests threadwrapper.py classes and methods.
Usage from git root:
>>> python setup.py test
"""
from pypette import Job, ThreadState, ThreadWrapper
def test_safe_run() -> None:
    """Tests run() thread method can be called safely."""

    def dummy():
        pass

    def corrupt():
        raise Exception('Corrupt')

    # A clean job should end SUCCESS, a raising job should end FAILED; in
    # both cases the wrapper must start in INIT and never propagate.
    for job_fn, final_state in (
        (dummy, ThreadState.SUCCESS),
        (corrupt, ThreadState.FAILED),
    ):
        wrapper = ThreadWrapper(Job(function=job_fn))
        assert wrapper.state == ThreadState.INIT
        wrapper.run()
        assert wrapper.state == final_state
def test_safe_start() -> None:
    """Tests start() thread method can be called safely.

    Fix: added the ``-> None`` return annotation for consistency with
    ``test_safe_run`` above (no behavior change).
    """
    def dummy():
        pass

    def corrupt():
        raise Exception('Corrupt')

    # Clean job: INIT before start, SUCCESS after.
    tw = ThreadWrapper(Job(function=dummy))
    assert tw.state == ThreadState.INIT
    tw.start()
    assert tw.state == ThreadState.SUCCESS

    # Raising job: the wrapper must swallow the exception and report FAILED.
    tw = ThreadWrapper(Job(function=corrupt))
    assert tw.state == ThreadState.INIT
    tw.start()
    assert tw.state == ThreadState.FAILED
| 21.150943
| 59
| 0.647636
| 136
| 1,121
| 5.308824
| 0.338235
| 0.088643
| 0.144044
| 0.265928
| 0.734072
| 0.717452
| 0.717452
| 0.717452
| 0.717452
| 0.717452
| 0
| 0.001151
| 0.224799
| 1,121
| 52
| 60
| 21.557692
| 0.829689
| 0.211418
| 0
| 0.888889
| 0
| 0
| 0.016185
| 0
| 0
| 0
| 0
| 0
| 0.296296
| 1
| 0.222222
| false
| 0.074074
| 0.037037
| 0
| 0.259259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
82ab57f64276aeb8cc1f1d5a4cbbb67c0325402c
| 151
|
py
|
Python
|
app/utils/__init__.py
|
thomas-michels/BullyAlgorithm
|
4d320b980faccc2a12cac857b37c9cbed037c0f1
|
[
"MIT"
] | null | null | null |
app/utils/__init__.py
|
thomas-michels/BullyAlgorithm
|
4d320b980faccc2a12cac857b37c9cbed037c0f1
|
[
"MIT"
] | null | null | null |
app/utils/__init__.py
|
thomas-michels/BullyAlgorithm
|
4d320b980faccc2a12cac857b37c9cbed037c0f1
|
[
"MIT"
] | null | null | null |
"""
Utils module
"""
from app.utils.id_generator import GenerateID
from app.utils.singleton import Singleton
from app.utils.time_now import show_time
| 18.875
| 45
| 0.807947
| 23
| 151
| 5.173913
| 0.521739
| 0.176471
| 0.302521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112583
| 151
| 7
| 46
| 21.571429
| 0.88806
| 0.07947
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
82b03e6903ef7687c71027c4572514a70f3a22a9
| 3,939
|
py
|
Python
|
apps/xformmanager/tests/xsd_checker.py
|
commtrack/commtrack-old-to-del
|
cc9c22754ac192a45483cef609bdcf09aa990340
|
[
"BSD-3-Clause"
] | 1
|
2017-05-19T07:23:00.000Z
|
2017-05-19T07:23:00.000Z
|
apps/xformmanager/tests/xsd_checker.py
|
commtrack/commtrack-old-to-del
|
cc9c22754ac192a45483cef609bdcf09aa990340
|
[
"BSD-3-Clause"
] | null | null | null |
apps/xformmanager/tests/xsd_checker.py
|
commtrack/commtrack-old-to-del
|
cc9c22754ac192a45483cef609bdcf09aa990340
|
[
"BSD-3-Clause"
] | null | null | null |
from xformmanager.tests.util import *
from xformmanager.xformdef import FormDef
from decimal import Decimal
from datetime import *
import unittest
class CompatibleTestCase(unittest.TestCase):
    """Tests FormDef.get_differences() across pairs of versioned XSD files.

    Improvements over the original:
    - ``assertEqual(len(...), n)`` replaces ``assertTrue(len(...) == n)`` so
      failures report the actual count instead of "False is not true".
    - Shared ``_assert_diff_counts`` helper removes the copy-pasted blocks.
    - Dropped the redundant reload of ``self.f1`` in testAddAndRemove
      (setUp already loads the same file).
    """

    def setUp(self):
        # Baseline schema that most tests diff against.
        self.f1 = FormDef.from_file(get_file("data/versioning/base.xsd"))

    def _assert_diff_counts(self, diff, added=0, removed=0, changed=0,
                            types_changed=None):
        """Assert a non-empty diff has exactly the given change counts.

        ``types_changed`` is only checked when explicitly provided, since not
        every test cares about type-level changes.
        """
        self.assertFalse(diff.is_empty())
        self.assertEqual(len(diff.fields_added), added)
        self.assertEqual(len(diff.fields_removed), removed)
        self.assertEqual(len(diff.fields_changed), changed)
        if types_changed is not None:
            self.assertEqual(len(diff.types_changed), types_changed)

    def testSame(self):
        """A schema diffed against itself reports no differences."""
        diff = self.f1.get_differences(self.f1)
        self.assertTrue(diff.is_empty())

    def testAddAndRemove(self):
        """Fields added in one direction are removed in the other."""
        f2 = FormDef.from_file(get_file("data/versioning/field_added.xsd"))
        self._assert_diff_counts(self.f1.get_differences(f2), added=3)
        self._assert_diff_counts(f2.get_differences(self.f1), removed=3)

    def testChangeEnumAddAndRemove(self):
        """An enum change surfaces as a type change, not a field change."""
        f2 = FormDef.from_file(get_file("data/versioning/field_changed_enum.xsd"))
        self._assert_diff_counts(self.f1.get_differences(f2), types_changed=1)
        self._assert_diff_counts(f2.get_differences(self.f1), types_changed=1)

    def testChangeLeafRepeats(self):
        """Toggling repeatability on a leaf changes exactly that field."""
        f2 = FormDef.from_file(
            get_file("data/versioning/field_changed_repeatable_leaf.xsd"))
        # make repeatable
        self._assert_diff_counts(self.f1.get_differences(f2), changed=1)
        # make not repeatable
        self._assert_diff_counts(f2.get_differences(self.f1), changed=1)

    def testChangeNodeRepeats(self):
        """Toggling repeatability on a node changes the node and its child."""
        f1 = FormDef.from_file(get_file("data/versioning/repeats.xsd"))
        f2 = FormDef.from_file(
            get_file("data/versioning/field_changed_repeatable_node.xsd"))
        # When the parent becomes repeatable, both parent and child have
        # changed — in either direction.
        self._assert_diff_counts(f1.get_differences(f2), changed=2)
        self._assert_diff_counts(f2.get_differences(f1), changed=2)

    def testChangeType(self):
        """Changing a field's type marks the affected fields as changed."""
        f2 = FormDef.from_file(get_file("data/versioning/field_changed_type.xsd"))
        self._assert_diff_counts(self.f1.get_differences(f2), changed=3)
| 43.766667
| 95
| 0.670221
| 487
| 3,939
| 5.264887
| 0.129363
| 0.163807
| 0.192278
| 0.23752
| 0.799532
| 0.786271
| 0.786271
| 0.765211
| 0.735959
| 0.735959
| 0
| 0.018124
| 0.201574
| 3,939
| 89
| 96
| 44.258427
| 0.797138
| 0.084285
| 0
| 0.628571
| 0
| 0
| 0.078541
| 0.078541
| 0
| 0
| 0
| 0
| 0.557143
| 1
| 0.1
| false
| 0
| 0.071429
| 0
| 0.185714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
82bd5674b0c9d5b368e726e907669bf934f1ede4
| 166
|
py
|
Python
|
ytcc/fake_logger.py
|
StrikeNP/youtube-closed-captions
|
50f2c173bb98fbf41857517a5462e77b8c8f774f
|
[
"Apache-2.0"
] | 52
|
2017-06-14T20:06:55.000Z
|
2021-09-08T20:40:27.000Z
|
ytcc/fake_logger.py
|
StrikeNP/youtube-closed-captions
|
50f2c173bb98fbf41857517a5462e77b8c8f774f
|
[
"Apache-2.0"
] | 8
|
2018-09-03T05:15:43.000Z
|
2021-03-31T18:32:49.000Z
|
ytcc/fake_logger.py
|
StrikeNP/youtube-closed-captions
|
50f2c173bb98fbf41857517a5462e77b8c8f774f
|
[
"Apache-2.0"
] | 25
|
2018-10-31T17:12:35.000Z
|
2021-07-12T04:06:31.000Z
|
# -*- coding: UTF-8 -*-
class FakeLogger():
    """A no-op stand-in for a logger: accepts and silently discards messages."""

    def debug(self, msg):
        """Discard a debug-level message."""
        return None

    def warning(self, msg):
        """Discard a warning-level message."""
        return None

    def error(self, msg):
        """Discard an error-level message."""
        return None
| 13.833333
| 27
| 0.506024
| 20
| 166
| 4.2
| 0.6
| 0.25
| 0.392857
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009259
| 0.349398
| 166
| 11
| 28
| 15.090909
| 0.768519
| 0.126506
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.428571
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
82d5846187b34e243fd2d759448efadb458441c6
| 18,021
|
py
|
Python
|
test/integration/ggrc/models/mixins/test_customattributable_preconditions_failed.py
|
j0gurt/ggrc-core
|
84662dc85aa8864c907eabe70b8efccf92298a1f
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-01-04T10:55:14.000Z
|
2019-01-04T10:55:14.000Z
|
test/integration/ggrc/models/mixins/test_customattributable_preconditions_failed.py
|
j0gurt/ggrc-core
|
84662dc85aa8864c907eabe70b8efccf92298a1f
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/integration/ggrc/models/mixins/test_customattributable_preconditions_failed.py
|
j0gurt/ggrc-core
|
84662dc85aa8864c907eabe70b8efccf92298a1f
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2018 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Integration tests for "preconditions_failed" CAV and CAable fields logic."""
from ggrc.models.assessment import Assessment
from integration.ggrc import TestCase
from integration.ggrc import generator
from integration.ggrc.models import factories
# Module-level object generator shared by tests.
# NOTE(review): unused in the code visible here — confirm usage elsewhere in
# the module before removing.
GENERATOR = generator.ObjectGenerator()
# pylint: disable=too-many-instance-attributes
class CustomAttributeMock(object):
  """Defines CustomAttributeDefinition and CustomAttributeValue objects.

  Builds a CA definition for ``attributable`` via the factories, and
  optionally a CA value when ``value`` is not None.
  """

  # pylint: disable=too-many-arguments
  def __init__(self, attributable, attribute_type="Text", mandatory=False,
               dropdown_parameters=None, global_=False, value=None):
    # NOTE: assignment order matters — make_definition()/make_value() below
    # read the attributes set here.
    self.attributable = attributable
    self.attribute_type = attribute_type
    self.mandatory = mandatory
    # For Dropdown CAs: (multi_choice_options, multi_choice_mandatory) pair.
    self.dropdown_parameters = dropdown_parameters
    self.attribute_value = value
    self.global_ = global_
    self.definition = self.make_definition()
    self.value = self.make_value()

  def make_definition(self):
    """Generate a custom attribute definition."""
    definition = factories.CustomAttributeDefinitionFactory(
        attribute_type=self.attribute_type,
        definition_type=self.attributable.__class__.__name__,
        # A global definition has no definition_id; a local one is scoped to
        # the attributable object's id.
        definition_id=None if self.global_ else self.attributable.id,
        mandatory=self.mandatory,
        multi_choice_options=(self.dropdown_parameters[0]
                              if self.dropdown_parameters else None),
        multi_choice_mandatory=(self.dropdown_parameters[1]
                                if self.dropdown_parameters else None),
    )
    return definition

  def make_value(self):
    """Generate a custom attribute value."""
    # A value factory is created only when an explicit value (possibly "")
    # was requested; otherwise no CAV exists for this definition.
    if self.attribute_value is not None:
      value = factories.CustomAttributeValueFactory(
          custom_attribute=self.definition,
          attributable=self.attributable,
          attribute_value=self.attribute_value,
      )
    else:
      value = None
    return value
# pylint: disable=super-on-old-class; TestCase is a new-style class
class TestPreconditionsFailed(TestCase):
  """Integration tests suite for preconditions_failed fields logic.

  Failed cases: each test sets up custom-attribute (CA) restrictions that
  are NOT satisfied and checks that ``preconditions_failed`` reports the
  failure, both on the assessment and on the individual CA values.
  """

  # pylint: disable=invalid-name

  def setUp(self):
    """Create an in-progress Assessment used by every test."""
    super(TestPreconditionsFailed, self).setUp()
    self.assessment = factories.AssessmentFactory(
        status=Assessment.PROGRESS_STATE,
    )

  def test_preconditions_failed_with_no_ca(self):
    """No preconditions failed with no CA restrictions."""
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)

  def test_preconditions_failed_with_no_mandatory_ca(self):
    """No preconditions failed with no CA-introduced restrictions."""
    # Text and Checkbox CAs are not mandatory, so empty values are fine.
    ca_text = CustomAttributeMock(self.assessment, attribute_type="Text",
                                  value="")
    ca_cbox = CustomAttributeMock(self.assessment, attribute_type="Checkbox",
                                  value="")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca_text.value.preconditions_failed)
    self.assertFalse(ca_cbox.value.preconditions_failed)

  def test_preconditions_failed_with_mandatory_empty_ca(self):
    """Preconditions failed if mandatory CA is empty."""
    ca = CustomAttributeMock(self.assessment, mandatory=True, value="")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    # The CA value itself reports the failed precondition kind ("value").
    self.assertEqual(ca.value.preconditions_failed,
                     ["value"])

  def test_preconditions_failed_with_mandatory_filled_ca(self):
    """No preconditions failed if mandatory CA is filled."""
    ca = CustomAttributeMock(self.assessment, mandatory=True, value="Foo")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca.value.preconditions_failed)

  def test_preconditions_failed_with_mandatory_empty_global_ca(self):
    """Preconditions failed if global mandatory CA is empty."""
    ca = CustomAttributeMock(self.assessment, mandatory=True, global_=True,
                             value="")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(ca.value.preconditions_failed, ["value"])

  def test_preconditions_failed_with_mandatory_filled_global_ca(self):
    """No preconditions failed if global mandatory CA is filled."""
    ca = CustomAttributeMock(self.assessment, mandatory=True, global_=True,
                             value="Foo")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca.value.preconditions_failed)

  def test_preconditions_failed_with_missing_mandatory_comment(self):
    """Preconditions failed if comment required by CA is missing."""
    # Dropdown flags encode mandatory artifacts per option as a bitmask:
    # 1 = comment, 2 = evidence, 4 = url (combined by addition below).
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,comment_required", "0,1"),
        value="comment_required",
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(ca.value.preconditions_failed, ["comment"])

  def test_preconditions_failed_with_missing_mandatory_evidence(self):
    """Preconditions failed if evidence required by CA is missing."""
    # Flag 2 on the selected option makes an evidence file mandatory.
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,evidence_required", "0,2"),
        value="evidence_required",
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(ca.value.preconditions_failed, ["evidence"])

  def test_preconditions_failed_with_missing_mandatory_url(self):
    """Preconditions failed if url required by CA is missing."""
    # Flag 4 on the selected option makes an evidence URL mandatory.
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,url_required", "0,4"),
        value="url_required",
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(ca.value.preconditions_failed, ["url"])

  def test_preconditions_failed_with_mandatory_comment_and_evidence(self):
    """Preconditions failed with mandatory comment and evidence missing."""
    # 3 == 1 (comment) + 2 (evidence).
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,comment_and_evidence_required", "0,3"),
        value="comment_and_evidence_required",
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    # Order of the reported reasons is not significant, compare as sets.
    self.assertEqual(set(ca.value.preconditions_failed),
                     {"comment", "evidence"})

  def test_preconditions_failed_with_mandatory_url_and_evidence(self):
    """Preconditions failed with mandatory url and evidence missing."""
    # 6 == 4 (url) + 2 (evidence).
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,url_and_evidence_required", "0,6"),
        value="url_and_evidence_required",
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(set(ca.value.preconditions_failed),
                     {"url", "evidence"})

  def test_preconditions_failed_with_mandatory_url_and_comment(self):
    """Preconditions failed with mandatory url and comment missing."""
    # 5 == 4 (url) + 1 (comment).
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,url_and_comment_required", "0,5"),
        value="url_and_comment_required",
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(set(ca.value.preconditions_failed),
                     {"url", "comment"})

  def test_preconditions_failed_with_mandatory_url_comment_and_evidence(self):
    """Preconditions failed with mandatory url, comment and evidence missing.
    """
    # 7 == 4 (url) + 1 (comment) + 2 (evidence).
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,url_comment_and_evidence_required", "0,7"),
        value="url_comment_and_evidence_required",
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(set(ca.value.preconditions_failed),
                     {"url", "comment", "evidence"})

  def test_preconditions_failed_with_changed_value(self):
    """Preconditions failed and comment invalidated on update to CAV."""
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,comment_required", "0,1"),
        value=None,  # the value is made with generator to store revision too
    )
    _, ca.value = GENERATOR.generate_custom_attribute_value(
        custom_attribute_id=ca.definition.id,
        attributable=self.assessment,
        attribute_value="comment_required",
    )
    # A comment linked to the CAV revision satisfies the "comment" precondition
    # for the current value.
    comment = factories.CommentFactory(
        assignee_type="Assignees",
        description="Mandatory comment",
    )
    comment.custom_attribute_revision_upd({
        "custom_attribute_revision_upd": {
            "custom_attribute_value": {
                "id": ca.value.id,
            },
        },
    })
    factories.RelationshipFactory(
        source=self.assessment,
        destination=comment,
    )
    # new CA value not requiring comment
    self.assessment.custom_attribute_values = [{
        "attribute_value": "foo",
        "custom_attribute_id": ca.definition.id,
    }]
    GENERATOR.api.modify_object(self.assessment, {})
    # new CA value requiring comment; the old comment should be considered
    # invalid
    self.assessment.custom_attribute_values = [{
        "attribute_value": "comment_required",
        "custom_attribute_id": ca.definition.id,
    }]
    GENERATOR.api.modify_object(self.assessment, {})
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)

  def test_preconditions_failed_with_missing_several_mandatory_evidences(self):
    """Preconditions failed if count(evidences) < count(evidences_required)."""
    # Two CAs each require their own evidence, so one shared evidence
    # is not enough.
    ca1 = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,evidence_required", "0,2"),
        value="evidence_required"
    )
    ca2 = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,evidence_required", "0,2"),
        value="evidence_required"
    )
    # only one evidence provided yet
    evidence = factories.EvidenceFileFactory(
        title="Mandatory evidence",
    )
    factories.RelationshipFactory(
        source=self.assessment,
        destination=evidence,
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertTrue(preconditions_failed)
    self.assertEqual(ca1.value.preconditions_failed, ["evidence"])
    self.assertEqual(ca2.value.preconditions_failed, ["evidence"])
class TestPreconditionsPassed(TestCase):
  """Integration tests suite for preconditions_failed fields logic.

  Passed cases: each test satisfies the custom-attribute (CA) restrictions
  it sets up and checks that ``preconditions_failed`` is falsy, both on the
  assessment and on the individual CA values.
  """

  # pylint: disable=invalid-name

  def setUp(self):
    """Create an in-progress Assessment used by every test."""
    super(TestPreconditionsPassed, self).setUp()
    self.assessment = factories.AssessmentFactory(
        status=Assessment.PROGRESS_STATE,
    )

  def test_preconditions_failed_with_no_ca(self):
    """No preconditions failed with no CA restrictions."""
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)

  def test_preconditions_failed_with_no_mandatory_ca(self):
    """No preconditions failed with no CA-introduced restrictions."""
    # Text and Checkbox CAs are not mandatory, so empty values are fine.
    ca_text = CustomAttributeMock(self.assessment, attribute_type="Text",
                                  value="")
    ca_cbox = CustomAttributeMock(self.assessment, attribute_type="Checkbox",
                                  value="")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca_text.value.preconditions_failed)
    self.assertFalse(ca_cbox.value.preconditions_failed)

  def test_preconditions_failed_with_mandatory_filled_ca(self):
    """No preconditions failed if mandatory CA is filled."""
    ca = CustomAttributeMock(self.assessment, mandatory=True, value="Foo")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca.value.preconditions_failed)

  def test_preconditions_failed_with_mandatory_filled_global_ca(self):
    """No preconditions failed if global mandatory CA is filled."""
    ca = CustomAttributeMock(self.assessment, mandatory=True, global_=True,
                             value="Foo")
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca.value.preconditions_failed)

  def test_preconditions_failed_with_present_mandatory_comment(self):
    """No preconditions failed if comment required by CA is present."""
    # Dropdown flag 1 on the selected option makes a comment mandatory.
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,comment_required", "0,1"),
        value=None,  # the value is made with generator to store revision too
    )
    _, ca.value = GENERATOR.generate_custom_attribute_value(
        custom_attribute_id=ca.definition.id,
        attributable=self.assessment,
        attribute_value="comment_required",
    )
    # Link a comment to the CAV revision to satisfy the precondition.
    comment = factories.CommentFactory(
        assignee_type="Assignees",
        description="Mandatory comment",
    )
    comment.custom_attribute_revision_upd({
        "custom_attribute_revision_upd": {
            "custom_attribute_value": {
                "id": ca.value.id,
            },
        },
    })
    factories.RelationshipFactory(
        source=self.assessment,
        destination=comment,
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca.value.preconditions_failed)

  def test_preconditions_failed_with_present_mandatory_evidence(self):
    """No preconditions failed if evidence required by CA is present."""
    # Dropdown flag 2 on the selected option makes an evidence mandatory.
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,evidence_required", "0,2"),
        value="evidence_required",
    )
    evidence = factories.EvidenceFileFactory(
        title="Mandatory evidence",
    )
    factories.RelationshipFactory(
        source=self.assessment,
        destination=evidence,
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca.value.preconditions_failed)

  def test_preconditions_failed_with_several_mandatory_evidences(self):
    """No preconditions failed if evidences required by CAs are present."""
    # Two CAs each require an evidence; providing two evidences satisfies
    # both preconditions.
    ca1 = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,evidence_required", "0,2"),
        value="evidence_required"
    )
    ca2 = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,evidence_required", "0,2"),
        value="evidence_required"
    )
    # only one evidence provided yet
    evidence = factories.EvidenceFileFactory(
        title="Mandatory evidence",
    )
    factories.RelationshipFactory(
        source=self.assessment,
        destination=evidence,
    )
    # the second evidence
    evidence = factories.EvidenceFileFactory(
        title="Second mandatory evidence",
    )
    factories.RelationshipFactory(
        source=self.assessment,
        destination=evidence,
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca1.value.preconditions_failed)
    self.assertFalse(ca2.value.preconditions_failed)

  def test_preconditions_failed_with_present_mandatory_url(self):
    """No preconditions failed if url required by CA is present."""
    # Dropdown flag 4 on the selected option makes a URL mandatory.
    ca = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,url_required", "0,4"),
        value="url_required",
    )
    url = factories.EvidenceUrlFactory(
        title="Mandatory url",
    )
    factories.RelationshipFactory(
        source=self.assessment,
        destination=url,
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca.value.preconditions_failed)

  def test_preconditions_failed_with_several_mandatory_urls(self):
    """No preconditions failed if URLs required by CAs are present."""
    # Two CAs each require a URL; providing two URLs satisfies both.
    ca1 = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,url_required", "0,4"),
        value="url_required"
    )
    ca2 = CustomAttributeMock(
        self.assessment,
        attribute_type="Dropdown",
        dropdown_parameters=("foo,url_required", "0,4"),
        value="url_required"
    )
    # only one URL provided yet
    url = factories.EvidenceUrlFactory(
        title="Mandatory URL",
    )
    factories.RelationshipFactory(
        source=self.assessment,
        destination=url,
    )
    # the second URL
    url = factories.EvidenceUrlFactory(
        title="Second mandatory URL",
    )
    factories.RelationshipFactory(
        source=self.assessment,
        destination=url,
    )
    preconditions_failed = self.assessment.preconditions_failed
    self.assertFalse(preconditions_failed)
    self.assertFalse(ca1.value.preconditions_failed)
    self.assertFalse(ca2.value.preconditions_failed)
| 35.404715
| 79
| 0.711892
| 1,835
| 18,021
| 6.749864
| 0.089918
| 0.228565
| 0.135556
| 0.076861
| 0.829485
| 0.813903
| 0.805264
| 0.775715
| 0.743501
| 0.717019
| 0
| 0.003731
| 0.196881
| 18,021
| 508
| 80
| 35.474409
| 0.852069
| 0.135287
| 0
| 0.657682
| 0
| 0
| 0.089606
| 0.030236
| 0
| 0
| 0
| 0
| 0.134771
| 1
| 0.078167
| false
| 0.005391
| 0.010782
| 0
| 0.102426
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
82daf82dd6555639500b95f0ba2c7a966cbced4c
| 1,159
|
py
|
Python
|
prediction/endpoints/auth_controller.py
|
EcoJesss/ecosystem-notebooks
|
095b2bc59b9749129a454a7b16c97e20d9484fd4
|
[
"MIT"
] | 2
|
2020-08-30T12:50:47.000Z
|
2020-11-24T12:59:43.000Z
|
prediction/endpoints/auth_controller.py
|
EcoJesss/ecosystem-notebooks
|
095b2bc59b9749129a454a7b16c97e20d9484fd4
|
[
"MIT"
] | null | null | null |
prediction/endpoints/auth_controller.py
|
EcoJesss/ecosystem-notebooks
|
095b2bc59b9749129a454a7b16c97e20d9484fd4
|
[
"MIT"
] | 2
|
2020-09-02T16:54:25.000Z
|
2021-06-20T20:30:11.000Z
|
# auth-controller
#
# Endpoint descriptors for the authentication API. Every descriptor
# carries the HTTP method, the endpoint path, and the message templates
# used when logging a call and when reporting an error.
#
# Defect fixed: the seven dict literals were duplicated verbatim; they
# are now produced by a single private factory so the shared shape
# (keys and message templates) lives in one place.


def _endpoint(path, type_="post"):
    """Build a standard endpoint descriptor.

    Args:
        path: URL path of the endpoint, e.g. "/auth/login".
        type_: HTTP method name; all auth endpoints use "post".

    Returns:
        A new dict with "type", "endpoint", "call_message" and
        "error_message" keys (a fresh dict per call, so callers may
        mutate their descriptor safely).
    """
    return {
        "type": type_,
        "endpoint": path,
        "call_message": "{type} {endpoint}",
        "error_message": "{type} {endpoint} {response_code}",
    }


AUTH_LOGIN = _endpoint("/auth/login")
AUTH_REFRESH_TOKEN = _endpoint("/auth/refresh-token")
AUTH_REQUEST_PASS = _endpoint("/auth/request-pass")
AUTH_RESET_PASS = _endpoint("/auth/reset-pass")
AUTH_RESTORE_PASS = _endpoint("/auth/restore-pass")
AUTH_SIGN_OUT = _endpoint("/auth/sign-out")
AUTH_SIGN_UP = _endpoint("/auth/sign-up")
| 26.953488
| 53
| 0.647972
| 133
| 1,159
| 5.390977
| 0.150376
| 0.214784
| 0.37099
| 0.195258
| 0.804742
| 0.637378
| 0.637378
| 0.637378
| 0.637378
| 0.637378
| 0
| 0
| 0.115617
| 1,159
| 43
| 54
| 26.953488
| 0.699512
| 0.012942
| 0
| 0.5
| 0
| 0
| 0.652668
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7d7825a5e829baf924d4a4d0407b77bfbc2557b5
| 96,265
|
py
|
Python
|
bricksrc/parameter.py
|
david-waterworth/Brick
|
e1435ddfe3cda13f2edca56c65b2295167472d52
|
[
"BSD-3-Clause"
] | 1
|
2021-07-06T06:10:56.000Z
|
2021-07-06T06:10:56.000Z
|
bricksrc/parameter.py
|
david-waterworth/Brick
|
e1435ddfe3cda13f2edca56c65b2295167472d52
|
[
"BSD-3-Clause"
] | null | null | null |
bricksrc/parameter.py
|
david-waterworth/Brick
|
e1435ddfe3cda13f2edca56c65b2295167472d52
|
[
"BSD-3-Clause"
] | null | null | null |
from rdflib import Literal
from .namespaces import BRICK, TAG, OWL
parameter_definitions = {
"Parameter": {
"tags": [TAG.Point, TAG.Parameter],
"subclasses": {
"Delay_Parameter": {
"tags": [TAG.Point, TAG.Delay, TAG.Parameter],
"subclasses": {
"Alarm_Delay_Parameter": {
"tags": [TAG.Point, TAG.Alarm, TAG.Delay, TAG.Parameter],
},
},
},
"Humidity_Parameter": {
"tags": [TAG.Point, TAG.Humidity, TAG.Parameter],
"subclasses": {
"High_Humidity_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.High,
TAG.Humidity,
TAG.Alarm,
TAG.Parameter,
],
},
"Low_Humidity_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Humidity,
TAG.Alarm,
TAG.Parameter,
],
},
},
},
"Load_Parameter": {
"tags": [TAG.Point, TAG.Load, TAG.Parameter],
"subclasses": {
"Max_Load_Setpoint": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Load,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Temperature_Parameter": {
"tags": [TAG.Point, TAG.Temperature, TAG.Parameter],
"subclasses": {
"High_Temperature_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.High,
TAG.Temperature,
TAG.Alarm,
TAG.Parameter,
],
},
"Low_Temperature_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Temperature,
TAG.Alarm,
TAG.Parameter,
],
},
"Low_Freeze_Protect_Temperature_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Freeze,
TAG.Protect,
TAG.Temperature,
TAG.Parameter,
],
},
"Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Sensor,
],
"subclasses": {
"Outside_Air_Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.Outside,
TAG.Air,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Parameter,
],
"subclasses": {
"Low_Outside_Air_Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Outside,
TAG.Air,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Parameter,
],
},
"High_Outside_Air_Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.High,
TAG.Outside,
TAG.Air,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Parameter,
],
},
},
},
},
},
},
},
"PID_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.PID],
"subclasses": {
"Gain_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.PID, TAG.Gain],
"subclasses": {
"Integral_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Integral,
],
"subclasses": {
"Supply_Air_Integral_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Integral,
TAG.Gain,
TAG.Parameter,
TAG.PID,
],
}
},
},
"Proportional_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Proportional,
],
"subclasses": {
"Supply_Air_Proportional_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Proportional,
TAG.Supply,
TAG.Air,
],
},
},
},
"Derivative_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Derivative,
],
},
},
},
"Step_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.Step],
"subclasses": {
"Differential_Pressure_Step_Parameter": {
"subclasses": {
"Chilled_Water_Differential_Pressure_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
}
},
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
},
"Static_Pressure_Step_Parameter": {
"subclasses": {
"Air_Static_Pressure_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
"subclasses": {
"Discharge_Air_Static_Pressure_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
},
},
}
},
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
},
"Temperature_Step_Parameter": {
"subclasses": {
"Air_Temperature_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
"subclasses": {
"Discharge_Air_Temperature_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
},
"Supply_Air_Temperature_Step_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Temperature_Step_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
},
},
}
},
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
},
},
},
"Time_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.Time],
"subclasses": {
"Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Time,
TAG.Integral,
],
"subclasses": {
"Air_Temperature_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Temperature,
TAG.Parameter,
TAG.PID,
TAG.Time,
TAG.Integral,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Cooling_Discharge_Air_Temperature_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Cooling_Supply_Air_Temperature_Integral_Time_Parameter": {
OWL.equivalentClass: BRICK[
"Cooling_Discharge_Air_Temperature_Integral_Time_Parameter"
],
"tags": [
TAG.Point,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Heating_Discharge_Air_Temperature_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Heating_Supply_Air_Temperature_Integral_Time_Parameter": {
OWL.equivalentClass: BRICK[
"Heating_Discharge_Air_Temperature_Integral_Time_Parameter"
],
"tags": [
TAG.Point,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Hot_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Chilled_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Discharge_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Exhaust_Air_Flow_Integral_Time_Parameter": {
"subclasses": {
"Exhaust_Air_Stack_Flow_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Stack,
TAG.Flow,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
}
},
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Flow,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Static_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Discharge_Air_Static_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Air_Static_Pressure_Integral_Time_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Static_Pressure_Integral_Time_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Supply_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Temperature_Integral_Time_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Derivative_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Time,
TAG.Derivative,
],
},
},
},
"Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Differential_Pressure_Proportional_Band": {
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.PID,
],
"subclasses": {
"Hot_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Chilled_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Discharge_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Discharge_Air_Temperature_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Heating_Discharge_Air_Temperature_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Cooling_Discharge_Air_Temperature_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Supply_Air_Temperature_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Temperature_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Cooling_Supply_Air_Temperature_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Cooling_Discharge_Air_Temperature_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Heating_Supply_Air_Temperature_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Heating_Discharge_Air_Temperature_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Exhaust_Air_Flow_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Flow,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Exhaust_Air_Stack_Flow_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Stack,
TAG.Flow,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Static_Pressure_Proportional_Band_Parameter": {
"subclasses": {
"Discharge_Air_Static_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Exhaust_Air_Static_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Air_Static_Pressure_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Static_Pressure_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Temperature_Proportional_Band_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Discharge_Water_Temperature_Proportional_Band_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Water,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
},
},
"Tolerance_Parameter": {
"tags": [TAG.Point, TAG.Tolerance, TAG.Parameter],
"subclasses": {
"Humidity_Tolerance_Parameter": {
"tags": [TAG.Point, TAG.Tolerance, TAG.Parameter, TAG.Humidity],
"parents": [BRICK.Humidity_Parameter],
},
"Temperature_Tolerance_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Tolerance,
TAG.Parameter,
TAG.Temperature,
],
},
},
},
"Limit": {
"tags": [TAG.Point, TAG.Parameter, TAG.Limit],
"subclasses": {
"Close_Limit": {
"tags": [TAG.Point, TAG.Close, TAG.Parameter, TAG.Limit],
},
"Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"parents": [BRICK.Max_Limit],
},
"Min_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"parents": [BRICK.Min_Limit],
},
},
},
"Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Discharge_Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"subclasses": {
"Max_Discharge_Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [
BRICK.Max_Temperature_Setpoint_Limit
],
},
"Min_Discharge_Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [
BRICK.Min_Temperature_Setpoint_Limit
],
},
},
},
},
},
"Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Current_Limit": {
"tags": [TAG.Point, TAG.Current, TAG.Limit, TAG.Parameter],
},
"Position_Limit": {
"tags": [TAG.Point, TAG.Position, TAG.Limit],
"subclasses": {
"Max_Position_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Position,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Max_Limit],
},
"Min_Position_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Position,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Min_Limit],
},
},
},
"Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Fresh_Air_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Fresh,
TAG.Air,
TAG.Limit,
TAG.Setpoint,
],
"subclasses": {
"Min_Fresh_Air_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Fresh,
TAG.Air,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Min_Limit],
},
},
},
"Ventilation_Air_Flow_Ratio_Limit": {
"tags": [
TAG.Point,
TAG.Ventilation,
TAG.Air,
TAG.Ratio,
TAG.Limit,
],
},
"Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"High_Static_Pressure_Cutout_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.High,
TAG.Static,
TAG.Pressure,
TAG.Cutout,
TAG.Limit,
TAG.Setpoint,
],
},
},
},
"Max_Limit": {
"tags": [TAG.Point, TAG.Max, TAG.Limit, TAG.Parameter],
"subclasses": {
"Max_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Temperature_Parameter],
},
"Max_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
},
},
},
},
"Min_Limit": {
"tags": [TAG.Point, TAG.Min, TAG.Limit, TAG.Parameter],
"subclasses": {
"Min_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Temperature_Parameter],
},
"Min_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Outside_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Outside,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
},
},
},
},
},
},
},
}
}
| 53.067806
| 111
| 0.203262
| 3,524
| 96,265
| 5.31328
| 0.018729
| 0.056452
| 0.096774
| 0.120968
| 0.938421
| 0.922185
| 0.902104
| 0.874653
| 0.813822
| 0.744232
| 0
| 0
| 0.749213
| 96,265
| 1,813
| 112
| 53.097077
| 0.775578
| 0
| 0
| 0.791391
| 0
| 0
| 0.088662
| 0.073131
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001104
| 0
| 0.001104
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7d8fcddf2ffcb350697735a5e19883bed5817e43
| 3,435
|
py
|
Python
|
tests/test_response.py
|
tarsil/django-loguru
|
f7c68ac23eaa88cdcb76ea73b0d4f1db93b452dd
|
[
"MIT"
] | 7
|
2021-11-29T22:14:21.000Z
|
2022-02-16T11:50:11.000Z
|
tests/test_response.py
|
tarsil/django-loguru
|
f7c68ac23eaa88cdcb76ea73b0d4f1db93b452dd
|
[
"MIT"
] | 1
|
2022-02-06T16:15:46.000Z
|
2022-02-06T16:15:46.000Z
|
tests/test_response.py
|
tarsil/django-loguru
|
f7c68ac23eaa88cdcb76ea73b0d4f1db93b452dd
|
[
"MIT"
] | 1
|
2022-01-03T12:07:48.000Z
|
2022-01-03T12:07:48.000Z
|
import pytest
from django.test import TestCase, override_settings
from django.urls import reverse
from django_webtest import WebTest
from .base import TESTING_MIDDLEWARE
class TestHttpResponseWithoutUserMiddleware(WebTest):
csrf_checks = False
direct = reverse('direct')
direct_params = reverse('direct-param')
def test_middleware_simple_get_request(self):
response = self.app.get(self.direct)
assert response.status_code == 200
def test_middleware_simple_post_request(self):
response = self.app.post(self.direct_params, params={'data': 'data'})
assert response.status_code == 200
def test_middleware_simple_put_request(self):
response = self.app.put(self.direct_params, params={'data': 'data'})
assert response.status_code == 200
def test_middleware_simple_delete_request(self):
response = self.app.delete(self.direct_params)
assert response.status_code == 200
def test_middleware_simple_get_with_query_string_request(self):
response = self.app.get(self.direct_params, params={'data': 'data'})
assert response.status_code == 200
def test_middleware_simple_post_with_query_string_request(self):
response = self.app.post(f'{self.direct_params}?data=data', params={'data_json': 'data_json'})
assert response.status_code == 200
def test_middleware_simple_put_with_query_string_request(self):
response = self.app.put(f'{self.direct_params}?data=data', params={'data_json': 'data_json'})
assert response.status_code == 200
def test_middleware_simple_delete_with_query_string_request(self):
response = self.app.delete(self.direct_params, {'data': 'data'})
assert response.status_code == 200
@override_settings(MIDDLEWARE=TESTING_MIDDLEWARE)
class TestResponseFunctionWithUser(WebTest):
csrf_checks = False
direct = reverse('drf-direct')
direct_params = reverse('drf-direct-params')
def test_middleware_simple_get_request(self):
response = self.app.get(self.direct_params)
assert response.status_code == 200
def test_middleware_simple_post_request(self):
response = self.app.post(self.direct_params, params={'data': 'data'})
assert response.status_code == 200
def test_middleware_simple_put_request(self):
response = self.app.put(self.direct_params, params={'data': 'data'})
assert response.status_code == 200
def test_middleware_simple_delete_request(self):
response = self.app.delete(self.direct_params)
assert response.status_code == 200
def test_middleware_simple_get_with_query_string_request(self):
response = self.app.get(self.direct_params, {'data': 'data'})
assert response.status_code == 200
def test_middleware_simple_post_with_query_string_request(self):
response = self.app.post(f'{self.direct_params}?data=data', params={'data_json': 'data_json'})
assert response.status_code == 200
def test_middleware_simple_put_with_query_string_request(self):
response = self.app.put(f'{self.direct_params}?data=data', params={'data_json': 'data_json'})
assert response.status_code == 200
def test_middleware_simple_delete_with_query_string_request(self):
response = self.app.delete(f'{self.direct_params}', {'data': 'data'})
assert response.status_code == 200
| 34.69697
| 102
| 0.72198
| 440
| 3,435
| 5.327273
| 0.1
| 0.09215
| 0.116041
| 0.156997
| 0.854522
| 0.854522
| 0.824232
| 0.824232
| 0.824232
| 0.824232
| 0
| 0.016872
| 0.171761
| 3,435
| 99
| 103
| 34.69697
| 0.80703
| 0
| 0
| 0.709677
| 0
| 0
| 0.093423
| 0.034924
| 0
| 0
| 0
| 0
| 0.258065
| 1
| 0.258065
| false
| 0
| 0.080645
| 0
| 0.467742
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
81afa8f8e3a8bdb8416f6429a79815ac5994c0de
| 10,867
|
py
|
Python
|
rdkit/ML/Data/UnitTestQuantize.py
|
darkreactions/rdkit
|
0c388029c1f9386d832f6c321e59a11589c373d8
|
[
"PostgreSQL"
] | null | null | null |
rdkit/ML/Data/UnitTestQuantize.py
|
darkreactions/rdkit
|
0c388029c1f9386d832f6c321e59a11589c373d8
|
[
"PostgreSQL"
] | null | null | null |
rdkit/ML/Data/UnitTestQuantize.py
|
darkreactions/rdkit
|
0c388029c1f9386d832f6c321e59a11589c373d8
|
[
"PostgreSQL"
] | 2
|
2017-12-04T02:28:18.000Z
|
2018-11-29T01:18:46.000Z
|
#
# Copyright (C) 2001 greg Landrum
#
""" unit testing code for variable quantization
"""
import unittest
from rdkit import RDConfig
from rdkit.ML.Data import Quantize
from rdkit.six.moves import map
class TestCase(unittest.TestCase):
def setUp(self):
#print '\n%s: '%self.shortDescription(),
pass
def testOneSplit1(self):
""" simple case (clear division)
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.4, 0), (1.4, 0), (1.6, 0), (2., 1), (2.1, 1), (2.2, 1),
(2.3, 1)]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize.FindVarQuantBound(varValues, resCodes, nPossibleRes)
target = (1.8, 0.97095)
assert list(map(lambda x,y:Quantize.feq(x,y,1e-4),res,target))==[1,1],\
'result comparison failed: %s != %s'%(res,target)
def testOneSplit2(self):
""" some noise
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.4, 0), (1.4, 1), (1.6, 0), (2., 1), (2.1, 1), (2.2, 1),
(2.3, 1)]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize.FindVarQuantBound(varValues, resCodes, nPossibleRes)
target = (1.8, 0.60999)
assert list(map(lambda x,y:Quantize.feq(x,y,1e-4),res,target))==[1,1],\
'result comparison failed: %s != %s'%(res,target)
def testOneSplit3(self):
""" optimal division not possibe
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.4, 2), (1.4, 2), (1.6, 2), (2., 2), (2.1, 1), (2.2, 1),
(2.3, 1)]
varValues, resCodes = zip(*d)
nPossibleRes = 3
res = Quantize.FindVarQuantBound(varValues, resCodes, nPossibleRes)
target = (1.3, 0.88129)
assert list(map(lambda x,y:Quantize.feq(x,y,1e-4),res,target))==[1,1],\
'result comparison failed: %s != %s'%(res,target)
def testOneSplit4(self):
""" lots of duplicates
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.2, 1), (1.4, 0), (1.4, 0), (1.6, 0), (2., 1), (2.1, 1),
(2.1, 1), (2.1, 1), (2.1, 1), (2.2, 1), (2.3, 1)]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize.FindVarQuantBound(varValues, resCodes, nPossibleRes)
target = (1.8, 0.68939)
assert list(map(lambda x,y:Quantize.feq(x,y,1e-4),res,target))==[1,1],\
'result comparison failed: %s != %s'%(res,target)
def testOneSplit5(self):
""" same as testOneSplit1 data, but out of order
"""
d = [(1., 0), (1.1, 0), (2.2, 1), (1.2, 0), (1.6, 0), (1.4, 0), (2., 1), (2.1, 1), (1.4, 0),
(2.3, 1)]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize.FindVarQuantBound(varValues, resCodes, nPossibleRes)
target = (1.8, 0.97095)
assert list(map(lambda x,y:Quantize.feq(x,y,1e-4),res,target))==[1,1],\
'result comparison failed: %s != %s'%(res,target)
def testMultSplit1(self):
""" simple dual split
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.4, 2), (1.4, 2), (1.6, 2), (2., 2), (2.1, 1), (2.1, 1),
(2.1, 1), (2.2, 1), (2.3, 1)]
varValues, resCodes = zip(*d)
nPossibleRes = 3
res = Quantize.FindVarMultQuantBounds(varValues, 2, resCodes, nPossibleRes)
target = ([1.3, 2.05], 1.55458)
assert min(map(lambda x,y:Quantize.feq(x,y,1e-4),res[0],target[0]))==1,\
'split bound comparison failed: %s != %s'%(res[0],target[0])
assert Quantize.feq(res[1],target[1],1e-4),\
'InfoGain comparison failed: %s != %s'%(res[1],target[1])
def testMultSplit2(self):
""" same test as testMultSplit1, but out of order
"""
d = [(1., 0), (2.1, 1), (1.1, 0), (1.2, 0), (1.4, 2), (1.6, 2), (2., 2), (1.4, 2), (2.1, 1),
(2.2, 1), (2.1, 1), (2.3, 1)]
varValues, resCodes = zip(*d)
nPossibleRes = 3
res = Quantize.FindVarMultQuantBounds(varValues, 2, resCodes, nPossibleRes)
target = ([1.3, 2.05], 1.55458)
assert Quantize.feq(res[1],target[1],1e-4),\
'InfoGain comparison failed: %s != %s'%(res[1],target[1])
assert min(map(lambda x,y:Quantize.feq(x,y,1e-4),res[0],target[0]))==1,\
'split bound comparison failed: %s != %s'%(res[0],target[0])
def testMultSplit3(self):
""" 4 possible results
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.4, 2), (1.4, 2), (1.6, 2), (2., 2), (2.1, 1), (2.1, 1),
(2.1, 1), (2.2, 1), (2.3, 1), (3.0, 3), (3.1, 3), (3.2, 3), (3.3, 3)]
varValues, resCodes = zip(*d)
nPossibleRes = 4
res = Quantize.FindVarMultQuantBounds(varValues, 3, resCodes, nPossibleRes)
target = ([1.30, 2.05, 2.65], 1.97722)
assert Quantize.feq(res[1],target[1],1e-4),\
'InfoGain comparison failed: %s != %s'%(res[1],target[1])
assert min(map(lambda x,y:Quantize.feq(x,y,1e-4),res[0],target[0]))==1,\
'split bound comparison failed: %s != %s'%(res[0],target[0])
def testMultSplit4(self):
""" dual valued, with an island
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.4, 1), (1.4, 1), (1.6, 1), (2., 1), (2.1, 0), (2.1, 0),
(2.1, 0), (2.2, 0), (2.3, 0)]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize.FindVarMultQuantBounds(varValues, 2, resCodes, nPossibleRes)
target = ([1.3, 2.05], .91830)
assert Quantize.feq(res[1],target[1],1e-4),\
'InfoGain comparison failed: %s != %s'%(res[1],target[1])
assert min(map(lambda x,y:Quantize.feq(x,y,1e-4),res[0],target[0]))==1,\
'split bound comparison failed: %s != %s'%(res[0],target[0])
def testMultSplit5(self):
""" dual valued, with an island, a bit noisy
"""
d = [(1., 0), (1.1, 0), (1.2, 0), (1.4, 1), (1.4, 0), (1.6, 1), (2., 1), (2.1, 0), (2.1, 0),
(2.1, 0), (2.2, 1), (2.3, 0)]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize.FindVarMultQuantBounds(varValues, 2, resCodes, nPossibleRes)
target = ([1.3, 2.05], .34707)
assert Quantize.feq(res[1],target[1],1e-4),\
'InfoGain comparison failed: %s != %s'%(res[1],target[1])
assert min(map(lambda x,y:Quantize.feq(x,y,1e-4),res[0],target[0]))==1,\
'split bound comparison failed: %s != %s'%(res[0],target[0])
def test9NewSplits(self):
"""
"""
d = [(0, 0),
(1, 1),
(2, 0), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 2], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 2], str(res))
d = [(0, 1),
(1, 0),
(2, 1), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 2], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 2], str(res))
d = [(0, 0),
(0, 0),
(1, 1),
(1, 1),
(2, 0),
(2, 1), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
d = [(0, 0),
(0, 1),
(1, 1),
(1, 1),
(2, 0),
(2, 1), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
d = [(0, 0),
(0, 0),
(1, 0),
(1, 1),
(2, 0),
(2, 1), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
d = [(0, 0),
(0, 0),
(1, 0),
(1, 0),
(2, 1),
(2, 1), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [4], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [4], str(res))
d = [(0, 0),
(0, 0),
(1, 1),
(1, 1),
(2, 1),
(2, 1), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2], str(res))
d = [(0, 0),
(0, 0),
(1, 0),
(1, 0),
(2, 0),
(2, 0), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [], str(res))
d = [(0, 0),
(0, 1),
(1, 0),
(1, 1),
(2, 0),
(2, 0), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [2, 4], str(res))
d = [(1, 0),
(2, 1),
(2, 1),
(3, 1),
(3, 1),
(3, 1),
(4, 0),
(4, 1),
(4, 1), ]
varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 6], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 6], str(res))
d = [(1, 1.65175902843, 0), (2, 1.89935600758, 0), (3, 1.89935600758, 1), (4, 1.89935600758, 1),
(5, 2.7561609745, 1), (6, 2.7561609745, 1), (7, 2.7561609745, 1), (8, 2.7561609745, 1),
(9, 3.53454303741, 1), (10, 3.53454303741, 1), (11, 3.53454303741, 1),
(12, 3.53454303741, 1), (13, 3.53454303741, 1)]
_, varValues, resCodes = zip(*d)
nPossibleRes = 2
res = Quantize._NewPyFindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 4], str(res))
res = Quantize._FindStartPoints(varValues, resCodes, len(d))
self.assertTrue(res == [1, 4], str(res))
def testGithubIssue18(self):
d = [0, 1, 2, 3, 4]
a = [0, 0, 1, 1, 1]
tpl = Quantize.FindVarMultQuantBounds(d, 1, a, 2)
d2 = [(x, ) for x in d]
self.assertRaises(ValueError, lambda: Quantize.FindVarMultQuantBounds(d2, 1, a, 2))
self.assertRaises(ValueError, lambda: Quantize._FindStartPoints(d2, a, len(d2)))
if __name__ == '__main__':
unittest.main()
| 35.864686
| 100
| 0.541364
| 1,588
| 10,867
| 3.684509
| 0.086272
| 0.019826
| 0.01128
| 0.078961
| 0.81986
| 0.795591
| 0.783969
| 0.777987
| 0.765681
| 0.764656
| 0
| 0.11126
| 0.245698
| 10,867
| 302
| 101
| 35.983444
| 0.602538
| 0.041594
| 0
| 0.758197
| 0
| 0
| 0.053533
| 0
| 0
| 0
| 0
| 0
| 0.159836
| 1
| 0.053279
| false
| 0.004098
| 0.016393
| 0
| 0.07377
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81ca3157d7b9d1e9075efd479f11c98d4b14710f
| 2,846
|
py
|
Python
|
catalyst/contrib/modules/pooling.py
|
ferrine/catalyst
|
b5bc4fb5f692e1fde2d95ef4a534296dccd0f717
|
[
"MIT"
] | null | null | null |
catalyst/contrib/modules/pooling.py
|
ferrine/catalyst
|
b5bc4fb5f692e1fde2d95ef4a534296dccd0f717
|
[
"MIT"
] | null | null | null |
catalyst/contrib/modules/pooling.py
|
ferrine/catalyst
|
b5bc4fb5f692e1fde2d95ef4a534296dccd0f717
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from catalyst.contrib.registry import MODULES
class GlobalAvgPool2d(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x):
h, w = x.shape[2:]
return F.avg_pool2d(input=x, kernel_size=(h, w))
@staticmethod
def out_features(in_features):
return in_features
class GlobalMaxPool2d(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x):
h, w = x.shape[2:]
return F.max_pool2d(input=x, kernel_size=(h, w))
@staticmethod
def out_features(in_features):
return in_features
class GlobalConcatPool2d(nn.Module):
def __init__(self):
super().__init__()
self.avg = GlobalAvgPool2d()
self.max = GlobalMaxPool2d()
def forward(self, x):
return torch.cat([self.avg(x), self.max(x)], 1)
@staticmethod
def out_features(in_features):
return in_features * 2
class GlobalAttnPool2d(nn.Module):
def __init__(self, in_features, activation_fn="Tanh"):
super().__init__()
activation_fn = MODULES.get_if_str(activation_fn)
self.attn = nn.Sequential(
nn.Conv2d(
in_features, 1, kernel_size=1, stride=1, padding=0, bias=False
), activation_fn()
)
def forward(self, x):
h, w = x.shape[2:]
x_a = self.attn(x)
x = x * x_a
x = torch.sum(x, dim=[-2, -1])
return x
@staticmethod
def out_features(in_features):
return in_features
class GlobalAvgAttnPool2d(nn.Module):
def __init__(self, in_features, activation_fn="Tanh"):
super().__init__()
self.avg = GlobalAvgPool2d()
self.attn = GlobalAttnPool2d(in_features, activation_fn)
def forward(self, x):
return torch.cat([self.avg(x), self.attn(x)], 1)
@staticmethod
def out_features(in_features):
return in_features * 2
class GlobalMaxAttnPool2d(nn.Module):
def __init__(self, in_features, activation_fn="Tanh"):
super().__init__()
self.max = GlobalMaxPool2d()
self.attn = GlobalAttnPool2d(in_features, activation_fn)
def forward(self, x):
return torch.cat([self.max(x), self.attn(x)], 1)
@staticmethod
def out_features(in_features):
return in_features * 2
class GlobalConcatAttnPool2d(nn.Module):
def __init__(self, in_features, activation_fn="Tanh"):
super().__init__()
self.avg = GlobalAvgPool2d()
self.max = GlobalMaxPool2d()
self.attn = GlobalAttnPool2d(in_features, activation_fn)
def forward(self, x):
return torch.cat([self.avg(x), self.max(x), self.attn(x)], 1)
@staticmethod
def out_features(in_features):
return in_features * 3
| 25.410714
| 78
| 0.629656
| 360
| 2,846
| 4.688889
| 0.169444
| 0.130332
| 0.045616
| 0.062204
| 0.774882
| 0.774882
| 0.767773
| 0.756517
| 0.725118
| 0.7109
| 0
| 0.016878
| 0.250527
| 2,846
| 111
| 79
| 25.63964
| 0.774496
| 0
| 0
| 0.654321
| 0
| 0
| 0.005622
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.259259
| false
| 0
| 0.049383
| 0.135802
| 0.567901
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
c48c57cff77587b480a497a6da07a2b869653b3d
| 7,787
|
py
|
Python
|
trr265/gbe/rtt/scoring.py
|
hgzech/trr265
|
11807677d782ce5ef9e0e59e10be55f1da4e3371
|
[
"Apache-2.0"
] | null | null | null |
trr265/gbe/rtt/scoring.py
|
hgzech/trr265
|
11807677d782ce5ef9e0e59e10be55f1da4e3371
|
[
"Apache-2.0"
] | 1
|
2021-11-18T16:42:24.000Z
|
2021-11-18T17:11:09.000Z
|
trr265/gbe/rtt/scoring.py
|
hgzech/trr265
|
11807677d782ce5ef9e0e59e10be55f1da4e3371
|
[
"Apache-2.0"
] | null | null | null |
# AUTOGENERATED! DO NOT EDIT! File to edit: notebooks/11_gbe.rtt.scoring.ipynb (unless otherwise specified).
__all__ = ['get_percentage_gamble', 'get_perc_gamble_predicted_sep_r', 'get_perc_gamble_predicted_sep',
'get_perc_gamble_predicted_joint_r', 'get_perc_gamble_predicted_joint', 'get_percentage_gamble',
'get_perc_gamble_predicted_sep_r', 'get_perc_gamble_predicted_sep', 'get_perc_gamble_predicted_joint_r',
'get_perc_gamble_predicted_joint']
# Cell
from .data_provider import RTTDataProvider
import pandas as pd
import numpy as np
from scipy import stats
import biuR.wrapper
# Cell
def get_percentage_gamble(df):
percentage_gamble = df.groupby(['gbe_index','trial_type'])['gambled'].mean().unstack()
percentage_gamble = percentage_gamble.add_prefix('perc_gamble_')
return percentage_gamble
# Cell
def get_perc_gamble_predicted_sep_r(df):
R = biuR.wrapper.R()
p = R("""
library(lmerTest)
library(ggeffects)
# Running the model
control=glmerControl(optimizer = "bobyqa", optCtrl=list(maxfun=1e6))
m = glmer(is_gamble ~ 1 + (1 | participant), data=df, family=binomial, control = control, na.action = na.exclude)
# Extracting predicted values
ggpredict(m, terms=c("participant"), type="re",ci.lvl = NA)
""",push=dict(df=df))
return p
def get_perc_gamble_predicted_sep(df):
df['is_gamble'] = df.gambled.astype(int)
dfs = []
# Looping through trial types
for trial_type in ['win','loss','mixed']:
session_dfs = []
# Looping through sessions
for session in [2,1]:
# Extracting data for specific session and trial type
_df = df.query('(session_number==@session) and (trial_type==@trial_type)')
# Predicting scores
predicted = get_perc_gamble_predicted_sep_r(_df)
# Labeling variables
predicted.columns = ['participant','perc_gamble_sep_%s'%trial_type,'session']
predicted['session'] = session
predicted['gbe_index'] = predicted.participant.astype(str) + '_%03d'%session
predicted = predicted.set_index('gbe_index')['perc_gamble_sep_%s'%trial_type].to_frame()
# Combining everything into one dataframe
session_dfs.append(predicted)
dfs.append(pd.concat(session_dfs))
perc_gamble_sep = pd.concat(dfs, axis = 1)
return perc_gamble_sep
# Cell
def get_perc_gamble_predicted_joint_r(df):
R = biuR.wrapper.R()
p = R("""
library(lmerTest)
library(ggeffects)
# Running the model
control=glmerControl(optimizer = "bobyqa", optCtrl=list(maxfun=1e6))
m = glmer(is_gamble ~ 1 + (1 | participant/session), data=df, family=binomial, control = control, na.action = na.exclude)
# Extracting predicted values
ggpredict(m, terms=c("participant","session"), type="re",ci.lvl = NA)
""",push=dict(df=df))
m = R("""m""")
return p, m
def get_perc_gamble_predicted_joint(df):
    """Predicted gambling probability from one joint model per trial type.

    For each trial type, fits a single nested random-effects model
    (``participant/session``) via ``get_perc_gamble_predicted_joint_r`` and
    collects the predictions into one wide frame.

    Parameters
    ----------
    df : pandas.DataFrame
        Trial-level data with at least ``gambled``, ``participant``,
        ``session_number``, ``trial_type`` and ``gbe_index`` columns.

    Returns
    -------
    tuple
        ``(predictions, models)`` — a DataFrame indexed by ``gbe_index`` with
        one ``perc_gamble_joint_<trial_type>`` column per trial type, and a
        dict mapping trial type to the fitted R model object.
    """
    # Work on a copy so the caller's DataFrame is not mutated by the
    # 'is_gamble' column assignment (the original wrote into the input).
    df = df.copy()
    df['is_gamble'] = df.gambled.astype(int)
    dfs = []
    ms = {}
    # Looping through trial types
    for trial_type in ['win', 'loss', 'mixed']:
        # Extracting data for specific trial type. The explicit .copy()
        # avoids assigning into a view of `df` (SettingWithCopyWarning /
        # potentially lost write in the original).
        _df = df.query('(trial_type==@trial_type)').copy()
        _df['session'] = _df.session_number.astype(str)  # making session a factor
        # Predicting scores
        predicted, m = get_perc_gamble_predicted_joint_r(_df)
        # Labeling variables
        predicted.columns = ['participant', 'perc_gamble_joint_%s' % trial_type, 'session']
        # R may return session as a float-like string, hence int(float(x)).
        predicted['gbe_index'] = predicted.participant.astype(str) + predicted.session.apply(lambda x: '_%03d' % int(float(x))).astype(str)
        predicted = predicted.set_index('gbe_index')['perc_gamble_joint_%s' % trial_type].to_frame()
        # Combining everything into one dataframe
        dfs.append(predicted)
        ms[trial_type] = m
    perc_predicted_sep_trial = pd.concat(dfs, axis=1)
    # Removing sessions that were not in initial dataframe
    perc_predicted_sep_trial = perc_predicted_sep_trial.loc[df.gbe_index.unique()]
    return perc_predicted_sep_trial, ms
# Cell
from .data_provider import RTTDataProvider
import pandas as pd
import numpy as np
from scipy import stats
import biuR.wrapper
# Cell
def get_percentage_gamble(df):
    """Fraction of gambled trials per participant-session and trial type.

    Groups *df* by ``gbe_index`` and ``trial_type``, averages the
    ``gambled`` column within each group, and pivots trial types into
    columns named ``perc_gamble_<trial_type>``.
    """
    per_type = df.groupby(['gbe_index', 'trial_type'])['gambled'].mean()
    return per_type.unstack().add_prefix('perc_gamble_')
# Cell
def get_perc_gamble_predicted_sep_r(df):
    """Fit a random-intercept logistic model in R and return its predictions.

    Pushes *df* into a fresh embedded R session (``biuR``), fits
    ``is_gamble ~ 1 + (1 | participant)`` with ``glmer`` (binomial family,
    bobyqa optimizer, up to 1e6 function evaluations, NAs excluded) and
    returns the per-participant random-effect predictions produced by
    ``ggpredict`` with confidence intervals disabled.
    """
    R = biuR.wrapper.R()
    p = R("""
library(lmerTest)
library(ggeffects)
# Running the model
control=glmerControl(optimizer = "bobyqa", optCtrl=list(maxfun=1e6))
m = glmer(is_gamble ~ 1 + (1 | participant), data=df, family=binomial, control = control, na.action = na.exclude)
# Extracting predicted values
ggpredict(m, terms=c("participant"), type="re",ci.lvl = NA)
""", push=dict(df=df))
    return p
def get_perc_gamble_predicted_sep(df):
    """Per-participant predicted gambling probability, one model per cell.

    Fits a separate random-intercept logistic model (via
    ``get_perc_gamble_predicted_sep_r``) for every combination of session
    and trial type, and assembles the predictions into one wide frame.

    Parameters
    ----------
    df : pandas.DataFrame
        Trial-level data with at least the columns ``gambled``,
        ``participant``, ``session_number`` and ``trial_type``.

    Returns
    -------
    pandas.DataFrame
        Indexed by ``gbe_index`` (``<participant>_<session:03d>``), with one
        ``perc_gamble_sep_<trial_type>`` column per trial type.
    """
    # Work on a copy so the caller's DataFrame is not mutated by the
    # 'is_gamble' column assignment (the original wrote into the input).
    df = df.copy()
    df['is_gamble'] = df.gambled.astype(int)
    dfs = []
    # Looping through trial types
    for trial_type in ['win', 'loss', 'mixed']:
        session_dfs = []
        # Looping through sessions
        for session in [2, 1]:
            # Extracting data for specific session and trial type
            _df = df.query('(session_number==@session) and (trial_type==@trial_type)')
            # Predicting scores
            predicted = get_perc_gamble_predicted_sep_r(_df)
            # Labeling variables
            predicted.columns = ['participant', 'perc_gamble_sep_%s' % trial_type, 'session']
            predicted['session'] = session
            predicted['gbe_index'] = predicted.participant.astype(str) + '_%03d' % session
            predicted = predicted.set_index('gbe_index')['perc_gamble_sep_%s' % trial_type].to_frame()
            # Combining everything into one dataframe
            session_dfs.append(predicted)
        dfs.append(pd.concat(session_dfs))
    perc_gamble_sep = pd.concat(dfs, axis=1)
    return perc_gamble_sep
# Cell
def get_perc_gamble_predicted_joint_r(df):
    """Fit a nested random-intercept logistic model in R; return predictions and model.

    Pushes *df* into a fresh embedded R session (``biuR``), fits
    ``is_gamble ~ 1 + (1 | participant/session)`` with ``glmer`` (binomial
    family, bobyqa optimizer, up to 1e6 function evaluations, NAs excluded)
    and extracts per participant-by-session random-effect predictions via
    ``ggpredict`` with confidence intervals disabled.

    Returns
    -------
    tuple
        ``(p, m)`` where ``p`` is the ``ggpredict`` output pulled back from R
        and ``m`` is the fitted R model object.
    """
    R = biuR.wrapper.R()
    p = R("""
library(lmerTest)
library(ggeffects)
# Running the model
control=glmerControl(optimizer = "bobyqa", optCtrl=list(maxfun=1e6))
m = glmer(is_gamble ~ 1 + (1 | participant/session), data=df, family=binomial, control = control, na.action = na.exclude)
# Extracting predicted values
ggpredict(m, terms=c("participant","session"), type="re",ci.lvl = NA)
""", push=dict(df=df))
    # Pull the fitted model object itself back from the R session.
    m = R("""m""")
    return p, m
def get_perc_gamble_predicted_joint(df):
    """Predicted gambling probability from one joint model per trial type.

    For each trial type, fits a single nested random-effects model
    (``participant/session``) via ``get_perc_gamble_predicted_joint_r`` and
    collects the predictions into one wide frame.

    Parameters
    ----------
    df : pandas.DataFrame
        Trial-level data with at least ``gambled``, ``participant``,
        ``session_number``, ``trial_type`` and ``gbe_index`` columns.

    Returns
    -------
    tuple
        ``(predictions, models)`` — a DataFrame indexed by ``gbe_index`` with
        one ``perc_gamble_joint_<trial_type>`` column per trial type, and a
        dict mapping trial type to the fitted R model object.
    """
    # Work on a copy so the caller's DataFrame is not mutated by the
    # 'is_gamble' column assignment (the original wrote into the input).
    df = df.copy()
    df['is_gamble'] = df.gambled.astype(int)
    dfs = []
    ms = {}
    # Looping through trial types
    for trial_type in ['win', 'loss', 'mixed']:
        # Extracting data for specific trial type. The explicit .copy()
        # avoids assigning into a view of `df` (SettingWithCopyWarning /
        # potentially lost write in the original).
        _df = df.query('(trial_type==@trial_type)').copy()
        _df['session'] = _df.session_number.astype(str)  # making session a factor
        # Predicting scores
        predicted, m = get_perc_gamble_predicted_joint_r(_df)
        # Labeling variables
        predicted.columns = ['participant', 'perc_gamble_joint_%s' % trial_type, 'session']
        # R may return session as a float-like string, hence int(float(x)).
        predicted['gbe_index'] = predicted.participant.astype(str) + predicted.session.apply(lambda x: '_%03d' % int(float(x))).astype(str)
        predicted = predicted.set_index('gbe_index')['perc_gamble_joint_%s' % trial_type].to_frame()
        # Combining everything into one dataframe
        dfs.append(predicted)
        ms[trial_type] = m
    perc_predicted_sep_trial = pd.concat(dfs, axis=1)
    # Removing sessions that were not in initial dataframe
    perc_predicted_sep_trial = perc_predicted_sep_trial.loc[df.gbe_index.unique()]
    return perc_predicted_sep_trial, ms
| 41.420213
| 137
| 0.680236
| 1,015
| 7,787
| 4.967488
| 0.13399
| 0.067434
| 0.051567
| 0.087267
| 0.982547
| 0.982547
| 0.982547
| 0.982547
| 0.982547
| 0.982547
| 0
| 0.005437
| 0.196867
| 7,787
| 187
| 138
| 41.641711
| 0.800768
| 0.130217
| 0
| 0.970588
| 1
| 0.044118
| 0.363177
| 0.121158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073529
| false
| 0
| 0.073529
| 0
| 0.220588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c49ac6902b6e6cfa9cc1d98140a2ea956958341e
| 2,540
|
py
|
Python
|
distributed_energy_management/configuration/configuration_default_generators.py
|
sichu366/Optimization
|
62a6b32b4001930312cc267a4dd54aa3a7df406d
|
[
"MIT"
] | 10
|
2018-12-24T02:17:37.000Z
|
2022-03-19T07:44:21.000Z
|
distributed_energy_management/configuration/configuration_default_generators.py
|
wuyou33/youhua
|
0142cc59a987964ec7d5ae08ccaf5a8af3f592d1
|
[
"MIT"
] | null | null | null |
distributed_energy_management/configuration/configuration_default_generators.py
|
wuyou33/youhua
|
0142cc59a987964ec7d5ae08ccaf5a8af3f592d1
|
[
"MIT"
] | 1
|
2019-09-11T04:40:39.000Z
|
2019-09-11T04:40:39.000Z
|
# Default parameters of generation model.
# NOTE(review): field names resemble MATPOWER-style generator conventions
# (PMAX/PMIN, QMAX/QMIN, VG, RAMP_*, COST_MODEL/NCOST/COST) -- confirm.
import configuration.configuration_time_line as timeline

# Default parameter set for an AC generator.
default_AC_generator_parameters = \
    {
        "AREA": 1,
        "GEN_STATUS": 1,  # The generation status, >0 means available, otherwise unavailable
        "PG": 0,
        "QG": 0,
        "PMAX": 3000,
        "PMIN": 0,
        "QMAX": 3000,
        "QMIN": -3000,
        "SMAX": 3000,
        "VG": 1.0,
        "RAMP_AGC": 3000,
        "RAMP_10": 3000,
        "PF_LIMIT": [-1, 1],
        "APF": 0,  # The droop parameters
        "COST_START_UP": 0,
        "COST_SHUT_DOWN": 0,
        "COST_MODEL": 2,
        "NCOST": 3,
        "COST": [0.01, 2.0, 4.0],
        # Timestamps tie each parameter set to the scheduling time line.
        "TIME_GENERATED": timeline.default_time["Base_time"],
        "TIME_APPLIED": [timeline.default_time["Base_time"], timeline.default_time["Look_ahead_time_uc"]],
        "TIME_COMMANDED": timeline.default_time["Base_time"],
        # Dispatch commands (filled in by the controller at run time).
        "COMMAND_START_UP": 0,
        "COMMAND_SET_POINT_VG": 0,
        "COMMAND_SET_POINT_PG": 0,
        "COMMAND_SET_POINT_QG": 0,
        "COMMAND_RESERVE": 0
    }

# Default parameter set for a DC generator (no reactive-power fields).
default_DC_generator_parameters = \
    {
        "AREA": 1,
        "GEN_STATUS": 1,  # The generation status, >0 means available, otherwise unavailable
        "PG": 0,
        "PMAX": 3000,
        "PMIN": 0,
        "VG": 1.0,
        "RAMP_AGC": 3000,
        "RAMP_10": 3000,
        "APF": 0,  # The droop parameters
        "COST_START_UP": 0,
        "COST_SHUT_DOWN": 0,
        "COST_MODEL": 2,
        "NCOST": 3,
        "COST": [0.01, 2.0, 4.0],
        "TIME_GENERATED": timeline.default_time["Base_time"],
        "TIME_APPLIED": [timeline.default_time["Base_time"], timeline.default_time["Look_ahead_time_uc"]],
        "TIME_COMMANDED": timeline.default_time["Base_time"],
        "COMMAND_START_UP": 0,
        "COMMAND_SET_POINT_VG": 0,
        "COMMAND_SET_POINT_PG": 0,
        "COMMAND_RESERVE": 0
    }

# Default parameter set for a renewable (RES) generator.
default_RES_generator_parameters = \
    {
        "AREA": 1,
        "TYPE": 1,
        "GEN_STATUS": 1,  # The generation status, >0 means available, otherwise unavailable
        "PG": 0,
        "QG": 0,
        # NOTE(review): written as 0000 (== 0) while the other defaults use
        # 3000 -- a zero active-power cap may be intentional for RES
        # (forecast-driven), but confirm it is not a typo.
        "PMAX": 0000,
        "PMIN": 0,
        "QMAX": 3000,
        "QMIN": -3000,
        "SMAX": 3000,
        "COST": 1000,
        "TIME_GENERATED": timeline.default_time["Base_time"],
        "TIME_APPLIED": [timeline.default_time["Base_time"], timeline.default_time["Look_ahead_time_uc"]],
        "TIME_COMMANDED": timeline.default_time["Base_time"],
        "COMMAND_CURT": 0,
        "COMMAND_SET_POINT_PG": 0,
    }
| 32.151899
| 106
| 0.56063
| 304
| 2,540
| 4.378289
| 0.220395
| 0.146506
| 0.1713
| 0.155522
| 0.879038
| 0.837716
| 0.823441
| 0.823441
| 0.779865
| 0.742299
| 0
| 0.066111
| 0.291339
| 2,540
| 79
| 107
| 32.151899
| 0.673333
| 0.108268
| 0
| 0.783784
| 0
| 0
| 0.308544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013514
| 0
| 0.013514
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c4c2eac5df244165131df64cb6afd3fb777b205d
| 816
|
py
|
Python
|
src/tests/test_subscribe.py
|
63phc/lks
|
2d263cf528d5370a1ff480b323cfc8945765a152
|
[
"MIT"
] | 4
|
2018-10-31T19:09:50.000Z
|
2020-09-29T13:16:28.000Z
|
src/tests/test_subscribe.py
|
63phc/lks
|
2d263cf528d5370a1ff480b323cfc8945765a152
|
[
"MIT"
] | 70
|
2018-11-02T13:55:32.000Z
|
2022-03-18T13:06:45.000Z
|
src/tests/test_subscribe.py
|
63phc/lks
|
2d263cf528d5370a1ff480b323cfc8945765a152
|
[
"MIT"
] | 2
|
2018-11-05T09:59:26.000Z
|
2020-08-26T19:37:41.000Z
|
import json
import pytest
@pytest.mark.django_db
@pytest.mark.urls("apps.subscribe.urls")
def test_subscribe(client):
    """First subscription with a valid email returns 201; repeating it returns 400."""
    payload = json.dumps({"email": "hello@world.com"})

    def subscribe():
        return client.post(
            "/subscribe/", data=payload, content_type="application/json"
        ).status_code

    assert subscribe() == 201
    assert subscribe() == 400
@pytest.mark.django_db
@pytest.mark.urls("apps.subscribe.urls")
def test_subscribe_bad_data_email(client):
    """A malformed email address (no @) is rejected with 400."""
    response = client.post(
        "/subscribe/",
        data=json.dumps({"email": "helloworld.com"}),
        content_type="application/json",
    )
    assert response.status_code == 400
| 21.473684
| 81
| 0.583333
| 89
| 816
| 5.202247
| 0.325843
| 0.086393
| 0.103672
| 0.161987
| 0.812095
| 0.812095
| 0.812095
| 0.812095
| 0.812095
| 0.812095
| 0
| 0.015177
| 0.273284
| 816
| 37
| 82
| 22.054054
| 0.765599
| 0
| 0
| 0.625
| 0
| 0
| 0.193627
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 1
| 0.0625
| false
| 0
| 0.0625
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c4da3f8ba73cf51d8c3cff959dc53b2d0617b5cf
| 20,616
|
py
|
Python
|
lp-src/lp.py
|
nce11/deepcover
|
129488e3593f8d69e352be1e613f44480e4033e6
|
[
"BSD-3-Clause"
] | 25
|
2018-03-14T21:23:00.000Z
|
2021-11-22T14:06:20.000Z
|
lp-src/lp.py
|
nce11/deepcover
|
129488e3593f8d69e352be1e613f44480e4033e6
|
[
"BSD-3-Clause"
] | 1
|
2018-12-17T03:52:57.000Z
|
2018-12-17T03:52:57.000Z
|
lp-src/lp.py
|
nce11/deepcover
|
129488e3593f8d69e352be1e613f44480e4033e6
|
[
"BSD-3-Clause"
] | 18
|
2018-03-14T19:20:45.000Z
|
2022-02-16T18:33:10.000Z
|
import cplex
import random
from util import *
from nnett import *
import sys
def rp_ssc(I, J, K, nnet, X, act):
    """Build and solve an LP searching for a nearby input with a sign-sign change.

    Minimises a distance bound d (0<=d<=1) such that some input x with
    |x - X|_inf <= d and 0<=x<=1 flips the ReLU sign of neuron (I, J) and of
    neuron (I+1, K), while every other modelled neuron keeps its recorded
    sign. (Appears to encode the SS coverage condition of DeepCover --
    confirm against the paper.)

    Parameters: I, J -- layer/neuron index of the condition neuron;
    K -- neuron index in layer I+1; nnet -- network object with .weights and
    .biases; X -- flat input vector in [0,1]; act -- per-layer activations
    of the network on X.

    Returns (True, new_x, d, num_vars, num_constraints) when a valid
    perturbed input is found, else (False, [], -1, -1, -1).

    NOTE(review): this is Python 2 code (print statements below).
    """
    # LP variables: the distance 'd' (minimised) plus one free variable per
    # modelled neuron value.
    var_names=['d']
    objective=[1]
    lower_bounds=[0.0]
    upper_bounds=[1.0]
    N=len(act) # #layers
    for i in range(0, N):
        if i>I+1: break  # layers beyond I+1 are not modelled
        M=len(act[i]) # #neurons at layer i
        for j in range(0, M):
            # in layer I+1 only the decision neuron K is modelled
            if i==I+1 and j!=K: continue
            var_names.append('x_'+str(i)+'_'+str(j))
            objective.append(0)
            lower_bounds.append(-cplex.infinity)
            upper_bounds.append(cplex.infinity)
    constraints=[]
    rhs=[]
    constraint_senses=[]
    constraint_names=[]
    # Input-box constraints: X[i]-d <= x_i <= X[i]+d and 0 <= x_i <= 1.
    for i in range(0, len(X)):
        # x<=x0+d
        constraints.append([[0, i+1], [-1, 1]])
        rhs.append(X[i])
        constraint_senses.append("L")
        constraint_names.append("x<=x"+str(i)+"+d")
        # x>=x0-d
        constraints.append([[0, i+1], [1, 1]])
        rhs.append(X[i])
        constraint_senses.append("G")
        constraint_names.append("x>=x"+str(i)+"-d")
        # x<=1
        constraints.append([[i+1], [1]])
        rhs.append(1.0)
        constraint_senses.append("L")
        constraint_names.append("x<=1")
        # x>=0
        constraints.append([[i+1], [1]])
        rhs.append(0.0)
        constraint_senses.append("G")
        constraint_names.append("x>=0")
    # there is nothing to constrain for layer 0
    # and we start from layer 1
    # the last layer shall be handled individually
    for i in range(1, I+2):
        M=len(act[i]) # number of neurons at layer i
        for j in range(0, M):
            #### for layer (I+1) we only need to access one neuron
            if i==I+1 and j!=K: continue
            # Affine constraint x_{i,j} = sum_k w_k * x_{i-1,k} + b. The
            # contribution of neuron (I, J) is wired as if its activation
            # state were flipped: zeroed where it was active, passed through
            # where it was inactive.
            constraint=[[],[]]
            constraint[0].append("x_"+str(i)+"_"+str(j))
            constraint[1].append(-1)
            for k in range(0, len(act[i-1])):
                constraint[0].append("x_"+str(i-1)+"_"+str(k))
                if i==1 or act[i-1][k]>0:
                    if not (i-1==I and k==J):
                        constraint[1].append(nnet.weights[i-1][k][j])
                    else:
                        constraint[1].append(0)
                else:
                    if not (i-1==I and k==J):
                        constraint[1].append(0)
                    else:
                        constraint[1].append(nnet.weights[i-1][k][j])
            constraints.append(constraint)
            rhs.append(-nnet.biases[i][j])
            constraint_senses.append("E")
            constraint_names.append("eq:"+"x_"+str(i)+"_"+str(j))
            ###### ReLU
            if i<N-1:
                _constraint=[[],[]]
                _constraint[0].append("x_"+str(i)+"_"+str(j))
                _constraint[1].append(1)
                constraints.append(_constraint)
                rhs.append(0)
                # All neurons keep their recorded sign except (I,J) and
                # (I+1,K), whose signs are required to flip.
                if not( (i==I and j==J) or (i==I+1 and j==K) ):
                    if act[i][j]>0:
                        constraint_senses.append("G")
                    else:
                        constraint_senses.append("L")
                    constraint_names.append("relu:"+"x_"+str(i)+"_"+str(j))
                else:
                    if act[i][j]>0:
                        constraint_senses.append("L")
                    else:
                        constraint_senses.append("G")
                    constraint_names.append("not relu:"+"x_"+str(i)+"_"+str(j))
    if I==N-2: # I+1==N-1
        #### Now, we are at the output layer
        #### x_{N-1, K}>=x_{N-1,old_label}
        # NOTE(review): label is unused -- the comparison against the old
        # label is commented out below.
        label=np.argmax(act[N-1])
        for i in range(0, len(act[N-1])):
            if i!=K: continue
            constraint=[[],[]]
            constraint[0].append("x_"+str(N-1)+"_"+str(i))
            constraint[1].append(1)
            #constraint[0].append("x_"+str(N-1)+"_"+str(label))
            #constraint[1].append(-1)
            constraints.append(constraint)
            rhs.append(0.0)
            if act[N-1][K]>0:
                constraint_senses.append("L")
            else:
                constraint_senses.append("G")
            constraint_names.append("not K")
    ###### solve
    try:
        problem=cplex.Cplex()
        problem.variables.add(obj = objective,
                              lb = lower_bounds,
                              ub = upper_bounds,
                              names = var_names)
        problem.linear_constraints.add(lin_expr=constraints,
                                       senses = constraint_senses,
                                       rhs = rhs,
                                       names = constraint_names)
        problem.solve()
        ####
        d=problem.solution.get_values("d")
        new_x=[]
        for i in range(0, len(X)):
            v=(problem.solution.get_values('x_0_'+str(i)))
            # NOTE(review): '_' is undefined here -- this return raises
            # NameError, which the bare except below converts into the
            # failure tuple. Same for the d==0/d==1 return.
            if v<0 or v>1: return False, _, _
            new_x.append(v)
        if d==0 or d==1:
            return False, _, _, _, _
        #print problem.variables.get_num(), problem.linear_constraints.get_num()
        return True, new_x, d, problem.variables.get_num(), problem.linear_constraints.get_num()
    except:
        # Any solver failure (or the NameErrors above) is reported as failure.
        return False,[],-1, -1, -1
    # NOTE(review): everything below is unreachable -- both branches above
    # return. Leftover debugging code.
    try:
        d=problem.solution.get_values("d")
        print 'd is {0}'.format(d)
        new_x=[]
        #for i in len(X):
        # new_x.append(problem.solution.get_values('x_0_'+str(i)))
        #return True, new_x, d
    except:
        print 'Exception for feasible model???'
        sys.exit(0)
def rp_dsc(I, J, nnet, X, act):
    """Build and solve an LP searching for a nearby input flipping one neuron's sign.

    Minimises d (0<=d<=1) such that some input x with |x - X|_inf <= d and
    0<=x<=1 changes the ReLU sign of neuron (I, J) while every other
    modelled neuron up to layer I keeps its recorded sign. (Appears to
    encode a single-neuron sign-change condition -- confirm against the
    DeepCover paper.)

    Returns (True, new_x, d) when a valid perturbed input is found, else
    (False, [], -1).

    NOTE(review): this is Python 2 code (print statements below).
    """
    # LP variables: the distance 'd' plus one variable per neuron in every
    # layer (unlike rp_ssc, no early break at layer I+1).
    var_names=['d']
    objective=[1]
    lower_bounds=[0.0]
    upper_bounds=[1.0]
    N=len(act) # #layers
    for i in range(0, N):
        M=len(act[i]) # #neurons at layer i
        for j in range(0, M):
            var_names.append('x_'+str(i)+'_'+str(j))
            objective.append(0)
            lower_bounds.append(-cplex.infinity)
            upper_bounds.append(cplex.infinity)
    constraints=[]
    rhs=[]
    constraint_senses=[]
    constraint_names=[]
    # Input-box constraints: X[i]-d <= x_i <= X[i]+d and 0 <= x_i <= 1.
    for i in range(0, len(X)):
        # x<=x0+d
        constraints.append([[0, i+1], [-1, 1]])
        rhs.append(X[i])
        constraint_senses.append("L")
        constraint_names.append("x<=x"+str(i)+"+d")
        # x>=x0-d
        constraints.append([[0, i+1], [1, 1]])
        rhs.append(X[i])
        constraint_senses.append("G")
        constraint_names.append("x>=x"+str(i)+"-d")
        # x<=1
        constraints.append([[i+1], [1]])
        rhs.append(1.0)
        constraint_senses.append("L")
        constraint_names.append("x<=1")
        # x>=0
        constraints.append([[i+1], [1]])
        rhs.append(0.0)
        constraint_senses.append("G")
        constraint_names.append("x>=0")
    # there is nothing to constrain for layer 0
    # and we start from layer 1
    # the last layer shall be handled individually
    for i in range(1, I+1):
        M=len(act[i]) # number of neurons at layer i
        for j in range(0, M):
            #### in layer I we only need to access the target neuron J
            if i==I and j!=J: continue
            # Affine constraint x_{i,j} = sum_k w_k * x_{i-1,k} + b, with
            # weights gated by the recorded activation pattern.
            constraint=[[],[]]
            constraint[0].append("x_"+str(i)+"_"+str(j))
            constraint[1].append(-1)
            for k in range(0, len(act[i-1])):
                constraint[0].append("x_"+str(i-1)+"_"+str(k))
                if i==1 or act[i-1][k]>0:
                    constraint[1].append(nnet.weights[i-1][k][j])
                else:
                    constraint[1].append(0)
            constraints.append(constraint)
            rhs.append(-nnet.biases[i][j])
            constraint_senses.append("E")
            constraint_names.append("eq:"+"x_"+str(i)+"_"+str(j))
            ###### ReLU
            if i<N-1:
                _constraint=[[],[]]
                _constraint[0].append("x_"+str(i)+"_"+str(j))
                _constraint[1].append(1)
                constraints.append(_constraint)
                if not(i==I and j==J):
                    # non-target neurons keep their recorded sign
                    rhs.append(0)
                    if act[i][j]>0:
                        constraint_senses.append("G")
                    else:
                        constraint_senses.append("L")
                    constraint_names.append("relu:"+"x_"+str(i)+"_"+str(j))
                else:
                    ## target neuron (I, J): require the opposite sign
                    rhs.append(0)
                    if act[i][j]>0:
                        constraint_senses.append("L")
                    else:
                        constraint_senses.append("G")
                    constraint_names.append("relu:"+"x_"+str(i)+"_"+str(j))
    if I==N-1: # target neuron sits in the output layer
        #### Now, we are at the output layer
        #### x_{N-1, K}>=x_{N-1,old_label}
        # NOTE(review): label is unused here.
        label=np.argmax(act[N-1])
        for i in range(0, len(act[N-1])):
            if i!=J: continue
            constraint=[[],[]]
            constraint[0].append("x_"+str(N-1)+"_"+str(i))
            constraint[1].append(1)
            constraints.append(constraint)
            ##1) require the output neuron's sign to flip
            rhs.append(0)
            if act[I][J]>0:
                constraint_senses.append("L")
            else:
                constraint_senses.append("G")
            constraint_names.append("relu sign:"+"x_"+str(I)+"_"+str(J))
    ###### solve
    try:
        problem=cplex.Cplex()
        problem.variables.add(obj = objective,
                              lb = lower_bounds,
                              ub = upper_bounds,
                              names = var_names)
        problem.linear_constraints.add(lin_expr=constraints,
                                       senses = constraint_senses,
                                       rhs = rhs,
                                       names = constraint_names)
        problem.solve()
        ####
        d=problem.solution.get_values("d")
        new_x=[]
        for i in range(0, len(X)):
            v=(problem.solution.get_values('x_0_'+str(i)))
            # NOTE(review): '_' is undefined -- this raises NameError, which
            # the bare except below turns into the failure tuple.
            if v<0 or v>1: return False, _, _
            new_x.append(v)
        if d==0 or d==1:
            return False, _, _
        return True, new_x, d
    except:
        # Any solver failure (or the NameErrors above) is reported as failure.
        return False,[],-1
    # NOTE(review): everything below is unreachable leftover debugging code.
    try:
        d=problem.solution.get_values("d")
        print 'd is {0}'.format(d)
        new_x=[]
        #for i in len(X):
        # new_x.append(problem.solution.get_values('x_0_'+str(i)))
        #return True, new_x, d
    except:
        print 'Exception for feasible model???'
        sys.exit(0)
def rp_svc(I, J, K, nnet, X, act, sfactor):
    """Build and solve an LP for a sign-value change between two neurons.

    Like rp_ssc, but the decision neuron (I+1, K) keeps its sign while its
    value is required to scale by *sfactor* (>=sfactor*old when sfactor>1,
    <=sfactor*old when sfactor<1, direction mirrored for negative
    activations). Neuron (I, J) must flip sign. (Appears to encode the SV
    coverage condition -- confirm against the DeepCover paper.)

    Returns (True, new_x, d) on success, else (False, [], -1).

    NOTE(review): this is Python 2 code (print statements below).
    """
    # LP variables: distance 'd' plus one variable per neuron in every layer.
    var_names=['d']
    objective=[1]
    lower_bounds=[0.0]
    upper_bounds=[1.0]
    N=len(act) # #layers
    for i in range(0, N):
        M=len(act[i]) # #neurons at layer i
        for j in range(0, M):
            var_names.append('x_'+str(i)+'_'+str(j))
            objective.append(0)
            lower_bounds.append(-cplex.infinity)
            upper_bounds.append(cplex.infinity)
    constraints=[]
    rhs=[]
    constraint_senses=[]
    constraint_names=[]
    # Input-box constraints: X[i]-d <= x_i <= X[i]+d and 0 <= x_i <= 1.
    for i in range(0, len(X)):
        # x<=x0+d
        constraints.append([[0, i+1], [-1, 1]])
        rhs.append(X[i])
        constraint_senses.append("L")
        constraint_names.append("x<=x"+str(i)+"+d")
        # x>=x0-d
        constraints.append([[0, i+1], [1, 1]])
        rhs.append(X[i])
        constraint_senses.append("G")
        constraint_names.append("x>=x"+str(i)+"-d")
        # x<=1
        constraints.append([[i+1], [1]])
        rhs.append(1.0)
        constraint_senses.append("L")
        constraint_names.append("x<=1")
        # x>=0
        constraints.append([[i+1], [1]])
        rhs.append(0.0)
        constraint_senses.append("G")
        constraint_names.append("x>=0")
    # there is nothing to constrain for layer 0
    # and we start from layer 1
    # the last layer shall be handled individually
    for i in range(1, I+2):
        M=len(act[i]) # number of neurons at layer i
        for j in range(0, M):
            #### for layer (I+1) we only need to access one neuron
            if i==I+1 and j!=K: continue
            # Affine constraint with the (I, J) contribution wired as if its
            # activation state were flipped (cf. rp_ssc).
            constraint=[[],[]]
            constraint[0].append("x_"+str(i)+"_"+str(j))
            constraint[1].append(-1)
            for k in range(0, len(act[i-1])):
                constraint[0].append("x_"+str(i-1)+"_"+str(k))
                if i==1 or act[i-1][k]>0:
                    if not (i-1==I and k==J):
                        constraint[1].append(nnet.weights[i-1][k][j])
                    else:
                        constraint[1].append(0)
                else:
                    if not (i-1==I and k==J):
                        constraint[1].append(0)
                    else:
                        constraint[1].append(nnet.weights[i-1][k][j])
            constraints.append(constraint)
            rhs.append(-nnet.biases[i][j])
            constraint_senses.append("E")
            constraint_names.append("eq:"+"x_"+str(i)+"_"+str(j))
            ###### ReLU
            if i<N-1:
                _constraint=[[],[]]
                _constraint[0].append("x_"+str(i)+"_"+str(j))
                _constraint[1].append(1)
                constraints.append(_constraint)
                if not( (i==I and j==J) or (i==I+1 and j==K) ):
                    # other neurons keep their recorded sign
                    rhs.append(0)
                    if act[i][j]>0:
                        constraint_senses.append("G")
                    else:
                        constraint_senses.append("L")
                    constraint_names.append("relu:"+"x_"+str(i)+"_"+str(j))
                elif (i==I and j==J): #Activation change
                    rhs.append(0)
                    if act[i][j]>0:
                        constraint_senses.append("L")
                    else:
                        constraint_senses.append("G")
                    constraint_names.append("not relu:"+"x_"+str(i)+"_"+str(j))
                else: ## I+1, K
                    ## ReLU sign does not change
                    rhs.append(0)
                    if act[i][j]>0:
                        constraint_senses.append("G")
                    else:
                        constraint_senses.append("L")
                    constraint_names.append("relu:"+"x_"+str(i)+"_"+str(j))
                    ## ReLU value changed
                    _constraint=[[],[]]
                    _constraint[0].append("x_"+str(i)+"_"+str(j))
                    _constraint[1].append(1)
                    constraints.append(_constraint)
                    rhs.append(sfactor*act[I+1][K])
                    if act[i][j]>0:
                        if sfactor>1.0:
                            constraint_senses.append("G")
                        else:
                            constraint_senses.append("L")
                    else:
                        if sfactor>1.0:
                            constraint_senses.append("L")
                        else:
                            constraint_senses.append("G")
                    constraint_names.append("relu value change:"+"x_"+str(i)+"_"+str(j))
    if I==N-2: # I+1==N-1
        #### Now, we are at the output layer
        #### x_{N-1, K}>=x_{N-1,old_label}
        # NOTE(review): label is unused here.
        label=np.argmax(act[N-1])
        for i in range(0, len(act[N-1])):
            if i!=K: continue
            constraint=[[],[]]
            constraint[0].append("x_"+str(N-1)+"_"+str(i))
            constraint[1].append(1)
            constraints.append(constraint)
            ##1) ReLU sign does not change
            rhs.append(0)
            if act[I+1][K]>0:
                constraint_senses.append("G")
            else:
                constraint_senses.append("L")
            constraint_names.append("relu sign:"+"x_"+str(I+1)+"_"+str(K))
            ## ReLU value changed
            _constraint=[[],[]]
            _constraint[0].append("x_"+str(I+1)+"_"+str(K))
            _constraint[1].append(1)
            constraints.append(_constraint)
            rhs.append(sfactor*act[I+1][K])
            if act[I+1][K]>0:
                if sfactor>1.0:
                    constraint_senses.append("G")
                else:
                    constraint_senses.append("L")
            else:
                if sfactor>1.0:
                    constraint_senses.append("L")
                else:
                    constraint_senses.append("G")
            constraint_names.append("relu value change:"+"x_"+str(I+1)+"_"+str(K))
    ###### solve
    try:
        problem=cplex.Cplex()
        problem.variables.add(obj = objective,
                              lb = lower_bounds,
                              ub = upper_bounds,
                              names = var_names)
        problem.linear_constraints.add(lin_expr=constraints,
                                       senses = constraint_senses,
                                       rhs = rhs,
                                       names = constraint_names)
        problem.solve()
        ####
        d=problem.solution.get_values("d")
        new_x=[]
        for i in range(0, len(X)):
            v=(problem.solution.get_values('x_0_'+str(i)))
            # NOTE(review): '_' is undefined -- raises NameError, caught by
            # the bare except below and reported as failure.
            if v<0 or v>1: return False, _, _
            new_x.append(v)
        if d==0 or d==1:
            return False, _, _
        return True, new_x, d
    except:
        return False,[],-1
    # NOTE(review): everything below is unreachable leftover debugging code.
    try:
        d=problem.solution.get_values("d")
        print 'd is {0}'.format(d)
        new_x=[]
        #for i in len(X):
        # new_x.append(problem.solution.get_values('x_0_'+str(i)))
        #return True, new_x, d
    except:
        print 'Exception for feasible model???'
        sys.exit(0)
def rp_dvc(I, J, nnet, X, act, sfactor):
    """Build and solve an LP for a value change of a single neuron.

    Like rp_dsc, but neuron (I, J) keeps its recorded sign while its value
    is required to scale by *sfactor* relative to its recorded activation
    (direction of the inequality depends on the sign of the activation and
    on whether sfactor exceeds 1). All other modelled neurons keep their
    signs. (Appears to encode a value-change coverage condition -- confirm
    against the DeepCover paper.)

    Returns (True, new_x, d) on success, else (False, [], -1).

    NOTE(review): this is Python 2 code (print statements below).
    """
    # LP variables: distance 'd' plus one variable per neuron in every layer.
    var_names=['d']
    objective=[1]
    lower_bounds=[0.0]
    upper_bounds=[1.0]
    N=len(act) # #layers
    for i in range(0, N):
        M=len(act[i]) # #neurons at layer i
        for j in range(0, M):
            var_names.append('x_'+str(i)+'_'+str(j))
            objective.append(0)
            lower_bounds.append(-cplex.infinity)
            upper_bounds.append(cplex.infinity)
    constraints=[]
    rhs=[]
    constraint_senses=[]
    constraint_names=[]
    # Input-box constraints: X[i]-d <= x_i <= X[i]+d and 0 <= x_i <= 1.
    for i in range(0, len(X)):
        # x<=x0+d
        constraints.append([[0, i+1], [-1, 1]])
        rhs.append(X[i])
        constraint_senses.append("L")
        constraint_names.append("x<=x"+str(i)+"+d")
        # x>=x0-d
        constraints.append([[0, i+1], [1, 1]])
        rhs.append(X[i])
        constraint_senses.append("G")
        constraint_names.append("x>=x"+str(i)+"-d")
        # x<=1
        constraints.append([[i+1], [1]])
        rhs.append(1.0)
        constraint_senses.append("L")
        constraint_names.append("x<=1")
        # x>=0
        constraints.append([[i+1], [1]])
        rhs.append(0.0)
        constraint_senses.append("G")
        constraint_names.append("x>=0")
    # there is nothing to constrain for layer 0
    # and we start from layer 1
    # the last layer shall be handled individually
    for i in range(1, I+1):
        M=len(act[i]) # number of neurons at layer i
        for j in range(0, M):
            #### in layer I we only need to access the target neuron J
            if i==I and j!=J: continue
            # Affine constraint x_{i,j} = sum_k w_k * x_{i-1,k} + b, with
            # weights gated by the recorded activation pattern.
            constraint=[[],[]]
            constraint[0].append("x_"+str(i)+"_"+str(j))
            constraint[1].append(-1)
            for k in range(0, len(act[i-1])):
                constraint[0].append("x_"+str(i-1)+"_"+str(k))
                if i==1 or act[i-1][k]>0:
                    constraint[1].append(nnet.weights[i-1][k][j])
                else:
                    constraint[1].append(0)
            constraints.append(constraint)
            rhs.append(-nnet.biases[i][j])
            constraint_senses.append("E")
            constraint_names.append("eq:"+"x_"+str(i)+"_"+str(j))
            ###### ReLU
            if i<N-1:
                _constraint=[[],[]]
                _constraint[0].append("x_"+str(i)+"_"+str(j))
                _constraint[1].append(1)
                constraints.append(_constraint)
                if not(i==I and j==J):
                    # non-target neurons keep their recorded sign
                    rhs.append(0)
                    if act[i][j]>0:
                        constraint_senses.append("G")
                    else:
                        constraint_senses.append("L")
                    constraint_names.append("relu:"+"x_"+str(i)+"_"+str(j))
                else: ## target neuron (I, J)
                    ## ReLU sign does not change
                    rhs.append(0)
                    if act[i][j]>0:
                        constraint_senses.append("G")
                    else:
                        constraint_senses.append("L")
                    constraint_names.append("relu:"+"x_"+str(i)+"_"+str(j))
                    ## ReLU value changed
                    _constraint=[[],[]]
                    _constraint[0].append("x_"+str(i)+"_"+str(j))
                    _constraint[1].append(1)
                    constraints.append(_constraint)
                    rhs.append(sfactor*act[I][J])
                    if act[i][j]>0:
                        if sfactor>1.0:
                            constraint_senses.append("G")
                        else:
                            constraint_senses.append("L")
                    else:
                        if sfactor>1.0:
                            constraint_senses.append("L")
                        else:
                            constraint_senses.append("G")
                    constraint_names.append("relu value change:"+"x_"+str(i)+"_"+str(j))
    if I==N-1: # target neuron sits in the output layer
        #### Now, we are at the output layer
        #### x_{N-1, K}>=x_{N-1,old_label}
        # NOTE(review): label is unused here.
        label=np.argmax(act[N-1])
        for i in range(0, len(act[N-1])):
            if i!=J: continue
            constraint=[[],[]]
            constraint[0].append("x_"+str(N-1)+"_"+str(i))
            constraint[1].append(1)
            constraints.append(constraint)
            ##1) ReLU sign does not change
            rhs.append(0)
            if act[I][J]>0:
                constraint_senses.append("G")
            else:
                constraint_senses.append("L")
            constraint_names.append("relu sign:"+"x_"+str(I)+"_"+str(J))
            ## ReLU value changed
            _constraint=[[],[]]
            _constraint[0].append("x_"+str(I)+"_"+str(J))
            _constraint[1].append(1)
            constraints.append(_constraint)
            rhs.append(sfactor*act[I][J])
            if act[I][J]>0:
                if sfactor>1.0:
                    constraint_senses.append("G")
                else:
                    constraint_senses.append("L")
            else:
                if sfactor>1.0:
                    constraint_senses.append("L")
                else:
                    constraint_senses.append("G")
            constraint_names.append("relu value change:"+"x_"+str(I)+"_"+str(J))
    ###### solve
    try:
        problem=cplex.Cplex()
        problem.variables.add(obj = objective,
                              lb = lower_bounds,
                              ub = upper_bounds,
                              names = var_names)
        problem.linear_constraints.add(lin_expr=constraints,
                                       senses = constraint_senses,
                                       rhs = rhs,
                                       names = constraint_names)
        problem.solve()
        ####
        d=problem.solution.get_values("d")
        new_x=[]
        for i in range(0, len(X)):
            v=(problem.solution.get_values('x_0_'+str(i)))
            # NOTE(review): '_' is undefined -- raises NameError, caught by
            # the bare except below and reported as failure.
            if v<0 or v>1: return False, _, _
            new_x.append(v)
        if d==0 or d==1:
            return False, _, _
        return True, new_x, d
    except:
        return False,[],-1
    # NOTE(review): everything below is unreachable leftover debugging code.
    try:
        d=problem.solution.get_values("d")
        print 'd is {0}'.format(d)
        new_x=[]
        #for i in len(X):
        # new_x.append(problem.solution.get_values('x_0_'+str(i)))
        #return True, new_x, d
    except:
        print 'Exception for feasible model???'
        sys.exit(0)
| 29.706052
| 92
| 0.538756
| 2,936
| 20,616
| 3.654632
| 0.041894
| 0.10438
| 0.12712
| 0.024604
| 0.987139
| 0.984716
| 0.982759
| 0.982759
| 0.978658
| 0.9685
| 0
| 0.027388
| 0.284488
| 20,616
| 693
| 93
| 29.748918
| 0.70002
| 0.102251
| 0
| 0.961183
| 0
| 0
| 0.034428
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.009242
| null | null | 0.014787
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c4edd680c81a91af87d5fc6a78c4910468fd92ef
| 655,850
|
py
|
Python
|
UFO_models/SMEFTsim_MFV_alphaScheme_UFO/vertices.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 4
|
2020-12-29T03:42:43.000Z
|
2021-09-22T09:57:37.000Z
|
UFO_models/SMEFTsim_MFV_alphaScheme_UFO/vertices.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 3
|
2021-05-19T11:06:59.000Z
|
2021-12-11T00:12:02.000Z
|
UFO_models/SMEFTsim_MFV_alphaScheme_UFO/vertices.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 4
|
2021-09-22T09:57:39.000Z
|
2022-03-29T16:09:36.000Z
|
# This file was automatically created by FeynRules 2.3.35
# Mathematica version: 12.1.0 for Linux x86 (64-bit) (March 18, 2020)
# Date: Mon 11 Jan 2021 01:44:33
from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L
V_1 = Vertex(name = 'V_1',
particles = [ P.a, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVV1, L.VVV4, L.VVV5, L.VVV7 ],
couplings = {(0,3):C.GC_178,(0,1):C.GC_3,(0,0):C.GC_385,(0,2):C.GC_393})
V_2 = Vertex(name = 'V_2',
particles = [ P.a, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_402})
V_3 = Vertex(name = 'V_3',
particles = [ P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV2, L.VVV3, L.VVV4, L.VVV6, L.VVV7 ],
couplings = {(0,4):C.GC_55,(0,2):C.GC_119,(0,0):C.GC_399,(0,3):C.GC_345,(0,1):C.GC_322})
V_4 = Vertex(name = 'V_4',
particles = [ P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_400})
V_5 = Vertex(name = 'V_5',
particles = [ P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_391})
V_6 = Vertex(name = 'V_6',
particles = [ P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_392})
V_7 = Vertex(name = 'V_7',
particles = [ P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_394})
V_8 = Vertex(name = 'V_8',
particles = [ P.g, P.g, P.g ],
color = [ 'f(1,2,3)' ],
lorentz = [ L.VVV4, L.VVV7 ],
couplings = {(0,1):C.GC_15,(0,0):C.GC_7})
V_9 = Vertex(name = 'V_9',
particles = [ P.g, P.g, P.g, P.g ],
color = [ 'f(-1,1,2)*f(3,4,-1)', 'f(-1,1,3)*f(2,4,-1)', 'f(-1,1,4)*f(2,3,-1)' ],
lorentz = [ L.VVVV1, L.VVVV2, L.VVVV4, L.VVVV5, L.VVVV7, L.VVVV8 ],
couplings = {(0,1):C.GC_61,(1,5):C.GC_61,(2,4):C.GC_61,(1,2):C.GC_8,(0,0):C.GC_8,(2,3):C.GC_8})
V_10 = Vertex(name = 'V_10',
particles = [ P.g, P.g, P.g, P.g, P.g ],
color = [ 'f(-2,1,2)*f(-1,-2,3)*f(4,5,-1)', 'f(-2,1,2)*f(-1,-2,4)*f(3,5,-1)', 'f(-2,1,2)*f(-1,-2,5)*f(3,4,-1)', 'f(-2,1,3)*f(-1,-2,2)*f(4,5,-1)', 'f(-2,1,3)*f(-1,-2,4)*f(2,5,-1)', 'f(-2,1,3)*f(-1,-2,5)*f(2,4,-1)', 'f(-2,1,4)*f(-1,-2,2)*f(3,5,-1)', 'f(-2,1,4)*f(-1,-2,3)*f(2,5,-1)', 'f(-2,1,4)*f(-1,-2,5)*f(2,3,-1)', 'f(-2,1,5)*f(-1,-2,2)*f(3,4,-1)', 'f(-2,1,5)*f(-1,-2,3)*f(2,4,-1)', 'f(-2,1,5)*f(-1,-2,4)*f(2,3,-1)', 'f(-2,2,3)*f(-1,-2,1)*f(4,5,-1)', 'f(-2,2,3)*f(-1,-2,4)*f(1,5,-1)', 'f(-2,2,3)*f(-1,-2,5)*f(1,4,-1)', 'f(-2,2,4)*f(-1,-2,1)*f(3,5,-1)', 'f(-2,2,4)*f(-1,-2,3)*f(1,5,-1)', 'f(-2,2,4)*f(-1,-2,5)*f(1,3,-1)', 'f(-2,2,5)*f(-1,-2,1)*f(3,4,-1)', 'f(-2,2,5)*f(-1,-2,3)*f(1,4,-1)', 'f(-2,2,5)*f(-1,-2,4)*f(1,3,-1)', 'f(-2,3,4)*f(-1,-2,1)*f(2,5,-1)', 'f(-2,3,4)*f(-1,-2,2)*f(1,5,-1)', 'f(-2,3,4)*f(-1,-2,5)*f(1,2,-1)', 'f(-2,3,5)*f(-1,-2,1)*f(2,4,-1)', 'f(-2,3,5)*f(-1,-2,2)*f(1,4,-1)', 'f(-2,3,5)*f(-1,-2,4)*f(1,2,-1)', 'f(-2,4,5)*f(-1,-2,1)*f(2,3,-1)', 'f(-2,4,5)*f(-1,-2,2)*f(1,3,-1)', 'f(-2,4,5)*f(-1,-2,3)*f(1,2,-1)' ],
lorentz = [ L.VVVVV1, L.VVVVV10, L.VVVVV11, L.VVVVV12, L.VVVVV13, L.VVVVV14, L.VVVVV15, L.VVVVV17, L.VVVVV18, L.VVVVV2, L.VVVVV4, L.VVVVV5, L.VVVVV6, L.VVVVV7, L.VVVVV8 ],
couplings = {(24,11):C.GC_64,(21,12):C.GC_63,(18,12):C.GC_64,(15,11):C.GC_63,(28,9):C.GC_64,(22,2):C.GC_64,(9,2):C.GC_63,(3,9):C.GC_63,(29,10):C.GC_64,(16,3):C.GC_64,(10,3):C.GC_63,(0,10):C.GC_63,(26,6):C.GC_63,(20,5):C.GC_63,(4,5):C.GC_64,(1,6):C.GC_64,(25,1):C.GC_64,(6,1):C.GC_63,(19,4):C.GC_64,(7,4):C.GC_63,(23,8):C.GC_63,(17,7):C.GC_63,(5,7):C.GC_64,(2,8):C.GC_64,(27,0):C.GC_64,(12,0):C.GC_63,(13,13):C.GC_64,(11,13):C.GC_63,(14,14):C.GC_63,(8,14):C.GC_64})
V_11 = Vertex(name = 'V_11',
particles = [ P.g, P.g, P.g, P.g, P.g, P.g ],
color = [ 'f(-3,1,2)*f(-2,3,4)*f(-1,-2,-3)*f(5,6,-1)', 'f(-3,1,2)*f(-2,3,5)*f(-1,-2,-3)*f(4,6,-1)', 'f(-3,1,2)*f(-2,3,6)*f(-1,-2,-3)*f(4,5,-1)', 'f(-3,1,2)*f(-2,4,5)*f(-1,-2,-3)*f(3,6,-1)', 'f(-3,1,2)*f(-2,4,6)*f(-1,-2,-3)*f(3,5,-1)', 'f(-3,1,2)*f(-2,5,6)*f(-1,-2,-3)*f(3,4,-1)', 'f(-3,1,3)*f(-2,2,4)*f(-1,-2,-3)*f(5,6,-1)', 'f(-3,1,3)*f(-2,2,5)*f(-1,-2,-3)*f(4,6,-1)', 'f(-3,1,3)*f(-2,2,6)*f(-1,-2,-3)*f(4,5,-1)', 'f(-3,1,3)*f(-2,4,5)*f(-1,-2,-3)*f(2,6,-1)', 'f(-3,1,3)*f(-2,4,6)*f(-1,-2,-3)*f(2,5,-1)', 'f(-3,1,3)*f(-2,5,6)*f(-1,-2,-3)*f(2,4,-1)', 'f(-3,1,4)*f(-2,2,3)*f(-1,-2,-3)*f(5,6,-1)', 'f(-3,1,4)*f(-2,2,5)*f(-1,-2,-3)*f(3,6,-1)', 'f(-3,1,4)*f(-2,2,6)*f(-1,-2,-3)*f(3,5,-1)', 'f(-3,1,4)*f(-2,3,5)*f(-1,-2,-3)*f(2,6,-1)', 'f(-3,1,4)*f(-2,3,6)*f(-1,-2,-3)*f(2,5,-1)', 'f(-3,1,4)*f(-2,5,6)*f(-1,-2,-3)*f(2,3,-1)', 'f(-3,1,5)*f(-2,2,3)*f(-1,-2,-3)*f(4,6,-1)', 'f(-3,1,5)*f(-2,2,4)*f(-1,-2,-3)*f(3,6,-1)', 'f(-3,1,5)*f(-2,2,6)*f(-1,-2,-3)*f(3,4,-1)', 'f(-3,1,5)*f(-2,3,4)*f(-1,-2,-3)*f(2,6,-1)', 'f(-3,1,5)*f(-2,3,6)*f(-1,-2,-3)*f(2,4,-1)', 'f(-3,1,5)*f(-2,4,6)*f(-1,-2,-3)*f(2,3,-1)', 'f(-3,1,6)*f(-2,2,3)*f(-1,-2,-3)*f(4,5,-1)', 'f(-3,1,6)*f(-2,2,4)*f(-1,-2,-3)*f(3,5,-1)', 'f(-3,1,6)*f(-2,2,5)*f(-1,-2,-3)*f(3,4,-1)', 'f(-3,1,6)*f(-2,3,4)*f(-1,-2,-3)*f(2,5,-1)', 'f(-3,1,6)*f(-2,3,5)*f(-1,-2,-3)*f(2,4,-1)', 'f(-3,1,6)*f(-2,4,5)*f(-1,-2,-3)*f(2,3,-1)', 'f(-3,2,3)*f(-2,1,4)*f(-1,-2,-3)*f(5,6,-1)', 'f(-3,2,3)*f(-2,1,5)*f(-1,-2,-3)*f(4,6,-1)', 'f(-3,2,3)*f(-2,1,6)*f(-1,-2,-3)*f(4,5,-1)', 'f(-3,2,3)*f(-2,4,5)*f(-1,-2,-3)*f(1,6,-1)', 'f(-3,2,3)*f(-2,4,6)*f(-1,-2,-3)*f(1,5,-1)', 'f(-3,2,3)*f(-2,5,6)*f(-1,-2,-3)*f(1,4,-1)', 'f(-3,2,4)*f(-2,1,3)*f(-1,-2,-3)*f(5,6,-1)', 'f(-3,2,4)*f(-2,1,5)*f(-1,-2,-3)*f(3,6,-1)', 'f(-3,2,4)*f(-2,1,6)*f(-1,-2,-3)*f(3,5,-1)', 'f(-3,2,4)*f(-2,3,5)*f(-1,-2,-3)*f(1,6,-1)', 'f(-3,2,4)*f(-2,3,6)*f(-1,-2,-3)*f(1,5,-1)', 'f(-3,2,4)*f(-2,5,6)*f(-1,-2,-3)*f(1,3,-1)', 'f(-3,2,5)*f(-2,1,3)*f(-1,-2,-3)*f(4,6,-1)', 'f(-3,2,5)*f(-2,1,4)*f(-1,-2,-3)*f(3,6,-1)', 
'f(-3,2,5)*f(-2,1,6)*f(-1,-2,-3)*f(3,4,-1)', 'f(-3,2,5)*f(-2,3,4)*f(-1,-2,-3)*f(1,6,-1)', 'f(-3,2,5)*f(-2,3,6)*f(-1,-2,-3)*f(1,4,-1)', 'f(-3,2,5)*f(-2,4,6)*f(-1,-2,-3)*f(1,3,-1)', 'f(-3,2,6)*f(-2,1,3)*f(-1,-2,-3)*f(4,5,-1)', 'f(-3,2,6)*f(-2,1,4)*f(-1,-2,-3)*f(3,5,-1)', 'f(-3,2,6)*f(-2,1,5)*f(-1,-2,-3)*f(3,4,-1)', 'f(-3,2,6)*f(-2,3,4)*f(-1,-2,-3)*f(1,5,-1)', 'f(-3,2,6)*f(-2,3,5)*f(-1,-2,-3)*f(1,4,-1)', 'f(-3,2,6)*f(-2,4,5)*f(-1,-2,-3)*f(1,3,-1)', 'f(-3,3,4)*f(-2,1,2)*f(-1,-2,-3)*f(5,6,-1)', 'f(-3,3,4)*f(-2,1,5)*f(-1,-2,-3)*f(2,6,-1)', 'f(-3,3,4)*f(-2,1,6)*f(-1,-2,-3)*f(2,5,-1)', 'f(-3,3,4)*f(-2,2,5)*f(-1,-2,-3)*f(1,6,-1)', 'f(-3,3,4)*f(-2,2,6)*f(-1,-2,-3)*f(1,5,-1)', 'f(-3,3,4)*f(-2,5,6)*f(-1,-2,-3)*f(1,2,-1)', 'f(-3,3,5)*f(-2,1,2)*f(-1,-2,-3)*f(4,6,-1)', 'f(-3,3,5)*f(-2,1,4)*f(-1,-2,-3)*f(2,6,-1)', 'f(-3,3,5)*f(-2,1,6)*f(-1,-2,-3)*f(2,4,-1)', 'f(-3,3,5)*f(-2,2,4)*f(-1,-2,-3)*f(1,6,-1)', 'f(-3,3,5)*f(-2,2,6)*f(-1,-2,-3)*f(1,4,-1)', 'f(-3,3,5)*f(-2,4,6)*f(-1,-2,-3)*f(1,2,-1)', 'f(-3,3,6)*f(-2,1,2)*f(-1,-2,-3)*f(4,5,-1)', 'f(-3,3,6)*f(-2,1,4)*f(-1,-2,-3)*f(2,5,-1)', 'f(-3,3,6)*f(-2,1,5)*f(-1,-2,-3)*f(2,4,-1)', 'f(-3,3,6)*f(-2,2,4)*f(-1,-2,-3)*f(1,5,-1)', 'f(-3,3,6)*f(-2,2,5)*f(-1,-2,-3)*f(1,4,-1)', 'f(-3,3,6)*f(-2,4,5)*f(-1,-2,-3)*f(1,2,-1)', 'f(-3,4,5)*f(-2,1,2)*f(-1,-2,-3)*f(3,6,-1)', 'f(-3,4,5)*f(-2,1,3)*f(-1,-2,-3)*f(2,6,-1)', 'f(-3,4,5)*f(-2,1,6)*f(-1,-2,-3)*f(2,3,-1)', 'f(-3,4,5)*f(-2,2,3)*f(-1,-2,-3)*f(1,6,-1)', 'f(-3,4,5)*f(-2,2,6)*f(-1,-2,-3)*f(1,3,-1)', 'f(-3,4,5)*f(-2,3,6)*f(-1,-2,-3)*f(1,2,-1)', 'f(-3,4,6)*f(-2,1,2)*f(-1,-2,-3)*f(3,5,-1)', 'f(-3,4,6)*f(-2,1,3)*f(-1,-2,-3)*f(2,5,-1)', 'f(-3,4,6)*f(-2,1,5)*f(-1,-2,-3)*f(2,3,-1)', 'f(-3,4,6)*f(-2,2,3)*f(-1,-2,-3)*f(1,5,-1)', 'f(-3,4,6)*f(-2,2,5)*f(-1,-2,-3)*f(1,3,-1)', 'f(-3,4,6)*f(-2,3,5)*f(-1,-2,-3)*f(1,2,-1)', 'f(-3,5,6)*f(-2,1,2)*f(-1,-2,-3)*f(3,4,-1)', 'f(-3,5,6)*f(-2,1,3)*f(-1,-2,-3)*f(2,4,-1)', 'f(-3,5,6)*f(-2,1,4)*f(-1,-2,-3)*f(2,3,-1)', 'f(-3,5,6)*f(-2,2,3)*f(-1,-2,-3)*f(1,4,-1)', 
'f(-3,5,6)*f(-2,2,4)*f(-1,-2,-3)*f(1,3,-1)', 'f(-3,5,6)*f(-2,3,4)*f(-1,-2,-3)*f(1,2,-1)' ],
lorentz = [ L.VVVVVV1, L.VVVVVV10, L.VVVVVV11, L.VVVVVV12, L.VVVVVV13, L.VVVVVV14, L.VVVVVV15, L.VVVVVV16, L.VVVVVV2, L.VVVVVV3, L.VVVVVV4, L.VVVVVV5, L.VVVVVV6, L.VVVVVV7, L.VVVVVV9 ],
couplings = {(65,10):C.GC_66,(71,12):C.GC_67,(77,12):C.GC_66,(83,10):C.GC_67,(41,8):C.GC_66,(53,2):C.GC_66,(76,2):C.GC_67,(88,8):C.GC_67,(35,9):C.GC_66,(52,5):C.GC_66,(64,5):C.GC_67,(87,9):C.GC_67,(34,4):C.GC_67,(40,3):C.GC_67,(69,3):C.GC_66,(81,4):C.GC_66,(17,9):C.GC_67,(23,4):C.GC_66,(80,4):C.GC_67,(86,9):C.GC_66,(11,8):C.GC_67,(22,3):C.GC_66,(68,3):C.GC_67,(85,8):C.GC_66,(9,2):C.GC_67,(15,5):C.GC_67,(61,5):C.GC_66,(73,2):C.GC_66,(4,10):C.GC_67,(14,5):C.GC_66,(49,5):C.GC_67,(78,10):C.GC_66,(3,12):C.GC_66,(19,3):C.GC_67,(37,3):C.GC_66,(72,12):C.GC_67,(2,12):C.GC_67,(8,2):C.GC_66,(48,2):C.GC_67,(66,12):C.GC_66,(1,10):C.GC_66,(18,4):C.GC_67,(31,4):C.GC_66,(60,10):C.GC_67,(6,8):C.GC_66,(12,9):C.GC_66,(30,9):C.GC_67,(36,8):C.GC_67,(47,14):C.GC_66,(82,14):C.GC_67,(46,6):C.GC_66,(70,6):C.GC_67,(33,7):C.GC_67,(39,1):C.GC_67,(63,1):C.GC_66,(75,7):C.GC_66,(29,7):C.GC_66,(74,7):C.GC_67,(28,1):C.GC_66,(62,1):C.GC_67,(10,14):C.GC_67,(16,6):C.GC_67,(67,6):C.GC_66,(79,14):C.GC_66,(25,1):C.GC_67,(38,1):C.GC_66,(13,6):C.GC_66,(43,6):C.GC_67,(24,7):C.GC_67,(32,7):C.GC_66,(7,14):C.GC_66,(42,14):C.GC_67,(59,0):C.GC_66,(89,0):C.GC_67,(51,11):C.GC_66,(58,11):C.GC_67,(21,11):C.GC_67,(55,11):C.GC_66,(5,0):C.GC_67,(20,11):C.GC_66,(50,11):C.GC_67,(84,0):C.GC_66,(0,0):C.GC_66,(54,0):C.GC_67,(45,13):C.GC_67,(57,13):C.GC_66,(27,13):C.GC_66,(56,13):C.GC_67,(26,13):C.GC_67,(44,13):C.GC_66})
V_12 = Vertex(name = 'V_12',
particles = [ P.a, P.a, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV10, L.VVVV3 ],
couplings = {(0,0):C.GC_179,(0,1):C.GC_5})
V_13 = Vertex(name = 'V_13',
particles = [ P.a, P.a, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_404})
V_14 = Vertex(name = 'V_14',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6, L.VVVV9 ],
couplings = {(0,1):C.GC_58,(0,0):C.GC_120})
V_15 = Vertex(name = 'V_15',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_401})
V_16 = Vertex(name = 'V_16',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_396})
V_17 = Vertex(name = 'V_17',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_397})
V_18 = Vertex(name = 'V_18',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_398})
V_19 = Vertex(name = 'V_19',
particles = [ P.a, P.a, P.a, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVVV3 ],
couplings = {(0,0):C.GC_180})
V_20 = Vertex(name = 'V_20',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV10, L.VVVV3 ],
couplings = {(0,0):C.GC_142,(0,1):C.GC_82})
V_21 = Vertex(name = 'V_21',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_323})
V_22 = Vertex(name = 'V_22',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_326})
V_23 = Vertex(name = 'V_23',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_329})
V_24 = Vertex(name = 'V_24',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_387})
V_25 = Vertex(name = 'V_25',
particles = [ P.a, P.a, P.W__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVVV19 ],
couplings = {(0,0):C.GC_60})
V_26 = Vertex(name = 'V_26',
particles = [ P.a, P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVVV21 ],
couplings = {(0,0):C.GC_145})
V_27 = Vertex(name = 'V_27',
particles = [ P.a, P.a, P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVVVV8 ],
couplings = {(0,0):C.GC_147})
V_28 = Vertex(name = 'V_28',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV10, L.VVVV3 ],
couplings = {(0,0):C.GC_143,(0,1):C.GC_84})
V_29 = Vertex(name = 'V_29',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_324})
V_30 = Vertex(name = 'V_30',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_327})
V_31 = Vertex(name = 'V_31',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_328})
V_32 = Vertex(name = 'V_32',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_403})
V_33 = Vertex(name = 'V_33',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVVV9 ],
couplings = {(0,0):C.GC_146})
V_34 = Vertex(name = 'V_34',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVVV16 ],
couplings = {(0,0):C.GC_88})
V_35 = Vertex(name = 'V_35',
particles = [ P.a, P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVVVV17 ],
couplings = {(0,0):C.GC_90})
V_36 = Vertex(name = 'V_36',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVVVV8 ],
couplings = {(0,0):C.GC_80})
V_37 = Vertex(name = 'V_37',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVVV20 ],
couplings = {(0,0):C.GC_89})
V_38 = Vertex(name = 'V_38',
particles = [ P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSS1, L.SSSS2, L.SSSS3 ],
couplings = {(0,0):C.GC_9,(0,2):C.GC_17,(0,1):C.GC_18})
V_39 = Vertex(name = 'V_39',
particles = [ P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_282})
V_40 = Vertex(name = 'V_40',
particles = [ P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_283})
V_41 = Vertex(name = 'V_41',
particles = [ P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_284})
V_42 = Vertex(name = 'V_42',
particles = [ P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_285})
V_43 = Vertex(name = 'V_43',
particles = [ P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_286})
V_44 = Vertex(name = 'V_44',
particles = [ P.H, P.H, P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSSSS1 ],
couplings = {(0,0):C.GC_16})
V_45 = Vertex(name = 'V_45',
particles = [ P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSS1, L.SSS2, L.SSS3 ],
couplings = {(0,0):C.GC_227,(0,2):C.GC_229,(0,1):C.GC_230})
V_46 = Vertex(name = 'V_46',
particles = [ P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_375})
V_47 = Vertex(name = 'V_47',
particles = [ P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_376})
V_48 = Vertex(name = 'V_48',
particles = [ P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_377})
V_49 = Vertex(name = 'V_49',
particles = [ P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_378})
V_50 = Vertex(name = 'V_50',
particles = [ P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_379})
V_51 = Vertex(name = 'V_51',
particles = [ P.H, P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.SSSSS1 ],
couplings = {(0,0):C.GC_228})
V_52 = Vertex(name = 'V_52',
particles = [ P.a, P.a, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS4 ],
couplings = {(0,0):C.GC_49})
V_53 = Vertex(name = 'V_53',
particles = [ P.a, P.a, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS4 ],
couplings = {(0,0):C.GC_176})
V_54 = Vertex(name = 'V_54',
particles = [ P.a, P.a, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS4 ],
couplings = {(0,0):C.GC_187})
V_55 = Vertex(name = 'V_55',
particles = [ P.a, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_190})
V_56 = Vertex(name = 'V_56',
particles = [ P.a, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_233})
V_57 = Vertex(name = 'V_57',
particles = [ P.a, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_278})
V_58 = Vertex(name = 'V_58',
particles = [ P.a, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_281})
V_59 = Vertex(name = 'V_59',
particles = [ P.g, P.g, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.VVSS4 ],
couplings = {(0,0):C.GC_19})
V_60 = Vertex(name = 'V_60',
particles = [ P.g, P.g, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.VVS4, L.VVS5, L.VVS6, L.VVS7 ],
couplings = {(0,0):C.GC_191,(0,2):C.GC_204,(0,1):C.GC_200,(0,3):C.GC_195})
V_61 = Vertex(name = 'V_61',
particles = [ P.g, P.g, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_231})
V_62 = Vertex(name = 'V_62',
particles = [ P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2, L.VVSS4 ],
couplings = {(0,1):C.GC_20,(0,0):C.GC_81})
V_63 = Vertex(name = 'V_63',
particles = [ P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_287})
V_64 = Vertex(name = 'V_64',
particles = [ P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_318})
V_65 = Vertex(name = 'V_65',
particles = [ P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_384})
V_66 = Vertex(name = 'V_66',
particles = [ P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_325})
V_67 = Vertex(name = 'V_67',
particles = [ P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_386})
V_68 = Vertex(name = 'V_68',
particles = [ P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2, L.VVS4 ],
couplings = {(0,1):C.GC_232,(0,0):C.GC_243})
V_69 = Vertex(name = 'V_69',
particles = [ P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_380})
V_70 = Vertex(name = 'V_70',
particles = [ P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_405})
V_71 = Vertex(name = 'V_71',
particles = [ P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_406})
V_72 = Vertex(name = 'V_72',
particles = [ P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_407})
V_73 = Vertex(name = 'V_73',
particles = [ P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_408})
V_74 = Vertex(name = 'V_74',
particles = [ P.a, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS4 ],
couplings = {(0,0):C.GC_188})
V_75 = Vertex(name = 'V_75',
particles = [ P.a, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS4 ],
couplings = {(0,0):C.GC_174})
V_76 = Vertex(name = 'V_76',
particles = [ P.a, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS4 ],
couplings = {(0,0):C.GC_175})
V_77 = Vertex(name = 'V_77',
particles = [ P.a, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_194})
V_78 = Vertex(name = 'V_78',
particles = [ P.a, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_382})
V_79 = Vertex(name = 'V_79',
particles = [ P.a, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_276})
V_80 = Vertex(name = 'V_80',
particles = [ P.a, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_277})
V_81 = Vertex(name = 'V_81',
particles = [ P.Z, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS1, L.VVSS2, L.VVSS3, L.VVSS4 ],
couplings = {(0,3):C.GC_50,(0,0):C.GC_177,(0,1):C.GC_83,(0,2):C.GC_189})
V_82 = Vertex(name = 'V_82',
particles = [ P.Z, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2, L.VVSS4 ],
couplings = {(0,1):C.GC_186,(0,0):C.GC_288})
V_83 = Vertex(name = 'V_83',
particles = [ P.Z, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_388})
V_84 = Vertex(name = 'V_84',
particles = [ P.Z, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_389})
V_85 = Vertex(name = 'V_85',
particles = [ P.Z, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_390})
V_86 = Vertex(name = 'V_86',
particles = [ P.Z, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_395})
V_87 = Vertex(name = 'V_87',
particles = [ P.Z, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS1, L.VVS2, L.VVS3, L.VVS4 ],
couplings = {(0,3):C.GC_234,(0,0):C.GC_279,(0,1):C.GC_244,(0,2):C.GC_383})
V_88 = Vertex(name = 'V_88',
particles = [ P.Z, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2, L.VVS4 ],
couplings = {(0,1):C.GC_280,(0,0):C.GC_381})
V_89 = Vertex(name = 'V_89',
particles = [ P.Z, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_409})
V_90 = Vertex(name = 'V_90',
particles = [ P.Z, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_410})
V_91 = Vertex(name = 'V_91',
particles = [ P.Z, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_411})
V_92 = Vertex(name = 'V_92',
particles = [ P.Z, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_412})
V_93 = Vertex(name = 'V_93',
particles = [ P.g, P.g, P.g, P.H, P.H ],
color = [ 'f(1,2,3)' ],
lorentz = [ L.VVVSS3 ],
couplings = {(0,0):C.GC_62})
V_94 = Vertex(name = 'V_94',
particles = [ P.g, P.g, P.g, P.H ],
color = [ 'f(1,2,3)' ],
lorentz = [ L.VVVS3, L.VVVS4, L.VVVS5, L.VVVS6, L.VVVS7 ],
couplings = {(0,2):C.GC_196,(0,4):C.GC_205,(0,3):C.GC_201,(0,1):C.GC_198,(0,0):C.GC_192})
V_95 = Vertex(name = 'V_95',
particles = [ P.g, P.g, P.g, P.H ],
color = [ 'f(1,2,3)' ],
lorentz = [ L.VVVS3 ],
couplings = {(0,0):C.GC_238})
V_96 = Vertex(name = 'V_96',
particles = [ P.g, P.g, P.g, P.g, P.H, P.H ],
color = [ 'f(-1,1,2)*f(3,4,-1)', 'f(-1,1,3)*f(2,4,-1)', 'f(-1,1,4)*f(2,3,-1)' ],
lorentz = [ L.VVVVSS1, L.VVVVSS3, L.VVVVSS4 ],
couplings = {(1,1):C.GC_65,(0,0):C.GC_65,(2,2):C.GC_65})
V_97 = Vertex(name = 'V_97',
particles = [ P.g, P.g, P.g, P.g, P.H ],
color = [ 'f(-1,1,2)*f(3,4,-1)', 'f(-1,1,3)*f(2,4,-1)', 'f(-1,1,4)*f(2,3,-1)' ],
lorentz = [ L.VVVVS1, L.VVVVS10, L.VVVVS11, L.VVVVS12, L.VVVVS13, L.VVVVS14, L.VVVVS15, L.VVVVS16, L.VVVVS17, L.VVVVS19, L.VVVVS2, L.VVVVS3, L.VVVVS4, L.VVVVS7, L.VVVVS8 ],
couplings = {(2,5):C.GC_197,(2,8):C.GC_206,(1,4):C.GC_197,(1,9):C.GC_206,(2,6):C.GC_203,(0,11):C.GC_199,(0,12):C.GC_207,(1,7):C.GC_203,(0,3):C.GC_202,(1,2):C.GC_199,(2,1):C.GC_199,(0,10):C.GC_197,(1,13):C.GC_193,(0,0):C.GC_193,(2,14):C.GC_193})
V_98 = Vertex(name = 'V_98',
particles = [ P.g, P.g, P.g, P.g, P.H ],
color = [ 'f(-1,1,2)*f(3,4,-1)', 'f(-1,1,3)*f(2,4,-1)', 'f(-1,1,4)*f(2,3,-1)' ],
lorentz = [ L.VVVVS1, L.VVVVS7, L.VVVVS8 ],
couplings = {(1,1):C.GC_239,(0,0):C.GC_239,(2,2):C.GC_239})
V_99 = Vertex(name = 'V_99',
particles = [ P.a, P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVVSS1, L.VVVSS3 ],
couplings = {(0,1):C.GC_56,(0,0):C.GC_141})
V_100 = Vertex(name = 'V_100',
particles = [ P.a, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVVS1, L.VVVS3 ],
couplings = {(0,1):C.GC_235,(0,0):C.GC_274})
V_101 = Vertex(name = 'V_101',
particles = [ P.a, P.a, P.W__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVSS2 ],
couplings = {(0,0):C.GC_59})
V_102 = Vertex(name = 'V_102',
particles = [ P.a, P.a, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVS6 ],
couplings = {(0,0):C.GC_237})
V_103 = Vertex(name = 'V_103',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVVSS2, L.VVVSS3 ],
couplings = {(0,1):C.GC_140,(0,0):C.GC_57})
V_104 = Vertex(name = 'V_104',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVVS2, L.VVVS3 ],
couplings = {(0,1):C.GC_273,(0,0):C.GC_236})
V_105 = Vertex(name = 'V_105',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVSS2 ],
couplings = {(0,0):C.GC_85})
V_106 = Vertex(name = 'V_106',
particles = [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVS6 ],
couplings = {(0,0):C.GC_245})
V_107 = Vertex(name = 'V_107',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVSS5 ],
couplings = {(0,0):C.GC_144})
V_108 = Vertex(name = 'V_108',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVS9 ],
couplings = {(0,0):C.GC_275})
V_109 = Vertex(name = 'V_109',
particles = [ P.Z, P.Z, P.H, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSSSS1 ],
couplings = {(0,0):C.GC_86})
V_110 = Vertex(name = 'V_110',
particles = [ P.Z, P.Z, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSSS1 ],
couplings = {(0,0):C.GC_246})
V_111 = Vertex(name = 'V_111',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVSS2 ],
couplings = {(0,0):C.GC_87})
V_112 = Vertex(name = 'V_112',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.VVVVS6 ],
couplings = {(0,0):C.GC_247})
V_113 = Vertex(name = 'V_113',
particles = [ P.H, P.H, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_72})
V_114 = Vertex(name = 'V_114',
particles = [ P.H, P.H, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_77})
V_115 = Vertex(name = 'V_115',
particles = [ P.H, P.H1, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_78})
V_116 = Vertex(name = 'V_116',
particles = [ P.H1, P.H1, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.SSSS1 ],
couplings = {(0,0):C.GC_79})
V_117 = Vertex(name = 'V_117',
particles = [ P.H, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_240})
V_118 = Vertex(name = 'V_118',
particles = [ P.H, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_241})
V_119 = Vertex(name = 'V_119',
particles = [ P.H1, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.SSS1 ],
couplings = {(0,0):C.GC_242})
V_120 = Vertex(name = 'V_120',
particles = [ P.a, P.W__minus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_69})
V_121 = Vertex(name = 'V_121',
particles = [ P.a, P.W1__minus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_74})
V_122 = Vertex(name = 'V_122',
particles = [ P.a, P.W1__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_69})
V_123 = Vertex(name = 'V_123',
particles = [ P.W__minus__, P.W1__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_91})
V_124 = Vertex(name = 'V_124',
particles = [ P.W__minus__, P.W1__plus__, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_95})
V_125 = Vertex(name = 'V_125',
particles = [ P.W__minus__, P.W1__plus__, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_99})
V_126 = Vertex(name = 'V_126',
particles = [ P.W__minus__, P.W1__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_248})
V_127 = Vertex(name = 'V_127',
particles = [ P.W__minus__, P.W1__plus__, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_250})
V_128 = Vertex(name = 'V_128',
particles = [ P.a, P.a, P.W__minus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_70})
V_129 = Vertex(name = 'V_129',
particles = [ P.W__minus__, P.W1__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_160})
V_130 = Vertex(name = 'V_130',
particles = [ P.W__minus__, P.W1__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_166})
V_131 = Vertex(name = 'V_131',
particles = [ P.W1__minus__, P.W1__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_95})
V_132 = Vertex(name = 'V_132',
particles = [ P.W1__minus__, P.W1__plus__, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_99})
V_133 = Vertex(name = 'V_133',
particles = [ P.W1__minus__, P.W1__plus__, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_103})
V_134 = Vertex(name = 'V_134',
particles = [ P.W1__minus__, P.W1__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_250})
V_135 = Vertex(name = 'V_135',
particles = [ P.W1__minus__, P.W1__plus__, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_252})
V_136 = Vertex(name = 'V_136',
particles = [ P.a, P.a, P.W1__minus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_75})
V_137 = Vertex(name = 'V_137',
particles = [ P.W1__minus__, P.W1__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_166})
V_138 = Vertex(name = 'V_138',
particles = [ P.W1__minus__, P.W1__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_169})
V_139 = Vertex(name = 'V_139',
particles = [ P.W__minus__, P.W__minus__, P.W1__plus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_96})
V_140 = Vertex(name = 'V_140',
particles = [ P.W__minus__, P.W1__minus__, P.W1__plus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_100})
V_141 = Vertex(name = 'V_141',
particles = [ P.W1__minus__, P.W1__minus__, P.W1__plus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_104})
V_142 = Vertex(name = 'V_142',
particles = [ P.W__minus__, P.W__plus__, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_91})
V_143 = Vertex(name = 'V_143',
particles = [ P.W__minus__, P.W__plus__, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_95})
V_144 = Vertex(name = 'V_144',
particles = [ P.W__minus__, P.W__plus__, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_248})
V_145 = Vertex(name = 'V_145',
particles = [ P.W__minus__, P.W__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_160})
V_146 = Vertex(name = 'V_146',
particles = [ P.W1__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_91})
V_147 = Vertex(name = 'V_147',
particles = [ P.W1__minus__, P.W__plus__, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_95})
V_148 = Vertex(name = 'V_148',
particles = [ P.W1__minus__, P.W__plus__, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_99})
V_149 = Vertex(name = 'V_149',
particles = [ P.W1__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_248})
V_150 = Vertex(name = 'V_150',
particles = [ P.W1__minus__, P.W__plus__, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_250})
V_151 = Vertex(name = 'V_151',
particles = [ P.a, P.a, P.W1__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_70})
V_152 = Vertex(name = 'V_152',
particles = [ P.W1__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_160})
V_153 = Vertex(name = 'V_153',
particles = [ P.W1__minus__, P.W__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVV4 ],
couplings = {(0,0):C.GC_166})
V_154 = Vertex(name = 'V_154',
particles = [ P.W__minus__, P.W__minus__, P.W1__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_92})
V_155 = Vertex(name = 'V_155',
particles = [ P.W__minus__, P.W1__minus__, P.W1__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_96})
V_156 = Vertex(name = 'V_156',
particles = [ P.W1__minus__, P.W1__minus__, P.W1__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_100})
V_157 = Vertex(name = 'V_157',
particles = [ P.W__minus__, P.W1__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_92})
V_158 = Vertex(name = 'V_158',
particles = [ P.W1__minus__, P.W1__minus__, P.W__plus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_96})
V_159 = Vertex(name = 'V_159',
particles = [ P.a, P.W__minus__, P.W1__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_161})
V_160 = Vertex(name = 'V_160',
particles = [ P.a, P.W1__minus__, P.W1__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_167})
V_161 = Vertex(name = 'V_161',
particles = [ P.a, P.W1__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_161})
V_162 = Vertex(name = 'V_162',
particles = [ P.Z, P.Z, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_93})
V_163 = Vertex(name = 'V_163',
particles = [ P.Z, P.Z, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_97})
V_164 = Vertex(name = 'V_164',
particles = [ P.Z, P.Z, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_249})
V_165 = Vertex(name = 'V_165',
particles = [ P.W__minus__, P.W1__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_94})
V_166 = Vertex(name = 'V_166',
particles = [ P.W1__minus__, P.W1__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_98})
V_167 = Vertex(name = 'V_167',
particles = [ P.W1__minus__, P.W__plus__, P.Z, P.Z ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_94})
V_168 = Vertex(name = 'V_168',
particles = [ P.a, P.W__minus__, P.W1__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_167})
V_169 = Vertex(name = 'V_169',
particles = [ P.a, P.W1__minus__, P.W1__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_170})
V_170 = Vertex(name = 'V_170',
particles = [ P.a, P.W__minus__, P.W__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_161})
V_171 = Vertex(name = 'V_171',
particles = [ P.a, P.W1__minus__, P.W__plus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV6 ],
couplings = {(0,0):C.GC_167})
V_172 = Vertex(name = 'V_172',
particles = [ P.Z, P.Z1, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_93})
V_173 = Vertex(name = 'V_173',
particles = [ P.Z, P.Z1, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_97})
V_174 = Vertex(name = 'V_174',
particles = [ P.Z, P.Z1, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_101})
V_175 = Vertex(name = 'V_175',
particles = [ P.Z, P.Z1, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_249})
V_176 = Vertex(name = 'V_176',
particles = [ P.Z, P.Z1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_251})
V_177 = Vertex(name = 'V_177',
particles = [ P.W__minus__, P.W1__plus__, P.Z, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_98})
V_178 = Vertex(name = 'V_178',
particles = [ P.W1__minus__, P.W1__plus__, P.Z, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_102})
V_179 = Vertex(name = 'V_179',
particles = [ P.W__minus__, P.W__plus__, P.Z, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_94})
V_180 = Vertex(name = 'V_180',
particles = [ P.W1__minus__, P.W__plus__, P.Z, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_98})
V_181 = Vertex(name = 'V_181',
particles = [ P.Z1, P.Z1, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_97})
V_182 = Vertex(name = 'V_182',
particles = [ P.Z1, P.Z1, P.H, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_101})
V_183 = Vertex(name = 'V_183',
particles = [ P.Z1, P.Z1, P.H1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVSS2 ],
couplings = {(0,0):C.GC_105})
V_184 = Vertex(name = 'V_184',
particles = [ P.Z1, P.Z1, P.H ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_251})
V_185 = Vertex(name = 'V_185',
particles = [ P.Z1, P.Z1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS2 ],
couplings = {(0,0):C.GC_253})
V_186 = Vertex(name = 'V_186',
particles = [ P.W__minus__, P.W1__plus__, P.Z1, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_102})
V_187 = Vertex(name = 'V_187',
particles = [ P.W1__minus__, P.W1__plus__, P.Z1, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_106})
V_188 = Vertex(name = 'V_188',
particles = [ P.W__minus__, P.W__plus__, P.Z1, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_98})
V_189 = Vertex(name = 'V_189',
particles = [ P.W1__minus__, P.W__plus__, P.Z1, P.Z1 ],
color = [ '1' ],
lorentz = [ L.VVVV3 ],
couplings = {(0,0):C.GC_102})
V_190 = Vertex(name = 'V_190',
particles = [ P.e__plus__, P.e__minus__, P.a ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_4,(0,1):C.GC_584})
V_191 = Vertex(name = 'V_191',
particles = [ P.e__plus__, P.e__minus__, P.a ],
color = [ '1' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_592})
V_192 = Vertex(name = 'V_192',
particles = [ P.mu__plus__, P.mu__minus__, P.a ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_4,(0,1):C.GC_641})
V_193 = Vertex(name = 'V_193',
particles = [ P.mu__plus__, P.mu__minus__, P.a ],
color = [ '1' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_649})
V_194 = Vertex(name = 'V_194',
particles = [ P.ta__plus__, P.ta__minus__, P.a ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_4,(0,1):C.GC_912})
V_195 = Vertex(name = 'V_195',
particles = [ P.ta__plus__, P.ta__minus__, P.a ],
color = [ '1' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_920})
V_196 = Vertex(name = 'V_196',
particles = [ P.e__plus__, P.e__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_173,(0,1):C.GC_118,(0,2):C.GC_300,(0,3):C.GC_585})
V_197 = Vertex(name = 'V_197',
particles = [ P.e__plus__, P.e__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_321,(0,1):C.GC_301,(0,2):C.GC_338,(0,3):C.GC_591})
V_198 = Vertex(name = 'V_198',
particles = [ P.e__plus__, P.e__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6 ],
couplings = {(0,0):C.GC_341,(0,1):C.GC_331,(0,2):C.GC_344})
V_199 = Vertex(name = 'V_199',
particles = [ P.e__plus__, P.e__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_334})
V_200 = Vertex(name = 'V_200',
particles = [ P.mu__plus__, P.mu__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_173,(0,1):C.GC_118,(0,2):C.GC_300,(0,3):C.GC_642})
V_201 = Vertex(name = 'V_201',
particles = [ P.mu__plus__, P.mu__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_321,(0,1):C.GC_301,(0,2):C.GC_338,(0,3):C.GC_648})
V_202 = Vertex(name = 'V_202',
particles = [ P.mu__plus__, P.mu__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6 ],
couplings = {(0,0):C.GC_341,(0,1):C.GC_331,(0,2):C.GC_344})
V_203 = Vertex(name = 'V_203',
particles = [ P.mu__plus__, P.mu__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_334})
V_204 = Vertex(name = 'V_204',
particles = [ P.ta__plus__, P.ta__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_173,(0,1):C.GC_118,(0,2):C.GC_300,(0,3):C.GC_913})
V_205 = Vertex(name = 'V_205',
particles = [ P.ta__plus__, P.ta__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_321,(0,1):C.GC_301,(0,2):C.GC_338,(0,3):C.GC_919})
V_206 = Vertex(name = 'V_206',
particles = [ P.ta__plus__, P.ta__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV6 ],
couplings = {(0,0):C.GC_341,(0,1):C.GC_331,(0,2):C.GC_344})
V_207 = Vertex(name = 'V_207',
particles = [ P.ta__plus__, P.ta__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_334})
V_208 = Vertex(name = 'V_208',
particles = [ P.d__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_1,(0,1):C.GC_528})
V_209 = Vertex(name = 'V_209',
particles = [ P.d__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_537})
V_210 = Vertex(name = 'V_210',
particles = [ P.d__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_2730})
V_211 = Vertex(name = 'V_211',
particles = [ P.d__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_2742})
V_212 = Vertex(name = 'V_212',
particles = [ P.s__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_1,(0,1):C.GC_701})
V_213 = Vertex(name = 'V_213',
particles = [ P.s__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_710})
V_214 = Vertex(name = 'V_214',
particles = [ P.s__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_3481})
V_215 = Vertex(name = 'V_215',
particles = [ P.s__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_3513})
V_216 = Vertex(name = 'V_216',
particles = [ P.b__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_1,(0,1):C.GC_431})
V_217 = Vertex(name = 'V_217',
particles = [ P.b__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_440})
V_218 = Vertex(name = 'V_218',
particles = [ P.b__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_4199})
V_219 = Vertex(name = 'V_219',
particles = [ P.b__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_4231})
V_220 = Vertex(name = 'V_220',
particles = [ P.u__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_2,(0,1):C.GC_1001})
V_221 = Vertex(name = 'V_221',
particles = [ P.u__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_1013})
V_222 = Vertex(name = 'V_222',
particles = [ P.u__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_4031})
V_223 = Vertex(name = 'V_223',
particles = [ P.u__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_4053})
V_224 = Vertex(name = 'V_224',
particles = [ P.c__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_2,(0,1):C.GC_470})
V_225 = Vertex(name = 'V_225',
particles = [ P.c__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_482})
V_226 = Vertex(name = 'V_226',
particles = [ P.c__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_2514})
V_227 = Vertex(name = 'V_227',
particles = [ P.c__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_2546})
V_228 = Vertex(name = 'V_228',
particles = [ P.t__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_2,(0,1):C.GC_779})
V_229 = Vertex(name = 'V_229',
particles = [ P.t__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_791})
V_230 = Vertex(name = 'V_230',
particles = [ P.t__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_3878})
V_231 = Vertex(name = 'V_231',
particles = [ P.t__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_3910})
V_232 = Vertex(name = 'V_232',
particles = [ P.d__tilde__, P.d, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_6,(0,1):C.GC_525})
V_233 = Vertex(name = 'V_233',
particles = [ P.d__tilde__, P.d, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_2731})
V_234 = Vertex(name = 'V_234',
particles = [ P.s__tilde__, P.s, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_6,(0,1):C.GC_698})
V_235 = Vertex(name = 'V_235',
particles = [ P.s__tilde__, P.s, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_3484})
V_236 = Vertex(name = 'V_236',
particles = [ P.b__tilde__, P.b, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_6,(0,1):C.GC_428})
V_237 = Vertex(name = 'V_237',
particles = [ P.b__tilde__, P.b, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_4202})
V_238 = Vertex(name = 'V_238',
particles = [ P.u__tilde__, P.u, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_6,(0,1):C.GC_1002})
V_239 = Vertex(name = 'V_239',
particles = [ P.u__tilde__, P.u, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_4033})
V_240 = Vertex(name = 'V_240',
particles = [ P.c__tilde__, P.c, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_6,(0,1):C.GC_471})
V_241 = Vertex(name = 'V_241',
particles = [ P.c__tilde__, P.c, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_2517})
V_242 = Vertex(name = 'V_242',
particles = [ P.t__tilde__, P.t, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1, L.FFV7 ],
couplings = {(0,0):C.GC_6,(0,1):C.GC_780})
V_243 = Vertex(name = 'V_243',
particles = [ P.t__tilde__, P.t, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV7 ],
couplings = {(0,0):C.GC_3881})
V_244 = Vertex(name = 'V_244',
particles = [ P.d__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_1004,(0,3):C.GC_527,(0,0):C.GC_108,(0,2):C.GC_1034})
V_245 = Vertex(name = 'V_245',
particles = [ P.d__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,1):C.GC_4037,(0,2):C.GC_2733,(0,0):C.GC_289})
V_246 = Vertex(name = 'V_246',
particles = [ P.d__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_309})
V_247 = Vertex(name = 'V_247',
particles = [ P.d__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_347})
V_248 = Vertex(name = 'V_248',
particles = [ P.d__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_348})
V_249 = Vertex(name = 'V_249',
particles = [ P.d__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_366})
V_250 = Vertex(name = 'V_250',
particles = [ P.d__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1108})
V_251 = Vertex(name = 'V_251',
particles = [ P.s__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_2659,(0,3):C.GC_3082,(0,0):C.GC_109,(0,2):C.GC_1065})
V_252 = Vertex(name = 'V_252',
particles = [ P.s__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_290})
V_253 = Vertex(name = 'V_253',
particles = [ P.s__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_310})
V_254 = Vertex(name = 'V_254',
particles = [ P.s__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_349})
V_255 = Vertex(name = 'V_255',
particles = [ P.s__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_350})
V_256 = Vertex(name = 'V_256',
particles = [ P.s__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_367})
V_257 = Vertex(name = 'V_257',
particles = [ P.s__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1109})
V_258 = Vertex(name = 'V_258',
particles = [ P.b__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_4036,(0,3):C.GC_3007,(0,0):C.GC_110,(0,2):C.GC_1025})
V_259 = Vertex(name = 'V_259',
particles = [ P.b__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_291})
V_260 = Vertex(name = 'V_260',
particles = [ P.b__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_311})
V_261 = Vertex(name = 'V_261',
particles = [ P.b__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_351})
V_262 = Vertex(name = 'V_262',
particles = [ P.b__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_352})
V_263 = Vertex(name = 'V_263',
particles = [ P.b__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_368})
V_264 = Vertex(name = 'V_264',
particles = [ P.b__tilde__, P.u, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1110})
V_265 = Vertex(name = 'V_265',
particles = [ P.d__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_1687,(0,3):C.GC_3432,(0,0):C.GC_111,(0,2):C.GC_551})
V_266 = Vertex(name = 'V_266',
particles = [ P.d__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_292})
V_267 = Vertex(name = 'V_267',
particles = [ P.d__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_312})
V_268 = Vertex(name = 'V_268',
particles = [ P.d__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_353})
V_269 = Vertex(name = 'V_269',
particles = [ P.d__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_354})
V_270 = Vertex(name = 'V_270',
particles = [ P.d__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_369})
V_271 = Vertex(name = 'V_271',
particles = [ P.d__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_568})
V_272 = Vertex(name = 'V_272',
particles = [ P.s__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_473,(0,3):C.GC_700,(0,0):C.GC_112,(0,2):C.GC_724})
V_273 = Vertex(name = 'V_273',
particles = [ P.s__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,1):C.GC_2523,(0,2):C.GC_3490,(0,0):C.GC_293})
V_274 = Vertex(name = 'V_274',
particles = [ P.s__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_313})
V_275 = Vertex(name = 'V_275',
particles = [ P.s__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_355})
V_276 = Vertex(name = 'V_276',
particles = [ P.s__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_356})
V_277 = Vertex(name = 'V_277',
particles = [ P.s__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_370})
V_278 = Vertex(name = 'V_278',
particles = [ P.s__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_753})
V_279 = Vertex(name = 'V_279',
particles = [ P.b__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_3815,(0,3):C.GC_3409,(0,0):C.GC_113,(0,2):C.GC_494})
V_280 = Vertex(name = 'V_280',
particles = [ P.b__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_294})
V_281 = Vertex(name = 'V_281',
particles = [ P.b__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_314})
V_282 = Vertex(name = 'V_282',
particles = [ P.b__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_357})
V_283 = Vertex(name = 'V_283',
particles = [ P.b__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_358})
V_284 = Vertex(name = 'V_284',
particles = [ P.b__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_371})
V_285 = Vertex(name = 'V_285',
particles = [ P.b__tilde__, P.c, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_509})
V_286 = Vertex(name = 'V_286',
particles = [ P.d__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_1720,(0,3):C.GC_4249,(0,0):C.GC_114,(0,2):C.GC_812})
V_287 = Vertex(name = 'V_287',
particles = [ P.d__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_295})
V_288 = Vertex(name = 'V_288',
particles = [ P.d__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_315})
V_289 = Vertex(name = 'V_289',
particles = [ P.d__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_359})
V_290 = Vertex(name = 'V_290',
particles = [ P.d__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_360})
V_291 = Vertex(name = 'V_291',
particles = [ P.d__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_372})
V_292 = Vertex(name = 'V_292',
particles = [ P.d__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_870})
V_293 = Vertex(name = 'V_293',
particles = [ P.s__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_2598,(0,3):C.GC_4280,(0,0):C.GC_115,(0,2):C.GC_843})
V_294 = Vertex(name = 'V_294',
particles = [ P.s__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_296})
V_295 = Vertex(name = 'V_295',
particles = [ P.s__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_316})
V_296 = Vertex(name = 'V_296',
particles = [ P.s__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_361})
V_297 = Vertex(name = 'V_297',
particles = [ P.s__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_362})
V_298 = Vertex(name = 'V_298',
particles = [ P.s__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_373})
V_299 = Vertex(name = 'V_299',
particles = [ P.s__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_871})
V_300 = Vertex(name = 'V_300',
particles = [ P.b__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_782,(0,3):C.GC_430,(0,0):C.GC_116,(0,2):C.GC_803})
V_301 = Vertex(name = 'V_301',
particles = [ P.b__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,1):C.GC_3887,(0,2):C.GC_4208,(0,0):C.GC_297})
V_302 = Vertex(name = 'V_302',
particles = [ P.b__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_317})
V_303 = Vertex(name = 'V_303',
particles = [ P.b__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_363})
V_304 = Vertex(name = 'V_304',
particles = [ P.b__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_364})
V_305 = Vertex(name = 'V_305',
particles = [ P.b__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_374})
V_306 = Vertex(name = 'V_306',
particles = [ P.b__tilde__, P.t, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_872})
V_307 = Vertex(name = 'V_307',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_1032,(0,0):C.GC_122})
V_308 = Vertex(name = 'V_308',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1102})
V_309 = Vertex(name = 'V_309',
particles = [ P.s__tilde__, P.u, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_1063,(0,0):C.GC_123})
V_310 = Vertex(name = 'V_310',
particles = [ P.s__tilde__, P.u, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1103})
V_311 = Vertex(name = 'V_311',
particles = [ P.b__tilde__, P.u, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_1023,(0,0):C.GC_124})
V_312 = Vertex(name = 'V_312',
particles = [ P.b__tilde__, P.u, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1104})
V_313 = Vertex(name = 'V_313',
particles = [ P.d__tilde__, P.c, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_549,(0,0):C.GC_125})
V_314 = Vertex(name = 'V_314',
particles = [ P.d__tilde__, P.c, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_566})
V_315 = Vertex(name = 'V_315',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_722,(0,0):C.GC_126})
V_316 = Vertex(name = 'V_316',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_751})
V_317 = Vertex(name = 'V_317',
particles = [ P.b__tilde__, P.c, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_492,(0,0):C.GC_127})
V_318 = Vertex(name = 'V_318',
particles = [ P.b__tilde__, P.c, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_507})
V_319 = Vertex(name = 'V_319',
particles = [ P.d__tilde__, P.t, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_810,(0,0):C.GC_128})
V_320 = Vertex(name = 'V_320',
particles = [ P.d__tilde__, P.t, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_864})
V_321 = Vertex(name = 'V_321',
particles = [ P.s__tilde__, P.t, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_841,(0,0):C.GC_129})
V_322 = Vertex(name = 'V_322',
particles = [ P.s__tilde__, P.t, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_865})
V_323 = Vertex(name = 'V_323',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_801,(0,0):C.GC_130})
V_324 = Vertex(name = 'V_324',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_866})
V_325 = Vertex(name = 'V_325',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_990,(0,3):C.GC_513,(0,2):C.GC_1033,(0,0):C.GC_255})
V_326 = Vertex(name = 'V_326',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,1):C.GC_4013,(0,2):C.GC_2720,(0,0):C.GC_1105})
V_327 = Vertex(name = 'V_327',
particles = [ P.s__tilde__, P.u, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_2648,(0,3):C.GC_3071,(0,2):C.GC_1064,(0,0):C.GC_256})
V_328 = Vertex(name = 'V_328',
particles = [ P.s__tilde__, P.u, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_1106})
V_329 = Vertex(name = 'V_329',
particles = [ P.b__tilde__, P.u, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_4012,(0,3):C.GC_2996,(0,2):C.GC_1024,(0,0):C.GC_257})
V_330 = Vertex(name = 'V_330',
particles = [ P.b__tilde__, P.u, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_1107})
V_331 = Vertex(name = 'V_331',
particles = [ P.d__tilde__, P.c, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_1676,(0,3):C.GC_3421,(0,2):C.GC_550,(0,0):C.GC_258})
V_332 = Vertex(name = 'V_332',
particles = [ P.d__tilde__, P.c, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_567})
V_333 = Vertex(name = 'V_333',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_459,(0,3):C.GC_686,(0,2):C.GC_723,(0,0):C.GC_259})
V_334 = Vertex(name = 'V_334',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,1):C.GC_2488,(0,2):C.GC_3455,(0,0):C.GC_752})
V_335 = Vertex(name = 'V_335',
particles = [ P.b__tilde__, P.c, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_3804,(0,3):C.GC_3398,(0,2):C.GC_493,(0,0):C.GC_260})
V_336 = Vertex(name = 'V_336',
particles = [ P.b__tilde__, P.c, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_508})
V_337 = Vertex(name = 'V_337',
particles = [ P.d__tilde__, P.t, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_1709,(0,3):C.GC_4238,(0,2):C.GC_811,(0,0):C.GC_261})
V_338 = Vertex(name = 'V_338',
particles = [ P.d__tilde__, P.t, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_867})
V_339 = Vertex(name = 'V_339',
particles = [ P.s__tilde__, P.t, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_2587,(0,3):C.GC_4269,(0,2):C.GC_842,(0,0):C.GC_262})
V_340 = Vertex(name = 'V_340',
particles = [ P.s__tilde__, P.t, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_868})
V_341 = Vertex(name = 'V_341',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_766,(0,3):C.GC_416,(0,2):C.GC_802,(0,0):C.GC_263})
V_342 = Vertex(name = 'V_342',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,1):C.GC_3852,(0,2):C.GC_4173,(0,0):C.GC_869})
V_343 = Vertex(name = 'V_343',
particles = [ P.e__plus__, P.ve, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2, L.FFV8 ],
couplings = {(0,1):C.GC_583,(0,0):C.GC_107})
V_344 = Vertex(name = 'V_344',
particles = [ P.e__plus__, P.ve, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_308})
V_345 = Vertex(name = 'V_345',
particles = [ P.e__plus__, P.ve, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_319})
V_346 = Vertex(name = 'V_346',
particles = [ P.e__plus__, P.ve, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_346})
V_347 = Vertex(name = 'V_347',
particles = [ P.e__plus__, P.ve, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_365})
V_348 = Vertex(name = 'V_348',
particles = [ P.mu__plus__, P.vm, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2, L.FFV8 ],
couplings = {(0,1):C.GC_640,(0,0):C.GC_107})
V_349 = Vertex(name = 'V_349',
particles = [ P.mu__plus__, P.vm, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_308})
V_350 = Vertex(name = 'V_350',
particles = [ P.mu__plus__, P.vm, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_319})
V_351 = Vertex(name = 'V_351',
particles = [ P.mu__plus__, P.vm, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_346})
V_352 = Vertex(name = 'V_352',
particles = [ P.mu__plus__, P.vm, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_365})
V_353 = Vertex(name = 'V_353',
particles = [ P.ta__plus__, P.vt, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2, L.FFV8 ],
couplings = {(0,1):C.GC_911,(0,0):C.GC_107})
V_354 = Vertex(name = 'V_354',
particles = [ P.ta__plus__, P.vt, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_308})
V_355 = Vertex(name = 'V_355',
particles = [ P.ta__plus__, P.vt, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_319})
V_356 = Vertex(name = 'V_356',
particles = [ P.ta__plus__, P.vt, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_346})
V_357 = Vertex(name = 'V_357',
particles = [ P.ta__plus__, P.vt, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_365})
V_358 = Vertex(name = 'V_358',
particles = [ P.e__plus__, P.ve, P.W__minus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_121})
V_359 = Vertex(name = 'V_359',
particles = [ P.mu__plus__, P.vm, P.W__minus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_121})
V_360 = Vertex(name = 'V_360',
particles = [ P.ta__plus__, P.vt, P.W__minus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_121})
V_361 = Vertex(name = 'V_361',
particles = [ P.e__plus__, P.ve, P.W__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS5 ],
couplings = {(0,1):C.GC_571,(0,0):C.GC_254})
V_362 = Vertex(name = 'V_362',
particles = [ P.mu__plus__, P.vm, P.W__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS5 ],
couplings = {(0,1):C.GC_628,(0,0):C.GC_254})
V_363 = Vertex(name = 'V_363',
particles = [ P.ta__plus__, P.vt, P.W__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS5 ],
couplings = {(0,1):C.GC_899,(0,0):C.GC_254})
V_364 = Vertex(name = 'V_364',
particles = [ P.u__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_527,(0,3):C.GC_1004,(0,0):C.GC_1187,(0,2):C.GC_1249})
V_365 = Vertex(name = 'V_365',
particles = [ P.u__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,1):C.GC_2733,(0,2):C.GC_4037,(0,0):C.GC_1191})
V_366 = Vertex(name = 'V_366',
particles = [ P.u__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1192})
V_367 = Vertex(name = 'V_367',
particles = [ P.u__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1193})
V_368 = Vertex(name = 'V_368',
particles = [ P.u__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1194})
V_369 = Vertex(name = 'V_369',
particles = [ P.u__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1195})
V_370 = Vertex(name = 'V_370',
particles = [ P.u__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1297})
V_371 = Vertex(name = 'V_371',
particles = [ P.c__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_3044,(0,3):C.GC_2522,(0,0):C.GC_1884,(0,2):C.GC_1923})
V_372 = Vertex(name = 'V_372',
particles = [ P.c__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1888})
V_373 = Vertex(name = 'V_373',
particles = [ P.c__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1889})
V_374 = Vertex(name = 'V_374',
particles = [ P.c__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1890})
V_375 = Vertex(name = 'V_375',
particles = [ P.c__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1891})
V_376 = Vertex(name = 'V_376',
particles = [ P.c__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1892})
V_377 = Vertex(name = 'V_377',
particles = [ P.c__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2004})
V_378 = Vertex(name = 'V_378',
particles = [ P.t__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_3045,(0,3):C.GC_3885,(0,0):C.GC_2775,(0,2):C.GC_2843})
V_379 = Vertex(name = 'V_379',
particles = [ P.t__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2780})
V_380 = Vertex(name = 'V_380',
particles = [ P.t__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2781})
V_381 = Vertex(name = 'V_381',
particles = [ P.t__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2782})
V_382 = Vertex(name = 'V_382',
particles = [ P.t__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2783})
V_383 = Vertex(name = 'V_383',
particles = [ P.t__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2784})
V_384 = Vertex(name = 'V_384',
particles = [ P.t__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2920})
V_385 = Vertex(name = 'V_385',
particles = [ P.u__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_3489,(0,3):C.GC_1777,(0,0):C.GC_1338,(0,2):C.GC_1432})
V_386 = Vertex(name = 'V_386',
particles = [ P.u__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1342})
V_387 = Vertex(name = 'V_387',
particles = [ P.u__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1343})
V_388 = Vertex(name = 'V_388',
particles = [ P.u__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1344})
V_389 = Vertex(name = 'V_389',
particles = [ P.u__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1345})
V_390 = Vertex(name = 'V_390',
particles = [ P.u__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1346})
V_391 = Vertex(name = 'V_391',
particles = [ P.u__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1472})
V_392 = Vertex(name = 'V_392',
particles = [ P.c__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_700,(0,3):C.GC_473,(0,0):C.GC_2060,(0,2):C.GC_2133})
V_393 = Vertex(name = 'V_393',
particles = [ P.c__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,1):C.GC_3490,(0,2):C.GC_2523,(0,0):C.GC_2064})
V_394 = Vertex(name = 'V_394',
particles = [ P.c__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2065})
V_395 = Vertex(name = 'V_395',
particles = [ P.c__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2066})
V_396 = Vertex(name = 'V_396',
particles = [ P.c__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2067})
V_397 = Vertex(name = 'V_397',
particles = [ P.c__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2068})
V_398 = Vertex(name = 'V_398',
particles = [ P.c__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2200})
V_399 = Vertex(name = 'V_399',
particles = [ P.t__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_3491,(0,3):C.GC_3886,(0,0):C.GC_3125,(0,2):C.GC_3225})
V_400 = Vertex(name = 'V_400',
particles = [ P.t__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3130})
V_401 = Vertex(name = 'V_401',
particles = [ P.t__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3131})
V_402 = Vertex(name = 'V_402',
particles = [ P.t__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3132})
V_403 = Vertex(name = 'V_403',
particles = [ P.t__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3133})
V_404 = Vertex(name = 'V_404',
particles = [ P.t__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3134})
V_405 = Vertex(name = 'V_405',
particles = [ P.t__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3289})
V_406 = Vertex(name = 'V_406',
particles = [ P.u__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_4206,(0,3):C.GC_1778,(0,0):C.GC_1513,(0,2):C.GC_1579})
V_407 = Vertex(name = 'V_407',
particles = [ P.u__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1517})
V_408 = Vertex(name = 'V_408',
particles = [ P.u__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1518})
V_409 = Vertex(name = 'V_409',
particles = [ P.u__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1519})
V_410 = Vertex(name = 'V_410',
particles = [ P.u__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1520})
V_411 = Vertex(name = 'V_411',
particles = [ P.u__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1521})
V_412 = Vertex(name = 'V_412',
particles = [ P.u__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1843})
V_413 = Vertex(name = 'V_413',
particles = [ P.c__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_4207,(0,3):C.GC_2524,(0,0):C.GC_2259,(0,2):C.GC_2290})
V_414 = Vertex(name = 'V_414',
particles = [ P.c__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2263})
V_415 = Vertex(name = 'V_415',
particles = [ P.c__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2264})
V_416 = Vertex(name = 'V_416',
particles = [ P.c__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2265})
V_417 = Vertex(name = 'V_417',
particles = [ P.c__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2266})
V_418 = Vertex(name = 'V_418',
particles = [ P.c__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2267})
V_419 = Vertex(name = 'V_419',
particles = [ P.c__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2400})
V_420 = Vertex(name = 'V_420',
particles = [ P.t__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV6, L.FFV8 ],
couplings = {(0,1):C.GC_430,(0,3):C.GC_782,(0,0):C.GC_3557,(0,2):C.GC_3621})
V_421 = Vertex(name = 'V_421',
particles = [ P.t__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,1):C.GC_4208,(0,2):C.GC_3887,(0,0):C.GC_3562})
V_422 = Vertex(name = 'V_422',
particles = [ P.t__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3563})
V_423 = Vertex(name = 'V_423',
particles = [ P.t__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3564})
V_424 = Vertex(name = 'V_424',
particles = [ P.t__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3565})
V_425 = Vertex(name = 'V_425',
particles = [ P.t__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3566})
V_426 = Vertex(name = 'V_426',
particles = [ P.t__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3965})
V_427 = Vertex(name = 'V_427',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_1247,(0,0):C.GC_1188})
V_428 = Vertex(name = 'V_428',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1295})
V_429 = Vertex(name = 'V_429',
particles = [ P.c__tilde__, P.d, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_1921,(0,0):C.GC_1885})
V_430 = Vertex(name = 'V_430',
particles = [ P.c__tilde__, P.d, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2002})
V_431 = Vertex(name = 'V_431',
particles = [ P.t__tilde__, P.d, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_2841,(0,0):C.GC_2776})
V_432 = Vertex(name = 'V_432',
particles = [ P.t__tilde__, P.d, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2918})
V_433 = Vertex(name = 'V_433',
particles = [ P.u__tilde__, P.s, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_1430,(0,0):C.GC_1339})
V_434 = Vertex(name = 'V_434',
particles = [ P.u__tilde__, P.s, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1470})
V_435 = Vertex(name = 'V_435',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_2131,(0,0):C.GC_2061})
V_436 = Vertex(name = 'V_436',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2198})
V_437 = Vertex(name = 'V_437',
particles = [ P.t__tilde__, P.s, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_3223,(0,0):C.GC_3126})
V_438 = Vertex(name = 'V_438',
particles = [ P.t__tilde__, P.s, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3287})
V_439 = Vertex(name = 'V_439',
particles = [ P.u__tilde__, P.b, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_1577,(0,0):C.GC_1514})
V_440 = Vertex(name = 'V_440',
particles = [ P.u__tilde__, P.b, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1841})
V_441 = Vertex(name = 'V_441',
particles = [ P.c__tilde__, P.b, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_2288,(0,0):C.GC_2260})
V_442 = Vertex(name = 'V_442',
particles = [ P.c__tilde__, P.b, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2398})
V_443 = Vertex(name = 'V_443',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,1):C.GC_3619,(0,0):C.GC_3558})
V_444 = Vertex(name = 'V_444',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3963})
V_445 = Vertex(name = 'V_445',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_513,(0,3):C.GC_990,(0,2):C.GC_1248,(0,0):C.GC_1190})
V_446 = Vertex(name = 'V_446',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,1):C.GC_2720,(0,2):C.GC_4013,(0,0):C.GC_1296})
V_447 = Vertex(name = 'V_447',
particles = [ P.c__tilde__, P.d, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_3022,(0,3):C.GC_2487,(0,2):C.GC_1922,(0,0):C.GC_1887})
V_448 = Vertex(name = 'V_448',
particles = [ P.c__tilde__, P.d, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_2003})
V_449 = Vertex(name = 'V_449',
particles = [ P.t__tilde__, P.d, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_3023,(0,3):C.GC_3850,(0,2):C.GC_2842,(0,0):C.GC_2779})
V_450 = Vertex(name = 'V_450',
particles = [ P.t__tilde__, P.d, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_2919})
V_451 = Vertex(name = 'V_451',
particles = [ P.u__tilde__, P.s, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_3454,(0,3):C.GC_1755,(0,2):C.GC_1431,(0,0):C.GC_1341})
V_452 = Vertex(name = 'V_452',
particles = [ P.u__tilde__, P.s, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_1471})
V_453 = Vertex(name = 'V_453',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_686,(0,3):C.GC_459,(0,2):C.GC_2132,(0,0):C.GC_2063})
V_454 = Vertex(name = 'V_454',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,1):C.GC_3455,(0,2):C.GC_2488,(0,0):C.GC_2199})
V_455 = Vertex(name = 'V_455',
particles = [ P.t__tilde__, P.s, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_3456,(0,3):C.GC_3851,(0,2):C.GC_3224,(0,0):C.GC_3129})
V_456 = Vertex(name = 'V_456',
particles = [ P.t__tilde__, P.s, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_3288})
V_457 = Vertex(name = 'V_457',
particles = [ P.u__tilde__, P.b, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_4171,(0,3):C.GC_1756,(0,2):C.GC_1578,(0,0):C.GC_1516})
V_458 = Vertex(name = 'V_458',
particles = [ P.u__tilde__, P.b, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_1842})
V_459 = Vertex(name = 'V_459',
particles = [ P.c__tilde__, P.b, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_4172,(0,3):C.GC_2489,(0,2):C.GC_2289,(0,0):C.GC_2262})
V_460 = Vertex(name = 'V_460',
particles = [ P.c__tilde__, P.b, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_2399})
V_461 = Vertex(name = 'V_461',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS3, L.FFVS5 ],
couplings = {(0,1):C.GC_416,(0,3):C.GC_766,(0,2):C.GC_3620,(0,0):C.GC_3561})
V_462 = Vertex(name = 'V_462',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,1):C.GC_4173,(0,2):C.GC_3852,(0,0):C.GC_3964})
V_463 = Vertex(name = 'V_463',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2, L.FFV5 ],
couplings = {(0,1):C.GC_583,(0,0):C.GC_107})
V_464 = Vertex(name = 'V_464',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_308})
V_465 = Vertex(name = 'V_465',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_319})
V_466 = Vertex(name = 'V_466',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_346})
V_467 = Vertex(name = 'V_467',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_365})
V_468 = Vertex(name = 'V_468',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2, L.FFV5 ],
couplings = {(0,1):C.GC_640,(0,0):C.GC_107})
V_469 = Vertex(name = 'V_469',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_308})
V_470 = Vertex(name = 'V_470',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_319})
V_471 = Vertex(name = 'V_471',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_346})
V_472 = Vertex(name = 'V_472',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_365})
V_473 = Vertex(name = 'V_473',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2, L.FFV5 ],
couplings = {(0,1):C.GC_911,(0,0):C.GC_107})
V_474 = Vertex(name = 'V_474',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_308})
V_475 = Vertex(name = 'V_475',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_319})
V_476 = Vertex(name = 'V_476',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_346})
V_477 = Vertex(name = 'V_477',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_365})
V_478 = Vertex(name = 'V_478',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_121})
V_479 = Vertex(name = 'V_479',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_121})
V_480 = Vertex(name = 'V_480',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_121})
V_481 = Vertex(name = 'V_481',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS2 ],
couplings = {(0,1):C.GC_571,(0,0):C.GC_254})
V_482 = Vertex(name = 'V_482',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS2 ],
couplings = {(0,1):C.GC_628,(0,0):C.GC_254})
V_483 = Vertex(name = 'V_483',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS2 ],
couplings = {(0,1):C.GC_899,(0,0):C.GC_254})
V_484 = Vertex(name = 'V_484',
particles = [ P.d__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_136,(0,1):C.GC_131})
V_485 = Vertex(name = 'V_485',
particles = [ P.d__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_138,(0,1):C.GC_558})
V_486 = Vertex(name = 'V_486',
particles = [ P.d__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2711})
V_487 = Vertex(name = 'V_487',
particles = [ P.d__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2712})
V_488 = Vertex(name = 'V_488',
particles = [ P.s__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2981})
V_489 = Vertex(name = 'V_489',
particles = [ P.s__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2983})
V_490 = Vertex(name = 'V_490',
particles = [ P.b__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2982})
V_491 = Vertex(name = 'V_491',
particles = [ P.b__tilde__, P.d, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2984})
V_492 = Vertex(name = 'V_492',
particles = [ P.d__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3377})
V_493 = Vertex(name = 'V_493',
particles = [ P.d__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3380})
V_494 = Vertex(name = 'V_494',
particles = [ P.s__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_136,(0,1):C.GC_131})
V_495 = Vertex(name = 'V_495',
particles = [ P.s__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_138,(0,1):C.GC_741})
V_496 = Vertex(name = 'V_496',
particles = [ P.s__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3378})
V_497 = Vertex(name = 'V_497',
particles = [ P.s__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3381})
V_498 = Vertex(name = 'V_498',
particles = [ P.b__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3379})
V_499 = Vertex(name = 'V_499',
particles = [ P.b__tilde__, P.s, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3382})
V_500 = Vertex(name = 'V_500',
particles = [ P.d__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_4144})
V_501 = Vertex(name = 'V_501',
particles = [ P.d__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_4147})
V_502 = Vertex(name = 'V_502',
particles = [ P.s__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_4145})
V_503 = Vertex(name = 'V_503',
particles = [ P.s__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_4148})
V_504 = Vertex(name = 'V_504',
particles = [ P.b__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_136,(0,1):C.GC_131})
V_505 = Vertex(name = 'V_505',
particles = [ P.b__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_138,(0,1):C.GC_452})
V_506 = Vertex(name = 'V_506',
particles = [ P.b__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_4146})
V_507 = Vertex(name = 'V_507',
particles = [ P.b__tilde__, P.b, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_4149})
V_508 = Vertex(name = 'V_508',
particles = [ P.d__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_269,(0,1):C.GC_264,(0,2):C.GC_515})
V_509 = Vertex(name = 'V_509',
particles = [ P.d__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_271,(0,1):C.GC_559,(0,2):C.GC_523})
V_510 = Vertex(name = 'V_510',
particles = [ P.d__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_2713,(0,1):C.GC_2721})
V_511 = Vertex(name = 'V_511',
particles = [ P.d__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_2714,(0,1):C.GC_2728})
V_512 = Vertex(name = 'V_512',
particles = [ P.s__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2985,(0,1):C.GC_3024,(0,2):C.GC_3072})
V_513 = Vertex(name = 'V_513',
particles = [ P.s__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2987,(0,1):C.GC_3034,(0,2):C.GC_3077})
# --- Auto-generated UFO Vertex data (FeynRules/MadGraph style) ---
# V_514 .. V_531: couplings of one Z boson and one Higgs (FFVS Lorentz
# structures) to down-type quark pairs, both flavour-changing (e.g. b~ d)
# and flavour-diagonal (s~ s, b~ b). Coupling values are the GC_* constants
# from the model's couplings module; keys (color_idx, lorentz_idx) map each
# Lorentz structure to its coupling. Data table — do not edit by hand.
V_514 = Vertex(name = 'V_514',
particles = [ P.b__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2986,(0,1):C.GC_3025,(0,2):C.GC_2997})
V_515 = Vertex(name = 'V_515',
particles = [ P.b__tilde__, P.d, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2988,(0,1):C.GC_3035,(0,2):C.GC_3002})
V_516 = Vertex(name = 'V_516',
particles = [ P.d__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3383,(0,1):C.GC_3457,(0,2):C.GC_3422})
V_517 = Vertex(name = 'V_517',
particles = [ P.d__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3386,(0,1):C.GC_3474,(0,2):C.GC_3427})
# Flavour-diagonal s~ s Z H vertices use the FFVS3/FFVS4 structures instead.
V_518 = Vertex(name = 'V_518',
particles = [ P.s__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_269,(0,1):C.GC_264,(0,2):C.GC_688})
V_519 = Vertex(name = 'V_519',
particles = [ P.s__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_271,(0,1):C.GC_742,(0,2):C.GC_696})
V_520 = Vertex(name = 'V_520',
particles = [ P.s__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_3384,(0,1):C.GC_3458})
V_521 = Vertex(name = 'V_521',
particles = [ P.s__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_3387,(0,1):C.GC_3475})
V_522 = Vertex(name = 'V_522',
particles = [ P.b__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3385,(0,1):C.GC_3459,(0,2):C.GC_3399})
V_523 = Vertex(name = 'V_523',
particles = [ P.b__tilde__, P.s, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3388,(0,1):C.GC_3476,(0,2):C.GC_3404})
V_524 = Vertex(name = 'V_524',
particles = [ P.d__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4150,(0,1):C.GC_4174,(0,2):C.GC_4239})
V_525 = Vertex(name = 'V_525',
particles = [ P.d__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4153,(0,1):C.GC_4191,(0,2):C.GC_4244})
V_526 = Vertex(name = 'V_526',
particles = [ P.s__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4151,(0,1):C.GC_4175,(0,2):C.GC_4270})
V_527 = Vertex(name = 'V_527',
particles = [ P.s__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4154,(0,1):C.GC_4192,(0,2):C.GC_4275})
V_528 = Vertex(name = 'V_528',
particles = [ P.b__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_269,(0,1):C.GC_264,(0,2):C.GC_418})
V_529 = Vertex(name = 'V_529',
particles = [ P.b__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_271,(0,1):C.GC_453,(0,2):C.GC_426})
V_530 = Vertex(name = 'V_530',
particles = [ P.b__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_4152,(0,1):C.GC_4176})
V_531 = Vertex(name = 'V_531',
particles = [ P.b__tilde__, P.b, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_4155,(0,1):C.GC_4193})
# V_532 .. V_567: Z-boson couplings (FFV Lorentz structures) to down-type
# quark pairs. Flavour-diagonal entries (d~ d, s~ s, b~ b) carry the full
# FFV1/FFV2/FFV3/FFV6/FFV7 set; flavour-changing entries (s~ d, b~ d, ...)
# use FFV2/FFV5/FFV8. NOTE: in several coupling dicts the lorentz index
# appears out of order, e.g. (0,0),(0,2),(0,1),... — the keys, not the
# ordering, determine the mapping. Auto-generated data — do not hand-edit.
V_532 = Vertex(name = 'V_532',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV3, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_171,(0,2):C.GC_336,(0,1):C.GC_118,(0,3):C.GC_298,(0,4):C.GC_529})
V_533 = Vertex(name = 'V_533',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV3, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_320,(0,2):C.GC_339,(0,1):C.GC_302,(0,3):C.GC_560,(0,4):C.GC_536})
V_534 = Vertex(name = 'V_534',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV3, L.FFV7 ],
couplings = {(0,1):C.GC_342,(0,0):C.GC_304,(0,2):C.GC_2734})
V_535 = Vertex(name = 'V_535',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV7 ],
couplings = {(0,0):C.GC_331,(0,1):C.GC_2741})
V_536 = Vertex(name = 'V_536',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_333})
V_537 = Vertex(name = 'V_537',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_334})
V_538 = Vertex(name = 'V_538',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2715})
V_539 = Vertex(name = 'V_539',
particles = [ P.d__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2716})
# Flavour-changing Z couplings (FFV2/FFV5/FFV8 structures).
V_540 = Vertex(name = 'V_540',
particles = [ P.s__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2989,(0,1):C.GC_3046,(0,2):C.GC_3083})
V_541 = Vertex(name = 'V_541',
particles = [ P.s__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2991,(0,1):C.GC_3056,(0,2):C.GC_3088})
V_542 = Vertex(name = 'V_542',
particles = [ P.b__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2990,(0,1):C.GC_3047,(0,2):C.GC_3008})
V_543 = Vertex(name = 'V_543',
particles = [ P.b__tilde__, P.d, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2992,(0,1):C.GC_3057,(0,2):C.GC_3013})
V_544 = Vertex(name = 'V_544',
particles = [ P.d__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3389,(0,1):C.GC_3492,(0,2):C.GC_3433})
V_545 = Vertex(name = 'V_545',
particles = [ P.d__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3392,(0,1):C.GC_3509,(0,2):C.GC_3438})
V_546 = Vertex(name = 'V_546',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV3, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_171,(0,2):C.GC_336,(0,1):C.GC_118,(0,3):C.GC_298,(0,4):C.GC_702})
V_547 = Vertex(name = 'V_547',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV3, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_320,(0,2):C.GC_339,(0,1):C.GC_302,(0,3):C.GC_743,(0,4):C.GC_709})
V_548 = Vertex(name = 'V_548',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV3, L.FFV7 ],
couplings = {(0,1):C.GC_342,(0,0):C.GC_304,(0,2):C.GC_3493})
V_549 = Vertex(name = 'V_549',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV7 ],
couplings = {(0,0):C.GC_331,(0,1):C.GC_3510})
V_550 = Vertex(name = 'V_550',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_333})
V_551 = Vertex(name = 'V_551',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_334})
V_552 = Vertex(name = 'V_552',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3390})
V_553 = Vertex(name = 'V_553',
particles = [ P.s__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3393})
V_554 = Vertex(name = 'V_554',
particles = [ P.b__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3391,(0,1):C.GC_3494,(0,2):C.GC_3410})
V_555 = Vertex(name = 'V_555',
particles = [ P.b__tilde__, P.s, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3394,(0,1):C.GC_3511,(0,2):C.GC_3415})
V_556 = Vertex(name = 'V_556',
particles = [ P.d__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4156,(0,1):C.GC_4209,(0,2):C.GC_4250})
V_557 = Vertex(name = 'V_557',
particles = [ P.d__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4159,(0,1):C.GC_4226,(0,2):C.GC_4255})
V_558 = Vertex(name = 'V_558',
particles = [ P.s__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4157,(0,1):C.GC_4210,(0,2):C.GC_4281})
V_559 = Vertex(name = 'V_559',
particles = [ P.s__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4160,(0,1):C.GC_4227,(0,2):C.GC_4286})
V_560 = Vertex(name = 'V_560',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV3, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_171,(0,2):C.GC_336,(0,1):C.GC_118,(0,3):C.GC_298,(0,4):C.GC_432})
V_561 = Vertex(name = 'V_561',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV3, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_320,(0,2):C.GC_339,(0,1):C.GC_302,(0,3):C.GC_454,(0,4):C.GC_439})
V_562 = Vertex(name = 'V_562',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV3, L.FFV7 ],
couplings = {(0,1):C.GC_342,(0,0):C.GC_304,(0,2):C.GC_4211})
V_563 = Vertex(name = 'V_563',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV7 ],
couplings = {(0,0):C.GC_331,(0,1):C.GC_4228})
V_564 = Vertex(name = 'V_564',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_333})
V_565 = Vertex(name = 'V_565',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_334})
V_566 = Vertex(name = 'V_566',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_4158})
V_567 = Vertex(name = 'V_567',
particles = [ P.b__tilde__, P.b, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_4161})
# V_568 .. V_573: Z + two-Higgs (FFVSS) couplings to charged-lepton pairs
# (color singlet '1'); identical coupling values repeat across e/mu/tau.
# V_574 .. V_579: Z + one-Higgs (FFVS) couplings to charged-lepton pairs.
# Auto-generated data — do not hand-edit.
V_568 = Vertex(name = 'V_568',
particles = [ P.e__plus__, P.e__minus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_133,(0,1):C.GC_132})
V_569 = Vertex(name = 'V_569',
particles = [ P.e__plus__, P.e__minus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_135})
V_570 = Vertex(name = 'V_570',
particles = [ P.mu__plus__, P.mu__minus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_133,(0,1):C.GC_132})
V_571 = Vertex(name = 'V_571',
particles = [ P.mu__plus__, P.mu__minus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_135})
V_572 = Vertex(name = 'V_572',
particles = [ P.ta__plus__, P.ta__minus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_133,(0,1):C.GC_132})
V_573 = Vertex(name = 'V_573',
particles = [ P.ta__plus__, P.ta__minus__, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_135})
V_574 = Vertex(name = 'V_574',
particles = [ P.e__plus__, P.e__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_266,(0,1):C.GC_265,(0,2):C.GC_573})
V_575 = Vertex(name = 'V_575',
particles = [ P.e__plus__, P.e__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_268,(0,1):C.GC_580})
V_576 = Vertex(name = 'V_576',
particles = [ P.mu__plus__, P.mu__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_266,(0,1):C.GC_265,(0,2):C.GC_630})
V_577 = Vertex(name = 'V_577',
particles = [ P.mu__plus__, P.mu__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_268,(0,1):C.GC_637})
V_578 = Vertex(name = 'V_578',
particles = [ P.ta__plus__, P.ta__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_266,(0,1):C.GC_265,(0,2):C.GC_901})
V_579 = Vertex(name = 'V_579',
particles = [ P.ta__plus__, P.ta__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_268,(0,1):C.GC_908})
# V_580 .. V_615: Z-boson couplings (FFV structures) to up-type quark pairs.
# Same layout as the down-type table above: diagonal entries (u~ u, c~ c,
# t~ t) use FFV1/FFV2/FFV4/FFV6/FFV7, flavour-changing entries (c~ u, t~ u,
# u~ c, ...) use FFV2/FFV5/FFV8. Dict keys, not their order, bind couplings
# to Lorentz structures. Auto-generated data — do not hand-edit.
V_580 = Vertex(name = 'V_580',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV4, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_172,(0,2):C.GC_337,(0,1):C.GC_117,(0,3):C.GC_305,(0,4):C.GC_1005})
V_581 = Vertex(name = 'V_581',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV4, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_307,(0,2):C.GC_340,(0,1):C.GC_302,(0,3):C.GC_1088,(0,4):C.GC_1012})
V_582 = Vertex(name = 'V_582',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV4, L.FFV7 ],
couplings = {(0,1):C.GC_343,(0,0):C.GC_303,(0,2):C.GC_4039})
V_583 = Vertex(name = 'V_583',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV7 ],
couplings = {(0,0):C.GC_330,(0,1):C.GC_4051})
V_584 = Vertex(name = 'V_584',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_332})
V_585 = Vertex(name = 'V_585',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_335})
V_586 = Vertex(name = 'V_586',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3796})
V_587 = Vertex(name = 'V_587',
particles = [ P.u__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3799})
V_588 = Vertex(name = 'V_588',
particles = [ P.c__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2472,(0,1):C.GC_2660,(0,2):C.GC_2525})
V_589 = Vertex(name = 'V_589',
particles = [ P.c__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2475,(0,1):C.GC_2665,(0,2):C.GC_2542})
V_590 = Vertex(name = 'V_590',
particles = [ P.t__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3793,(0,1):C.GC_4038,(0,2):C.GC_3888})
V_591 = Vertex(name = 'V_591',
particles = [ P.t__tilde__, P.u, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3797,(0,1):C.GC_4050,(0,2):C.GC_3905})
V_592 = Vertex(name = 'V_592',
particles = [ P.u__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1669,(0,1):C.GC_1688,(0,2):C.GC_1779})
V_593 = Vertex(name = 'V_593',
particles = [ P.u__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1671,(0,1):C.GC_1693,(0,2):C.GC_1789})
V_594 = Vertex(name = 'V_594',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV4, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_172,(0,2):C.GC_337,(0,1):C.GC_117,(0,3):C.GC_305,(0,4):C.GC_474})
V_595 = Vertex(name = 'V_595',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV4, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_307,(0,2):C.GC_340,(0,1):C.GC_302,(0,3):C.GC_503,(0,4):C.GC_481})
V_596 = Vertex(name = 'V_596',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV4, L.FFV7 ],
couplings = {(0,1):C.GC_343,(0,0):C.GC_303,(0,2):C.GC_2526})
V_597 = Vertex(name = 'V_597',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV7 ],
couplings = {(0,0):C.GC_330,(0,1):C.GC_2543})
V_598 = Vertex(name = 'V_598',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_332})
V_599 = Vertex(name = 'V_599',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_335})
V_600 = Vertex(name = 'V_600',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2473})
V_601 = Vertex(name = 'V_601',
particles = [ P.c__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2476})
V_602 = Vertex(name = 'V_602',
particles = [ P.t__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3794,(0,1):C.GC_3816,(0,2):C.GC_3889})
V_603 = Vertex(name = 'V_603',
particles = [ P.t__tilde__, P.c, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3798,(0,1):C.GC_3821,(0,2):C.GC_3906})
V_604 = Vertex(name = 'V_604',
particles = [ P.u__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1670,(0,1):C.GC_1721,(0,2):C.GC_1780})
V_605 = Vertex(name = 'V_605',
particles = [ P.u__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1672,(0,1):C.GC_1726,(0,2):C.GC_1790})
V_606 = Vertex(name = 'V_606',
particles = [ P.c__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2474,(0,1):C.GC_2599,(0,2):C.GC_2527})
V_607 = Vertex(name = 'V_607',
particles = [ P.c__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2477,(0,1):C.GC_2604,(0,2):C.GC_2544})
V_608 = Vertex(name = 'V_608',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV4, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_172,(0,2):C.GC_337,(0,1):C.GC_117,(0,3):C.GC_305,(0,4):C.GC_783})
V_609 = Vertex(name = 'V_609',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2, L.FFV4, L.FFV6, L.FFV7 ],
couplings = {(0,0):C.GC_307,(0,2):C.GC_340,(0,1):C.GC_302,(0,3):C.GC_852,(0,4):C.GC_790})
V_610 = Vertex(name = 'V_610',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV4, L.FFV7 ],
couplings = {(0,1):C.GC_343,(0,0):C.GC_303,(0,2):C.GC_3890})
V_611 = Vertex(name = 'V_611',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2, L.FFV7 ],
couplings = {(0,0):C.GC_330,(0,1):C.GC_3907})
V_612 = Vertex(name = 'V_612',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_332})
V_613 = Vertex(name = 'V_613',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_335})
V_614 = Vertex(name = 'V_614',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3795})
V_615 = Vertex(name = 'V_615',
particles = [ P.t__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3800})
# V_616 .. V_639: Z + two-Higgs (FFVSS) couplings to up-type quark pairs,
# flavour-diagonal and flavour-changing. Auto-generated data — do not
# hand-edit.
V_616 = Vertex(name = 'V_616',
particles = [ P.u__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_136,(0,1):C.GC_139})
V_617 = Vertex(name = 'V_617',
particles = [ P.u__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_137,(0,1):C.GC_1086})
V_618 = Vertex(name = 'V_618',
particles = [ P.u__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3780})
V_619 = Vertex(name = 'V_619',
particles = [ P.u__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3783})
V_620 = Vertex(name = 'V_620',
particles = [ P.c__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2460})
V_621 = Vertex(name = 'V_621',
particles = [ P.c__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2463})
V_622 = Vertex(name = 'V_622',
particles = [ P.t__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3777})
V_623 = Vertex(name = 'V_623',
particles = [ P.t__tilde__, P.u, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3781})
V_624 = Vertex(name = 'V_624',
particles = [ P.u__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1661})
V_625 = Vertex(name = 'V_625',
particles = [ P.u__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1663})
V_626 = Vertex(name = 'V_626',
particles = [ P.c__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_136,(0,1):C.GC_139})
V_627 = Vertex(name = 'V_627',
particles = [ P.c__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_137,(0,1):C.GC_501})
V_628 = Vertex(name = 'V_628',
particles = [ P.c__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2461})
V_629 = Vertex(name = 'V_629',
particles = [ P.c__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2464})
V_630 = Vertex(name = 'V_630',
particles = [ P.t__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3778})
V_631 = Vertex(name = 'V_631',
particles = [ P.t__tilde__, P.c, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3782})
V_632 = Vertex(name = 'V_632',
particles = [ P.u__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1662})
V_633 = Vertex(name = 'V_633',
particles = [ P.u__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_1664})
V_634 = Vertex(name = 'V_634',
particles = [ P.c__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2462})
V_635 = Vertex(name = 'V_635',
particles = [ P.c__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_2465})
V_636 = Vertex(name = 'V_636',
particles = [ P.t__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_136,(0,1):C.GC_139})
V_637 = Vertex(name = 'V_637',
particles = [ P.t__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1, L.FFVSS2 ],
couplings = {(0,0):C.GC_137,(0,1):C.GC_850})
V_638 = Vertex(name = 'V_638',
particles = [ P.t__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3779})
V_639 = Vertex(name = 'V_639',
particles = [ P.t__tilde__, P.t, P.Z, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_3784})
# V_640 .. V_663: Z + one-Higgs (FFVS) couplings to up-type quark pairs.
# Diagonal entries use FFVS1/FFVS3/FFVS4 (or FFVS1/FFVS4); flavour-changing
# entries use FFVS1/FFVS2/FFVS5. Auto-generated data — do not hand-edit.
V_640 = Vertex(name = 'V_640',
particles = [ P.u__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_269,(0,1):C.GC_272,(0,2):C.GC_991})
V_641 = Vertex(name = 'V_641',
particles = [ P.u__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_270,(0,1):C.GC_1087,(0,2):C.GC_999})
V_642 = Vertex(name = 'V_642',
particles = [ P.u__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_3788,(0,1):C.GC_4015})
V_643 = Vertex(name = 'V_643',
particles = [ P.u__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_3791,(0,1):C.GC_4027})
V_644 = Vertex(name = 'V_644',
particles = [ P.c__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2466,(0,1):C.GC_2649,(0,2):C.GC_2490})
V_645 = Vertex(name = 'V_645',
particles = [ P.c__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2469,(0,1):C.GC_2654,(0,2):C.GC_2507})
V_646 = Vertex(name = 'V_646',
particles = [ P.t__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3785,(0,1):C.GC_4014,(0,2):C.GC_3853})
V_647 = Vertex(name = 'V_647',
particles = [ P.t__tilde__, P.u, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3789,(0,1):C.GC_4026,(0,2):C.GC_3870})
V_648 = Vertex(name = 'V_648',
particles = [ P.u__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1665,(0,1):C.GC_1677,(0,2):C.GC_1757})
V_649 = Vertex(name = 'V_649',
particles = [ P.u__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1667,(0,1):C.GC_1682,(0,2):C.GC_1767})
V_650 = Vertex(name = 'V_650',
particles = [ P.c__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_269,(0,1):C.GC_272,(0,2):C.GC_460})
V_651 = Vertex(name = 'V_651',
particles = [ P.c__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_270,(0,1):C.GC_502,(0,2):C.GC_468})
V_652 = Vertex(name = 'V_652',
particles = [ P.c__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_2467,(0,1):C.GC_2491})
V_653 = Vertex(name = 'V_653',
particles = [ P.c__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_2470,(0,1):C.GC_2508})
V_654 = Vertex(name = 'V_654',
particles = [ P.t__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3786,(0,1):C.GC_3805,(0,2):C.GC_3854})
V_655 = Vertex(name = 'V_655',
particles = [ P.t__tilde__, P.c, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3790,(0,1):C.GC_3810,(0,2):C.GC_3871})
V_656 = Vertex(name = 'V_656',
particles = [ P.u__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1666,(0,1):C.GC_1710,(0,2):C.GC_1758})
V_657 = Vertex(name = 'V_657',
particles = [ P.u__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1668,(0,1):C.GC_1715,(0,2):C.GC_1768})
V_658 = Vertex(name = 'V_658',
particles = [ P.c__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2468,(0,1):C.GC_2588,(0,2):C.GC_2492})
V_659 = Vertex(name = 'V_659',
particles = [ P.c__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2471,(0,1):C.GC_2593,(0,2):C.GC_2509})
V_660 = Vertex(name = 'V_660',
particles = [ P.t__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_269,(0,1):C.GC_272,(0,2):C.GC_767})
V_661 = Vertex(name = 'V_661',
particles = [ P.t__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS3, L.FFVS4 ],
couplings = {(0,0):C.GC_270,(0,1):C.GC_851,(0,2):C.GC_777})
V_662 = Vertex(name = 'V_662',
particles = [ P.t__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_3787,(0,1):C.GC_3855})
V_663 = Vertex(name = 'V_663',
particles = [ P.t__tilde__, P.t, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS1, L.FFVS4 ],
couplings = {(0,0):C.GC_3792,(0,1):C.GC_3872})
# V_664 .. V_675: Z-boson couplings to neutrino pairs (color singlet).
# A single FFV2 structure per vertex; the same four coupling values
# (GC_117/299/301/306) repeat for each neutrino flavour. Auto-generated
# data — do not hand-edit.
V_664 = Vertex(name = 'V_664',
particles = [ P.ve__tilde__, P.ve, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_117})
V_665 = Vertex(name = 'V_665',
particles = [ P.ve__tilde__, P.ve, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_299})
V_666 = Vertex(name = 'V_666',
particles = [ P.ve__tilde__, P.ve, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_301})
V_667 = Vertex(name = 'V_667',
particles = [ P.ve__tilde__, P.ve, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_306})
V_668 = Vertex(name = 'V_668',
particles = [ P.vm__tilde__, P.vm, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_117})
V_669 = Vertex(name = 'V_669',
particles = [ P.vm__tilde__, P.vm, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_299})
V_670 = Vertex(name = 'V_670',
particles = [ P.vm__tilde__, P.vm, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_301})
V_671 = Vertex(name = 'V_671',
particles = [ P.vm__tilde__, P.vm, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_306})
V_672 = Vertex(name = 'V_672',
particles = [ P.vt__tilde__, P.vt, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_117})
V_673 = Vertex(name = 'V_673',
particles = [ P.vt__tilde__, P.vt, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_299})
V_674 = Vertex(name = 'V_674',
particles = [ P.vt__tilde__, P.vt, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_301})
V_675 = Vertex(name = 'V_675',
particles = [ P.vt__tilde__, P.vt, P.Z ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_306})
# V_676 .. V_681: Z + two-Higgs (FFVSS1) couplings to neutrino pairs.
# V_682 .. V_687: Z + one-Higgs (FFVS1) couplings to neutrino pairs.
# The same coupling pair repeats per neutrino flavour. Auto-generated
# data — do not hand-edit.
V_676 = Vertex(name = 'V_676',
particles = [ P.ve__tilde__, P.ve, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_133})
V_677 = Vertex(name = 'V_677',
particles = [ P.ve__tilde__, P.ve, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_134})
V_678 = Vertex(name = 'V_678',
particles = [ P.vm__tilde__, P.vm, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_133})
V_679 = Vertex(name = 'V_679',
particles = [ P.vm__tilde__, P.vm, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_134})
V_680 = Vertex(name = 'V_680',
particles = [ P.vt__tilde__, P.vt, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_133})
V_681 = Vertex(name = 'V_681',
particles = [ P.vt__tilde__, P.vt, P.Z, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFVSS1 ],
couplings = {(0,0):C.GC_134})
V_682 = Vertex(name = 'V_682',
particles = [ P.ve__tilde__, P.ve, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_266})
V_683 = Vertex(name = 'V_683',
particles = [ P.ve__tilde__, P.ve, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_267})
V_684 = Vertex(name = 'V_684',
particles = [ P.vm__tilde__, P.vm, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_266})
V_685 = Vertex(name = 'V_685',
particles = [ P.vm__tilde__, P.vm, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_267})
V_686 = Vertex(name = 'V_686',
particles = [ P.vt__tilde__, P.vt, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_266})
V_687 = Vertex(name = 'V_687',
particles = [ P.vt__tilde__, P.vt, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS1 ],
couplings = {(0,0):C.GC_267})
# V_688 .. V_693: photon (a) and gluon (g) couplings involving the top
# quark t and the extra state t1, including t-t1 mixing vertices. Gluon
# vertices carry the fundamental-representation color factor 'T(3,2,1)'.
# Auto-generated data — do not hand-edit.
V_688 = Vertex(name = 'V_688',
particles = [ P.t__tilde__, P.t1, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GC_68})
V_689 = Vertex(name = 'V_689',
particles = [ P.t1__tilde__, P.t1, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GC_73})
V_690 = Vertex(name = 'V_690',
particles = [ P.t1__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GC_68})
V_691 = Vertex(name = 'V_691',
particles = [ P.t__tilde__, P.t1, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GC_71})
V_692 = Vertex(name = 'V_692',
particles = [ P.t1__tilde__, P.t1, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GC_76})
V_693 = Vertex(name = 'V_693',
particles = [ P.t1__tilde__, P.t, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GC_71})
V_694 = Vertex(name = 'V_694',
particles = [ P.d__tilde__, P.t1, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_155})
V_695 = Vertex(name = 'V_695',
particles = [ P.s__tilde__, P.t1, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_156})
V_696 = Vertex(name = 'V_696',
particles = [ P.b__tilde__, P.t1, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_157})
V_697 = Vertex(name = 'V_697',
particles = [ P.d__tilde__, P.t1, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_162})
V_698 = Vertex(name = 'V_698',
particles = [ P.s__tilde__, P.t1, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_163})
V_699 = Vertex(name = 'V_699',
particles = [ P.b__tilde__, P.t1, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_164})
V_700 = Vertex(name = 'V_700',
particles = [ P.d__tilde__, P.u, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_149})
V_701 = Vertex(name = 'V_701',
particles = [ P.s__tilde__, P.u, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_150})
V_702 = Vertex(name = 'V_702',
particles = [ P.b__tilde__, P.u, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_151})
V_703 = Vertex(name = 'V_703',
particles = [ P.d__tilde__, P.c, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_152})
V_704 = Vertex(name = 'V_704',
particles = [ P.s__tilde__, P.c, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_153})
V_705 = Vertex(name = 'V_705',
particles = [ P.b__tilde__, P.c, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_154})
V_706 = Vertex(name = 'V_706',
particles = [ P.d__tilde__, P.t, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_155})
V_707 = Vertex(name = 'V_707',
particles = [ P.s__tilde__, P.t, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_156})
V_708 = Vertex(name = 'V_708',
particles = [ P.b__tilde__, P.t, P.W1__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_157})
V_709 = Vertex(name = 'V_709',
particles = [ P.e__plus__, P.ve, P.W1__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_148})
V_710 = Vertex(name = 'V_710',
particles = [ P.mu__plus__, P.vm, P.W1__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_148})
V_711 = Vertex(name = 'V_711',
particles = [ P.ta__plus__, P.vt, P.W1__minus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_148})
V_712 = Vertex(name = 'V_712',
particles = [ P.t1__tilde__, P.d, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2778})
V_713 = Vertex(name = 'V_713',
particles = [ P.t1__tilde__, P.s, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3128})
V_714 = Vertex(name = 'V_714',
particles = [ P.t1__tilde__, P.b, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3560})
V_715 = Vertex(name = 'V_715',
particles = [ P.u__tilde__, P.d, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1189})
V_716 = Vertex(name = 'V_716',
particles = [ P.c__tilde__, P.d, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1886})
V_717 = Vertex(name = 'V_717',
particles = [ P.t__tilde__, P.d, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2777})
V_718 = Vertex(name = 'V_718',
particles = [ P.u__tilde__, P.s, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1340})
V_719 = Vertex(name = 'V_719',
particles = [ P.c__tilde__, P.s, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2062})
V_720 = Vertex(name = 'V_720',
particles = [ P.t__tilde__, P.s, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3127})
V_721 = Vertex(name = 'V_721',
particles = [ P.u__tilde__, P.b, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_1515})
V_722 = Vertex(name = 'V_722',
particles = [ P.c__tilde__, P.b, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2261})
V_723 = Vertex(name = 'V_723',
particles = [ P.t__tilde__, P.b, P.W1__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3559})
V_724 = Vertex(name = 'V_724',
particles = [ P.ve__tilde__, P.e__minus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_148})
V_725 = Vertex(name = 'V_725',
particles = [ P.vm__tilde__, P.mu__minus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_148})
V_726 = Vertex(name = 'V_726',
particles = [ P.vt__tilde__, P.ta__minus__, P.W1__plus__ ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_148})
V_727 = Vertex(name = 'V_727',
particles = [ P.t1__tilde__, P.d, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_2777})
V_728 = Vertex(name = 'V_728',
particles = [ P.t1__tilde__, P.s, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3127})
V_729 = Vertex(name = 'V_729',
particles = [ P.t1__tilde__, P.b, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_3559})
V_730 = Vertex(name = 'V_730',
particles = [ P.t__tilde__, P.t1, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_182,(0,1):C.GC_158})
V_731 = Vertex(name = 'V_731',
particles = [ P.t1__tilde__, P.t1, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_184,(0,1):C.GC_165})
V_732 = Vertex(name = 'V_732',
particles = [ P.t1__tilde__, P.t, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_182,(0,1):C.GC_158})
V_733 = Vertex(name = 'V_733',
particles = [ P.d__tilde__, P.d, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_181,(0,1):C.GC_159})
V_734 = Vertex(name = 'V_734',
particles = [ P.s__tilde__, P.s, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_181,(0,1):C.GC_159})
V_735 = Vertex(name = 'V_735',
particles = [ P.b__tilde__, P.b, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_181,(0,1):C.GC_159})
V_736 = Vertex(name = 'V_736',
particles = [ P.e__plus__, P.e__minus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_183,(0,1):C.GC_159})
V_737 = Vertex(name = 'V_737',
particles = [ P.mu__plus__, P.mu__minus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_183,(0,1):C.GC_159})
V_738 = Vertex(name = 'V_738',
particles = [ P.ta__plus__, P.ta__minus__, P.Z1 ],
color = [ '1' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_183,(0,1):C.GC_159})
V_739 = Vertex(name = 'V_739',
particles = [ P.t__tilde__, P.t1, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_184,(0,1):C.GC_165})
V_740 = Vertex(name = 'V_740',
particles = [ P.t1__tilde__, P.t1, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_185,(0,1):C.GC_168})
V_741 = Vertex(name = 'V_741',
particles = [ P.t1__tilde__, P.t, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_184,(0,1):C.GC_165})
V_742 = Vertex(name = 'V_742',
particles = [ P.u__tilde__, P.u, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_182,(0,1):C.GC_158})
V_743 = Vertex(name = 'V_743',
particles = [ P.c__tilde__, P.c, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_182,(0,1):C.GC_158})
V_744 = Vertex(name = 'V_744',
particles = [ P.t__tilde__, P.t, P.Z1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV1, L.FFV2 ],
couplings = {(0,0):C.GC_182,(0,1):C.GC_158})
V_745 = Vertex(name = 'V_745',
particles = [ P.ve__tilde__, P.ve, P.Z1 ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_158})
V_746 = Vertex(name = 'V_746',
particles = [ P.vm__tilde__, P.vm, P.Z1 ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_158})
V_747 = Vertex(name = 'V_747',
particles = [ P.vt__tilde__, P.vt, P.Z1 ],
color = [ '1' ],
lorentz = [ L.FFV2 ],
couplings = {(0,0):C.GC_158})
V_748 = Vertex(name = 'V_748',
particles = [ P.d__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_510})
V_749 = Vertex(name = 'V_749',
particles = [ P.d__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_538})
V_750 = Vertex(name = 'V_750',
particles = [ P.d__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_539})
V_751 = Vertex(name = 'V_751',
particles = [ P.d__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_540})
V_752 = Vertex(name = 'V_752',
particles = [ P.d__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_541})
V_753 = Vertex(name = 'V_753',
particles = [ P.d__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_542})
V_754 = Vertex(name = 'V_754',
particles = [ P.d__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_2743})
V_755 = Vertex(name = 'V_755',
particles = [ P.s__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_3060,(0,1):C.GC_3090})
V_756 = Vertex(name = 'V_756',
particles = [ P.b__tilde__, P.d, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_3061,(0,1):C.GC_3015})
V_757 = Vertex(name = 'V_757',
particles = [ P.d__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_3515,(0,1):C.GC_3440})
V_758 = Vertex(name = 'V_758',
particles = [ P.s__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_683})
V_759 = Vertex(name = 'V_759',
particles = [ P.s__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_711})
V_760 = Vertex(name = 'V_760',
particles = [ P.s__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_712})
V_761 = Vertex(name = 'V_761',
particles = [ P.s__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_713})
V_762 = Vertex(name = 'V_762',
particles = [ P.s__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_714})
V_763 = Vertex(name = 'V_763',
particles = [ P.s__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_715})
V_764 = Vertex(name = 'V_764',
particles = [ P.s__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_3516})
V_765 = Vertex(name = 'V_765',
particles = [ P.b__tilde__, P.s, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_3517,(0,1):C.GC_3417})
V_766 = Vertex(name = 'V_766',
particles = [ P.d__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_4232,(0,1):C.GC_4257})
V_767 = Vertex(name = 'V_767',
particles = [ P.s__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_4233,(0,1):C.GC_4288})
V_768 = Vertex(name = 'V_768',
particles = [ P.b__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_413})
V_769 = Vertex(name = 'V_769',
particles = [ P.b__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_441})
V_770 = Vertex(name = 'V_770',
particles = [ P.b__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_442})
V_771 = Vertex(name = 'V_771',
particles = [ P.b__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_443})
V_772 = Vertex(name = 'V_772',
particles = [ P.b__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_444})
V_773 = Vertex(name = 'V_773',
particles = [ P.b__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_445})
V_774 = Vertex(name = 'V_774',
particles = [ P.b__tilde__, P.b, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_4234})
V_775 = Vertex(name = 'V_775',
particles = [ P.d__tilde__, P.d, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_512})
V_776 = Vertex(name = 'V_776',
particles = [ P.d__tilde__, P.d, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_2719})
V_777 = Vertex(name = 'V_777',
particles = [ P.s__tilde__, P.d, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_3020,(0,1):C.GC_3070})
V_778 = Vertex(name = 'V_778',
particles = [ P.b__tilde__, P.d, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_3021,(0,1):C.GC_2995})
V_779 = Vertex(name = 'V_779',
particles = [ P.d__tilde__, P.s, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_3451,(0,1):C.GC_3420})
V_780 = Vertex(name = 'V_780',
particles = [ P.s__tilde__, P.s, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_685})
V_781 = Vertex(name = 'V_781',
particles = [ P.s__tilde__, P.s, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_3452})
V_782 = Vertex(name = 'V_782',
particles = [ P.b__tilde__, P.s, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_3453,(0,1):C.GC_3397})
V_783 = Vertex(name = 'V_783',
particles = [ P.d__tilde__, P.b, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_4168,(0,1):C.GC_4237})
V_784 = Vertex(name = 'V_784',
particles = [ P.s__tilde__, P.b, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_4169,(0,1):C.GC_4268})
V_785 = Vertex(name = 'V_785',
particles = [ P.b__tilde__, P.b, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_415})
V_786 = Vertex(name = 'V_786',
particles = [ P.b__tilde__, P.b, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_4170})
V_787 = Vertex(name = 'V_787',
particles = [ P.d__tilde__, P.d, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_526})
V_788 = Vertex(name = 'V_788',
particles = [ P.d__tilde__, P.d, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_2732})
V_789 = Vertex(name = 'V_789',
particles = [ P.s__tilde__, P.d, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_3042,(0,1):C.GC_3081})
V_790 = Vertex(name = 'V_790',
particles = [ P.b__tilde__, P.d, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_3043,(0,1):C.GC_3006})
V_791 = Vertex(name = 'V_791',
particles = [ P.d__tilde__, P.s, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_3486,(0,1):C.GC_3431})
V_792 = Vertex(name = 'V_792',
particles = [ P.s__tilde__, P.s, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_699})
V_793 = Vertex(name = 'V_793',
particles = [ P.s__tilde__, P.s, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_3487})
V_794 = Vertex(name = 'V_794',
particles = [ P.b__tilde__, P.s, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_3488,(0,1):C.GC_3408})
V_795 = Vertex(name = 'V_795',
particles = [ P.d__tilde__, P.b, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_4203,(0,1):C.GC_4248})
V_796 = Vertex(name = 'V_796',
particles = [ P.s__tilde__, P.b, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_4204,(0,1):C.GC_4279})
V_797 = Vertex(name = 'V_797',
particles = [ P.b__tilde__, P.b, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_429})
V_798 = Vertex(name = 'V_798',
particles = [ P.b__tilde__, P.b, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_4205})
V_799 = Vertex(name = 'V_799',
particles = [ P.e__plus__, P.e__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_569})
V_800 = Vertex(name = 'V_800',
particles = [ P.e__plus__, P.e__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_593})
V_801 = Vertex(name = 'V_801',
particles = [ P.e__plus__, P.e__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_594})
V_802 = Vertex(name = 'V_802',
particles = [ P.e__plus__, P.e__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_595})
V_803 = Vertex(name = 'V_803',
particles = [ P.e__plus__, P.e__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_596})
V_804 = Vertex(name = 'V_804',
particles = [ P.e__plus__, P.e__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_597})
V_805 = Vertex(name = 'V_805',
particles = [ P.mu__plus__, P.mu__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_626})
V_806 = Vertex(name = 'V_806',
particles = [ P.mu__plus__, P.mu__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_650})
V_807 = Vertex(name = 'V_807',
particles = [ P.mu__plus__, P.mu__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_651})
V_808 = Vertex(name = 'V_808',
particles = [ P.mu__plus__, P.mu__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_652})
V_809 = Vertex(name = 'V_809',
particles = [ P.mu__plus__, P.mu__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_653})
V_810 = Vertex(name = 'V_810',
particles = [ P.mu__plus__, P.mu__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_654})
V_811 = Vertex(name = 'V_811',
particles = [ P.ta__plus__, P.ta__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_897})
V_812 = Vertex(name = 'V_812',
particles = [ P.ta__plus__, P.ta__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_921})
V_813 = Vertex(name = 'V_813',
particles = [ P.ta__plus__, P.ta__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_922})
V_814 = Vertex(name = 'V_814',
particles = [ P.ta__plus__, P.ta__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_923})
V_815 = Vertex(name = 'V_815',
particles = [ P.ta__plus__, P.ta__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_924})
V_816 = Vertex(name = 'V_816',
particles = [ P.ta__plus__, P.ta__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_925})
V_817 = Vertex(name = 'V_817',
particles = [ P.e__plus__, P.e__minus__, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_570})
V_818 = Vertex(name = 'V_818',
particles = [ P.mu__plus__, P.mu__minus__, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_627})
V_819 = Vertex(name = 'V_819',
particles = [ P.ta__plus__, P.ta__minus__, P.H, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_898})
V_820 = Vertex(name = 'V_820',
particles = [ P.e__plus__, P.e__minus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_582})
V_821 = Vertex(name = 'V_821',
particles = [ P.mu__plus__, P.mu__minus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_639})
V_822 = Vertex(name = 'V_822',
particles = [ P.ta__plus__, P.ta__minus__, P.H, P.H ],
color = [ '1' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_910})
V_823 = Vertex(name = 'V_823',
particles = [ P.u__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_986})
V_824 = Vertex(name = 'V_824',
particles = [ P.u__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_1014})
V_825 = Vertex(name = 'V_825',
particles = [ P.u__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_1015})
V_826 = Vertex(name = 'V_826',
particles = [ P.u__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_1016})
V_827 = Vertex(name = 'V_827',
particles = [ P.u__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_1017})
V_828 = Vertex(name = 'V_828',
particles = [ P.u__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_1018})
V_829 = Vertex(name = 'V_829',
particles = [ P.u__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_4055})
V_830 = Vertex(name = 'V_830',
particles = [ P.c__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_2667,(0,1):C.GC_2548})
V_831 = Vertex(name = 'V_831',
particles = [ P.t__tilde__, P.u, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_4054,(0,1):C.GC_3911})
V_832 = Vertex(name = 'V_832',
particles = [ P.u__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_1695,(0,1):C.GC_1793})
V_833 = Vertex(name = 'V_833',
particles = [ P.c__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_455})
V_834 = Vertex(name = 'V_834',
particles = [ P.c__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_483})
V_835 = Vertex(name = 'V_835',
particles = [ P.c__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_484})
V_836 = Vertex(name = 'V_836',
particles = [ P.c__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_485})
V_837 = Vertex(name = 'V_837',
particles = [ P.c__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_486})
V_838 = Vertex(name = 'V_838',
particles = [ P.c__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_487})
V_839 = Vertex(name = 'V_839',
particles = [ P.c__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_2549})
V_840 = Vertex(name = 'V_840',
particles = [ P.t__tilde__, P.c, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_3823,(0,1):C.GC_3912})
V_841 = Vertex(name = 'V_841',
particles = [ P.u__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_1728,(0,1):C.GC_1794})
V_842 = Vertex(name = 'V_842',
particles = [ P.c__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS2, L.FFS3 ],
couplings = {(0,0):C.GC_2606,(0,1):C.GC_2550})
V_843 = Vertex(name = 'V_843',
particles = [ P.t__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_762})
V_844 = Vertex(name = 'V_844',
particles = [ P.t__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_792})
V_845 = Vertex(name = 'V_845',
particles = [ P.t__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_793})
V_846 = Vertex(name = 'V_846',
particles = [ P.t__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_794})
V_847 = Vertex(name = 'V_847',
particles = [ P.t__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_795})
V_848 = Vertex(name = 'V_848',
particles = [ P.t__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_796})
V_849 = Vertex(name = 'V_849',
particles = [ P.t__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_3913})
V_850 = Vertex(name = 'V_850',
particles = [ P.u__tilde__, P.u, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_989})
V_851 = Vertex(name = 'V_851',
particles = [ P.u__tilde__, P.u, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_4011})
V_852 = Vertex(name = 'V_852',
particles = [ P.c__tilde__, P.u, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_2647,(0,1):C.GC_2484})
V_853 = Vertex(name = 'V_853',
particles = [ P.t__tilde__, P.u, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_4010,(0,1):C.GC_3847})
V_854 = Vertex(name = 'V_854',
particles = [ P.u__tilde__, P.c, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_1675,(0,1):C.GC_1753})
V_855 = Vertex(name = 'V_855',
particles = [ P.c__tilde__, P.c, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_458})
V_856 = Vertex(name = 'V_856',
particles = [ P.c__tilde__, P.c, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_2485})
V_857 = Vertex(name = 'V_857',
particles = [ P.t__tilde__, P.c, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_3803,(0,1):C.GC_3848})
V_858 = Vertex(name = 'V_858',
particles = [ P.u__tilde__, P.t, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_1708,(0,1):C.GC_1754})
V_859 = Vertex(name = 'V_859',
particles = [ P.c__tilde__, P.t, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS2, L.FFSSS3 ],
couplings = {(0,0):C.GC_2586,(0,1):C.GC_2486})
V_860 = Vertex(name = 'V_860',
particles = [ P.t__tilde__, P.t, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_765})
V_861 = Vertex(name = 'V_861',
particles = [ P.t__tilde__, P.t, P.H, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSSS1 ],
couplings = {(0,0):C.GC_3849})
V_862 = Vertex(name = 'V_862',
particles = [ P.u__tilde__, P.u, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_1003})
V_863 = Vertex(name = 'V_863',
particles = [ P.u__tilde__, P.u, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_4035})
V_864 = Vertex(name = 'V_864',
particles = [ P.c__tilde__, P.u, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_2658,(0,1):C.GC_2519})
V_865 = Vertex(name = 'V_865',
particles = [ P.t__tilde__, P.u, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_4034,(0,1):C.GC_3882})
V_866 = Vertex(name = 'V_866',
particles = [ P.u__tilde__, P.c, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_1686,(0,1):C.GC_1775})
V_867 = Vertex(name = 'V_867',
particles = [ P.c__tilde__, P.c, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_472})
V_868 = Vertex(name = 'V_868',
particles = [ P.c__tilde__, P.c, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_2520})
V_869 = Vertex(name = 'V_869',
particles = [ P.t__tilde__, P.c, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_3814,(0,1):C.GC_3883})
V_870 = Vertex(name = 'V_870',
particles = [ P.u__tilde__, P.t, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_1719,(0,1):C.GC_1776})
V_871 = Vertex(name = 'V_871',
particles = [ P.c__tilde__, P.t, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS2, L.FFSS3 ],
couplings = {(0,0):C.GC_2597,(0,1):C.GC_2521})
V_872 = Vertex(name = 'V_872',
particles = [ P.t__tilde__, P.t, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_781})
V_873 = Vertex(name = 'V_873',
particles = [ P.t__tilde__, P.t, P.H, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFSS1 ],
couplings = {(0,0):C.GC_3884})
V_874 = Vertex(name = 'V_874',
particles = [ P.d__tilde__, P.d, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_519})
V_875 = Vertex(name = 'V_875',
particles = [ P.s__tilde__, P.s, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_692})
V_876 = Vertex(name = 'V_876',
particles = [ P.b__tilde__, P.b, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_422})
V_877 = Vertex(name = 'V_877',
particles = [ P.e__plus__, P.e__minus__, P.H1 ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_576})
V_878 = Vertex(name = 'V_878',
particles = [ P.mu__plus__, P.mu__minus__, P.H1 ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_633})
V_879 = Vertex(name = 'V_879',
particles = [ P.ta__plus__, P.ta__minus__, P.H1 ],
color = [ '1' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_904})
V_880 = Vertex(name = 'V_880',
particles = [ P.t__tilde__, P.t1, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_771})
V_881 = Vertex(name = 'V_881',
particles = [ P.t__tilde__, P.t1, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_772})
V_882 = Vertex(name = 'V_882',
particles = [ P.t1__tilde__, P.t1, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_772})
V_883 = Vertex(name = 'V_883',
particles = [ P.t1__tilde__, P.t1, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_773})
V_884 = Vertex(name = 'V_884',
particles = [ P.t1__tilde__, P.t, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_771})
# ---------------------------------------------------------------------------
# NOTE(review): this section looks like auto-generated UFO model data
# (presumably emitted by FeynRules or a similar tool — confirm before
# hand-editing; edits are normally made upstream and the file regenerated).
# Each statement builds one interaction vertex:
#   particles -- ordered list of fields from the P (particles) module
#   color     -- colour structure string(s), e.g. 'Identity(1,2)', 'T(3,2,1)',
#                'f(-1,3,4)*T(-1,2,1)', or '1' for colour singlets
#   lorentz   -- Lorentz structures from the L module (FFS/FFV/FFVS/FFVV/...)
#   couplings -- {(color_index, lorentz_index): coupling} from the C module
# Vertices sharing the same particle content but different couplings are
# separate entries (presumably split by coupling order — TODO confirm).
# ---------------------------------------------------------------------------

# -- fermion-fermion-scalar (H1) Yukawa-type vertices --
V_885 = Vertex(name = 'V_885',
particles = [ P.t1__tilde__, P.t, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_772})
V_886 = Vertex(name = 'V_886',
particles = [ P.u__tilde__, P.u, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_995})
V_887 = Vertex(name = 'V_887',
particles = [ P.c__tilde__, P.c, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_464})
V_888 = Vertex(name = 'V_888',
particles = [ P.t__tilde__, P.t, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFS1 ],
couplings = {(0,0):C.GC_771})
# -- down-type quark / photon / Higgs contact vertices (FFVS),
#    including flavour-changing (s~d, b~d, d~s, b~s, d~b, s~b) entries --
V_889 = Vertex(name = 'V_889',
particles = [ P.d__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_514})
V_890 = Vertex(name = 'V_890',
particles = [ P.d__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_524})
V_891 = Vertex(name = 'V_891',
particles = [ P.d__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2717})
V_892 = Vertex(name = 'V_892',
particles = [ P.d__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2729})
V_893 = Vertex(name = 'V_893',
particles = [ P.s__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3016,(0,1):C.GC_3068})
V_894 = Vertex(name = 'V_894',
particles = [ P.s__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3036,(0,1):C.GC_3078})
V_895 = Vertex(name = 'V_895',
particles = [ P.b__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3017,(0,1):C.GC_2993})
V_896 = Vertex(name = 'V_896',
particles = [ P.b__tilde__, P.d, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3037,(0,1):C.GC_3003})
V_897 = Vertex(name = 'V_897',
particles = [ P.d__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3445,(0,1):C.GC_3418})
V_898 = Vertex(name = 'V_898',
particles = [ P.d__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3477,(0,1):C.GC_3428})
V_899 = Vertex(name = 'V_899',
particles = [ P.s__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_687})
V_900 = Vertex(name = 'V_900',
particles = [ P.s__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_697})
V_901 = Vertex(name = 'V_901',
particles = [ P.s__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3446})
V_902 = Vertex(name = 'V_902',
particles = [ P.s__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3478})
V_903 = Vertex(name = 'V_903',
particles = [ P.b__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3447,(0,1):C.GC_3395})
V_904 = Vertex(name = 'V_904',
particles = [ P.b__tilde__, P.s, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3479,(0,1):C.GC_3405})
V_905 = Vertex(name = 'V_905',
particles = [ P.d__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4162,(0,1):C.GC_4235})
V_906 = Vertex(name = 'V_906',
particles = [ P.d__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4194,(0,1):C.GC_4245})
V_907 = Vertex(name = 'V_907',
particles = [ P.s__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4163,(0,1):C.GC_4266})
V_908 = Vertex(name = 'V_908',
particles = [ P.s__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4195,(0,1):C.GC_4276})
V_909 = Vertex(name = 'V_909',
particles = [ P.b__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_417})
V_910 = Vertex(name = 'V_910',
particles = [ P.b__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_427})
V_911 = Vertex(name = 'V_911',
particles = [ P.b__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4164})
V_912 = Vertex(name = 'V_912',
particles = [ P.b__tilde__, P.b, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4196})
# -- flavour-changing down-type quark / photon vertices (FFV, no Higgs) --
V_913 = Vertex(name = 'V_913',
particles = [ P.s__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3038,(0,1):C.GC_3079})
V_914 = Vertex(name = 'V_914',
particles = [ P.s__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3058,(0,1):C.GC_3089})
V_915 = Vertex(name = 'V_915',
particles = [ P.b__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3039,(0,1):C.GC_3004})
V_916 = Vertex(name = 'V_916',
particles = [ P.b__tilde__, P.d, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3059,(0,1):C.GC_3014})
V_917 = Vertex(name = 'V_917',
particles = [ P.d__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3480,(0,1):C.GC_3429})
V_918 = Vertex(name = 'V_918',
particles = [ P.d__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3512,(0,1):C.GC_3439})
V_919 = Vertex(name = 'V_919',
particles = [ P.b__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3482,(0,1):C.GC_3406})
V_920 = Vertex(name = 'V_920',
particles = [ P.b__tilde__, P.s, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3514,(0,1):C.GC_3416})
V_921 = Vertex(name = 'V_921',
particles = [ P.d__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4197,(0,1):C.GC_4246})
V_922 = Vertex(name = 'V_922',
particles = [ P.d__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4229,(0,1):C.GC_4256})
V_923 = Vertex(name = 'V_923',
particles = [ P.s__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4198,(0,1):C.GC_4277})
V_924 = Vertex(name = 'V_924',
particles = [ P.s__tilde__, P.b, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4230,(0,1):C.GC_4287})
# -- down-type quark / gluon / Higgs vertices (colour structure T(3,2,1)) --
V_925 = Vertex(name = 'V_925',
particles = [ P.d__tilde__, P.d, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_511})
V_926 = Vertex(name = 'V_926',
particles = [ P.d__tilde__, P.d, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2718})
V_927 = Vertex(name = 'V_927',
particles = [ P.s__tilde__, P.d, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3018,(0,1):C.GC_3069})
V_928 = Vertex(name = 'V_928',
particles = [ P.b__tilde__, P.d, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3019,(0,1):C.GC_2994})
V_929 = Vertex(name = 'V_929',
particles = [ P.d__tilde__, P.s, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3448,(0,1):C.GC_3419})
V_930 = Vertex(name = 'V_930',
particles = [ P.s__tilde__, P.s, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_684})
V_931 = Vertex(name = 'V_931',
particles = [ P.s__tilde__, P.s, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3449})
V_932 = Vertex(name = 'V_932',
particles = [ P.b__tilde__, P.s, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3450,(0,1):C.GC_3396})
V_933 = Vertex(name = 'V_933',
particles = [ P.d__tilde__, P.b, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4165,(0,1):C.GC_4236})
V_934 = Vertex(name = 'V_934',
particles = [ P.s__tilde__, P.b, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4166,(0,1):C.GC_4267})
V_935 = Vertex(name = 'V_935',
particles = [ P.b__tilde__, P.b, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_414})
V_936 = Vertex(name = 'V_936',
particles = [ P.b__tilde__, P.b, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4167})
# -- flavour-changing down-type quark / gluon vertices (FFV) --
V_937 = Vertex(name = 'V_937',
particles = [ P.s__tilde__, P.d, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3040,(0,1):C.GC_3080})
V_938 = Vertex(name = 'V_938',
particles = [ P.b__tilde__, P.d, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3041,(0,1):C.GC_3005})
V_939 = Vertex(name = 'V_939',
particles = [ P.d__tilde__, P.s, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3483,(0,1):C.GC_3430})
V_940 = Vertex(name = 'V_940',
particles = [ P.b__tilde__, P.s, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3485,(0,1):C.GC_3407})
V_941 = Vertex(name = 'V_941',
particles = [ P.d__tilde__, P.b, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4200,(0,1):C.GC_4247})
V_942 = Vertex(name = 'V_942',
particles = [ P.s__tilde__, P.b, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4201,(0,1):C.GC_4278})
# -- down-type quark / two-gluon / Higgs vertices
#    (colour structure f(-1,3,4)*T(-1,2,1)) --
V_943 = Vertex(name = 'V_943',
particles = [ P.d__tilde__, P.d, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_518})
V_944 = Vertex(name = 'V_944',
particles = [ P.d__tilde__, P.d, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_2724})
V_945 = Vertex(name = 'V_945',
particles = [ P.s__tilde__, P.d, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3028,(0,1):C.GC_3074})
V_946 = Vertex(name = 'V_946',
particles = [ P.b__tilde__, P.d, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3029,(0,1):C.GC_2999})
V_947 = Vertex(name = 'V_947',
particles = [ P.d__tilde__, P.s, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3464,(0,1):C.GC_3424})
V_948 = Vertex(name = 'V_948',
particles = [ P.s__tilde__, P.s, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_691})
V_949 = Vertex(name = 'V_949',
particles = [ P.s__tilde__, P.s, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_3465})
V_950 = Vertex(name = 'V_950',
particles = [ P.b__tilde__, P.s, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3466,(0,1):C.GC_3401})
V_951 = Vertex(name = 'V_951',
particles = [ P.d__tilde__, P.b, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4181,(0,1):C.GC_4241})
V_952 = Vertex(name = 'V_952',
particles = [ P.s__tilde__, P.b, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4182,(0,1):C.GC_4272})
V_953 = Vertex(name = 'V_953',
particles = [ P.b__tilde__, P.b, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_421})
V_954 = Vertex(name = 'V_954',
particles = [ P.b__tilde__, P.b, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_4183})
# -- down-type quark / two-gluon vertices (FFVV, no Higgs) --
V_955 = Vertex(name = 'V_955',
particles = [ P.d__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_532})
V_956 = Vertex(name = 'V_956',
particles = [ P.d__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_2737})
V_957 = Vertex(name = 'V_957',
particles = [ P.s__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3050,(0,1):C.GC_3085})
V_958 = Vertex(name = 'V_958',
particles = [ P.b__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3051,(0,1):C.GC_3010})
V_959 = Vertex(name = 'V_959',
particles = [ P.d__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3499,(0,1):C.GC_3435})
V_960 = Vertex(name = 'V_960',
particles = [ P.s__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_705})
V_961 = Vertex(name = 'V_961',
particles = [ P.s__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_3500})
V_962 = Vertex(name = 'V_962',
particles = [ P.b__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3501,(0,1):C.GC_3412})
V_963 = Vertex(name = 'V_963',
particles = [ P.d__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4216,(0,1):C.GC_4252})
V_964 = Vertex(name = 'V_964',
particles = [ P.s__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4217,(0,1):C.GC_4283})
V_965 = Vertex(name = 'V_965',
particles = [ P.b__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_435})
V_966 = Vertex(name = 'V_966',
particles = [ P.b__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_436})
V_966 = Vertex(name = 'V_966',
particles = [ P.b__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_4218})
# -- charged-current up/down quark / photon / W+ (/Higgs) vertices --
V_967 = Vertex(name = 'V_967',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_517,(0,1):C.GC_993})
V_968 = Vertex(name = 'V_968',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2723,(0,1):C.GC_4018})
V_969 = Vertex(name = 'V_969',
particles = [ P.c__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3026,(0,1):C.GC_2493})
V_970 = Vertex(name = 'V_970',
particles = [ P.t__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3027,(0,1):C.GC_3856})
V_971 = Vertex(name = 'V_971',
particles = [ P.u__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3460,(0,1):C.GC_1759})
V_972 = Vertex(name = 'V_972',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_690,(0,1):C.GC_462})
V_973 = Vertex(name = 'V_973',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3461,(0,1):C.GC_2494})
V_974 = Vertex(name = 'V_974',
particles = [ P.t__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3463,(0,1):C.GC_3857})
V_975 = Vertex(name = 'V_975',
particles = [ P.u__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4177,(0,1):C.GC_1760})
V_976 = Vertex(name = 'V_976',
particles = [ P.c__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4178,(0,1):C.GC_2496})
V_977 = Vertex(name = 'V_977',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_420,(0,1):C.GC_769})
V_978 = Vertex(name = 'V_978',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4179,(0,1):C.GC_3858})
V_979 = Vertex(name = 'V_979',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_531,(0,1):C.GC_1007})
V_980 = Vertex(name = 'V_980',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2736,(0,1):C.GC_4042})
V_981 = Vertex(name = 'V_981',
particles = [ P.c__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3048,(0,1):C.GC_2528})
V_982 = Vertex(name = 'V_982',
particles = [ P.t__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3049,(0,1):C.GC_3891})
V_983 = Vertex(name = 'V_983',
particles = [ P.u__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3495,(0,1):C.GC_1781})
V_984 = Vertex(name = 'V_984',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_704,(0,1):C.GC_476})
V_985 = Vertex(name = 'V_985',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3496,(0,1):C.GC_2529})
V_986 = Vertex(name = 'V_986',
particles = [ P.t__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3498,(0,1):C.GC_3892})
V_987 = Vertex(name = 'V_987',
particles = [ P.u__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4212,(0,1):C.GC_1782})
V_988 = Vertex(name = 'V_988',
particles = [ P.c__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4213,(0,1):C.GC_2531})
V_989 = Vertex(name = 'V_989',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_434,(0,1):C.GC_785})
V_990 = Vertex(name = 'V_990',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4214,(0,1):C.GC_3893})
# -- down-type quark / W-W (/Higgs) vertices --
V_991 = Vertex(name = 'V_991',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_520})
V_992 = Vertex(name = 'V_992',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_2725})
V_993 = Vertex(name = 'V_993',
particles = [ P.s__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3030,(0,1):C.GC_3075})
V_994 = Vertex(name = 'V_994',
particles = [ P.b__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3031,(0,1):C.GC_3000})
V_995 = Vertex(name = 'V_995',
particles = [ P.d__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3467,(0,1):C.GC_3425})
V_996 = Vertex(name = 'V_996',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_693})
V_997 = Vertex(name = 'V_997',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_3468})
V_998 = Vertex(name = 'V_998',
particles = [ P.b__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3469,(0,1):C.GC_3402})
V_999 = Vertex(name = 'V_999',
particles = [ P.d__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4184,(0,1):C.GC_4242})
V_1000 = Vertex(name = 'V_1000',
particles = [ P.s__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4185,(0,1):C.GC_4273})
V_1001 = Vertex(name = 'V_1001',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_423})
V_1002 = Vertex(name = 'V_1002',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_4186})
V_1003 = Vertex(name = 'V_1003',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_533})
V_1004 = Vertex(name = 'V_1004',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_2738})
# NOTE(review): V_1005 is the only s~ d W- W+ entry with a third lorentz
# structure (FFVV3/FFVV5) relative to its neighbours — presumably intended;
# confirm against the generator output if this file was hand-merged.
V_1005 = Vertex(name = 'V_1005',
particles = [ P.s__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV3, L.FFVV5 ],
couplings = {(0,0):C.GC_3052,(0,1):C.GC_3086,(0,2):C.GC_1950})
V_1006 = Vertex(name = 'V_1006',
particles = [ P.b__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3053,(0,1):C.GC_3011})
V_1007 = Vertex(name = 'V_1007',
particles = [ P.d__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3502,(0,1):C.GC_3436})
V_1008 = Vertex(name = 'V_1008',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_706})
V_1009 = Vertex(name = 'V_1009',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_3503})
V_1010 = Vertex(name = 'V_1010',
particles = [ P.b__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3504,(0,1):C.GC_3413})
V_1011 = Vertex(name = 'V_1011',
particles = [ P.d__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4219,(0,1):C.GC_4253})
V_1012 = Vertex(name = 'V_1012',
particles = [ P.s__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4220,(0,1):C.GC_4284})
V_1013 = Vertex(name = 'V_1013',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_436})
V_1014 = Vertex(name = 'V_1014',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_4221})
# -- charged-lepton / neutrino vertices (colour singlets, color = '1') --
V_1015 = Vertex(name = 'V_1015',
particles = [ P.e__plus__, P.e__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_572})
V_1016 = Vertex(name = 'V_1016',
particles = [ P.e__plus__, P.e__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_581})
V_1017 = Vertex(name = 'V_1017',
particles = [ P.mu__plus__, P.mu__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_629})
V_1018 = Vertex(name = 'V_1018',
particles = [ P.mu__plus__, P.mu__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_638})
V_1019 = Vertex(name = 'V_1019',
particles = [ P.ta__plus__, P.ta__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_900})
V_1020 = Vertex(name = 'V_1020',
particles = [ P.ta__plus__, P.ta__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_909})
V_1021 = Vertex(name = 'V_1021',
particles = [ P.ve__tilde__, P.e__minus__, P.a, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_575})
V_1022 = Vertex(name = 'V_1022',
particles = [ P.vm__tilde__, P.mu__minus__, P.a, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_632})
V_1023 = Vertex(name = 'V_1023',
particles = [ P.vt__tilde__, P.ta__minus__, P.a, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_903})
V_1024 = Vertex(name = 'V_1024',
particles = [ P.ve__tilde__, P.e__minus__, P.a, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_587})
V_1025 = Vertex(name = 'V_1025',
particles = [ P.vm__tilde__, P.mu__minus__, P.a, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_644})
V_1026 = Vertex(name = 'V_1026',
particles = [ P.vt__tilde__, P.ta__minus__, P.a, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_915})
V_1027 = Vertex(name = 'V_1027',
particles = [ P.e__plus__, P.e__minus__, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_577})
V_1028 = Vertex(name = 'V_1028',
particles = [ P.mu__plus__, P.mu__minus__, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_634})
V_1029 = Vertex(name = 'V_1029',
particles = [ P.ta__plus__, P.ta__minus__, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_905})
V_1030 = Vertex(name = 'V_1030',
particles = [ P.e__plus__, P.e__minus__, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_588})
V_1031 = Vertex(name = 'V_1031',
particles = [ P.mu__plus__, P.mu__minus__, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_645})
V_1032 = Vertex(name = 'V_1032',
particles = [ P.ta__plus__, P.ta__minus__, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_916})
# -- up-type quark / photon / Higgs vertices (FFVS),
#    including flavour-changing (c~u, t~u, u~c, t~c, u~t, c~t) entries --
V_1033 = Vertex(name = 'V_1033',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_987})
V_1034 = Vertex(name = 'V_1034',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_1000})
V_1035 = Vertex(name = 'V_1035',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4007})
V_1036 = Vertex(name = 'V_1036',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4029})
V_1037 = Vertex(name = 'V_1037',
particles = [ P.c__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2645,(0,1):C.GC_2478})
V_1038 = Vertex(name = 'V_1038',
particles = [ P.c__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2655,(0,1):C.GC_2510})
V_1039 = Vertex(name = 'V_1039',
particles = [ P.t__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4006,(0,1):C.GC_3841})
V_1040 = Vertex(name = 'V_1040',
particles = [ P.t__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4028,(0,1):C.GC_3873})
V_1041 = Vertex(name = 'V_1041',
particles = [ P.u__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1673,(0,1):C.GC_1749})
V_1042 = Vertex(name = 'V_1042',
particles = [ P.u__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1683,(0,1):C.GC_1769})
V_1043 = Vertex(name = 'V_1043',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_456})
V_1044 = Vertex(name = 'V_1044',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_469})
V_1045 = Vertex(name = 'V_1045',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2479})
V_1046 = Vertex(name = 'V_1046',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2511})
V_1047 = Vertex(name = 'V_1047',
particles = [ P.t__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3801,(0,1):C.GC_3842})
V_1048 = Vertex(name = 'V_1048',
particles = [ P.t__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3811,(0,1):C.GC_3874})
V_1049 = Vertex(name = 'V_1049',
particles = [ P.u__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1706,(0,1):C.GC_1750})
V_1050 = Vertex(name = 'V_1050',
particles = [ P.u__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1716,(0,1):C.GC_1770})
# NOTE(review): V_1051 carries an extra FFVS6 structure its pair V_1052
# lacks — presumably a distinct operator contribution; confirm upstream.
V_1051 = Vertex(name = 'V_1051',
particles = [ P.c__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5, L.FFVS6 ],
couplings = {(0,0):C.GC_2584,(0,1):C.GC_2480,(0,2):C.GC_2291})
V_1052 = Vertex(name = 'V_1052',
particles = [ P.c__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2594,(0,1):C.GC_2512})
V_1053 = Vertex(name = 'V_1053',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_763})
V_1054 = Vertex(name = 'V_1054',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_778})
V_1055 = Vertex(name = 'V_1055',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3843})
V_1056 = Vertex(name = 'V_1056',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3875})
# -- flavour-changing up-type quark / photon vertices (FFV) --
V_1057 = Vertex(name = 'V_1057',
particles = [ P.c__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2656,(0,1):C.GC_2513})
V_1058 = Vertex(name = 'V_1058',
particles = [ P.c__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2666,(0,1):C.GC_2545})
V_1059 = Vertex(name = 'V_1059',
particles = [ P.t__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4030,(0,1):C.GC_3876})
V_1060 = Vertex(name = 'V_1060',
particles = [ P.t__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4052,(0,1):C.GC_3908})
V_1061 = Vertex(name = 'V_1061',
particles = [ P.u__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1684,(0,1):C.GC_1771})
V_1062 = Vertex(name = 'V_1062',
particles = [ P.u__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1694,(0,1):C.GC_1791})
V_1063 = Vertex(name = 'V_1063',
particles = [ P.t__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3812,(0,1):C.GC_3877})
V_1064 = Vertex(name = 'V_1064',
particles = [ P.t__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3822,(0,1):C.GC_3909})
V_1065 = Vertex(name = 'V_1065',
particles = [ P.u__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1717,(0,1):C.GC_1772})
V_1066 = Vertex(name = 'V_1066',
particles = [ P.u__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1727,(0,1):C.GC_1792})
V_1067 = Vertex(name = 'V_1067',
particles = [ P.c__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2595,(0,1):C.GC_2515})
V_1068 = Vertex(name = 'V_1068',
particles = [ P.c__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2605,(0,1):C.GC_2547})
# -- up-type quark / gluon / Higgs vertices --
V_1069 = Vertex(name = 'V_1069',
particles = [ P.u__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_988})
V_1070 = Vertex(name = 'V_1070',
particles = [ P.u__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4009})
V_1071 = Vertex(name = 'V_1071',
particles = [ P.c__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2646,(0,1):C.GC_2481})
V_1072 = Vertex(name = 'V_1072',
particles = [ P.t__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4008,(0,1):C.GC_3844})
V_1073 = Vertex(name = 'V_1073',
particles = [ P.u__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1674,(0,1):C.GC_1751})
V_1074 = Vertex(name = 'V_1074',
particles = [ P.c__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_457})
V_1075 = Vertex(name = 'V_1075',
particles = [ P.c__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2482})
V_1076 = Vertex(name = 'V_1076',
particles = [ P.t__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3802,(0,1):C.GC_3845})
V_1077 = Vertex(name = 'V_1077',
particles = [ P.u__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1707,(0,1):C.GC_1752})
V_1078 = Vertex(name = 'V_1078',
particles = [ P.c__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2585,(0,1):C.GC_2483})
V_1079 = Vertex(name = 'V_1079',
particles = [ P.t__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_764})
V_1080 = Vertex(name = 'V_1080',
particles = [ P.t__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3846})
# -- flavour-changing up-type quark / gluon vertices (FFV) --
V_1081 = Vertex(name = 'V_1081',
particles = [ P.c__tilde__, P.u, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2657,(0,1):C.GC_2516})
V_1082 = Vertex(name = 'V_1082',
particles = [ P.t__tilde__, P.u, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4032,(0,1):C.GC_3879})
V_1083 = Vertex(name = 'V_1083',
particles = [ P.u__tilde__, P.c, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1685,(0,1):C.GC_1773})
V_1084 = Vertex(name = 'V_1084',
particles = [ P.t__tilde__, P.c, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3813,(0,1):C.GC_3880})
V_1085 = Vertex(name = 'V_1085',
particles = [ P.u__tilde__, P.t, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1718,(0,1):C.GC_1774})
V_1086 = Vertex(name = 'V_1086',
particles = [ P.c__tilde__, P.t, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2596,(0,1):C.GC_2518})
particles = [ P.s__tilde__, P.b, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4182,(0,1):C.GC_4272})
V_953 = Vertex(name = 'V_953',
particles = [ P.b__tilde__, P.b, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_421})
V_954 = Vertex(name = 'V_954',
particles = [ P.b__tilde__, P.b, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_4183})
V_955 = Vertex(name = 'V_955',
particles = [ P.d__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_532})
V_956 = Vertex(name = 'V_956',
particles = [ P.d__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_2737})
V_957 = Vertex(name = 'V_957',
particles = [ P.s__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3050,(0,1):C.GC_3085})
V_958 = Vertex(name = 'V_958',
particles = [ P.b__tilde__, P.d, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3051,(0,1):C.GC_3010})
V_959 = Vertex(name = 'V_959',
particles = [ P.d__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3499,(0,1):C.GC_3435})
V_960 = Vertex(name = 'V_960',
particles = [ P.s__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_705})
V_961 = Vertex(name = 'V_961',
particles = [ P.s__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_3500})
V_962 = Vertex(name = 'V_962',
particles = [ P.b__tilde__, P.s, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3501,(0,1):C.GC_3412})
V_963 = Vertex(name = 'V_963',
particles = [ P.d__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4216,(0,1):C.GC_4252})
V_964 = Vertex(name = 'V_964',
particles = [ P.s__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4217,(0,1):C.GC_4283})
V_965 = Vertex(name = 'V_965',
particles = [ P.b__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_435})
V_966 = Vertex(name = 'V_966',
particles = [ P.b__tilde__, P.b, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_4218})
V_967 = Vertex(name = 'V_967',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_517,(0,1):C.GC_993})
V_968 = Vertex(name = 'V_968',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2723,(0,1):C.GC_4018})
V_969 = Vertex(name = 'V_969',
particles = [ P.c__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3026,(0,1):C.GC_2493})
V_970 = Vertex(name = 'V_970',
particles = [ P.t__tilde__, P.d, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3027,(0,1):C.GC_3856})
V_971 = Vertex(name = 'V_971',
particles = [ P.u__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3460,(0,1):C.GC_1759})
V_972 = Vertex(name = 'V_972',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_690,(0,1):C.GC_462})
V_973 = Vertex(name = 'V_973',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3461,(0,1):C.GC_2494})
V_974 = Vertex(name = 'V_974',
particles = [ P.t__tilde__, P.s, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3463,(0,1):C.GC_3857})
V_975 = Vertex(name = 'V_975',
particles = [ P.u__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4177,(0,1):C.GC_1760})
V_976 = Vertex(name = 'V_976',
particles = [ P.c__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4178,(0,1):C.GC_2496})
V_977 = Vertex(name = 'V_977',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_420,(0,1):C.GC_769})
V_978 = Vertex(name = 'V_978',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4179,(0,1):C.GC_3858})
V_979 = Vertex(name = 'V_979',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_531,(0,1):C.GC_1007})
V_980 = Vertex(name = 'V_980',
particles = [ P.u__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2736,(0,1):C.GC_4042})
V_981 = Vertex(name = 'V_981',
particles = [ P.c__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3048,(0,1):C.GC_2528})
V_982 = Vertex(name = 'V_982',
particles = [ P.t__tilde__, P.d, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3049,(0,1):C.GC_3891})
V_983 = Vertex(name = 'V_983',
particles = [ P.u__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3495,(0,1):C.GC_1781})
V_984 = Vertex(name = 'V_984',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_704,(0,1):C.GC_476})
V_985 = Vertex(name = 'V_985',
particles = [ P.c__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3496,(0,1):C.GC_2529})
V_986 = Vertex(name = 'V_986',
particles = [ P.t__tilde__, P.s, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3498,(0,1):C.GC_3892})
V_987 = Vertex(name = 'V_987',
particles = [ P.u__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4212,(0,1):C.GC_1782})
V_988 = Vertex(name = 'V_988',
particles = [ P.c__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4213,(0,1):C.GC_2531})
V_989 = Vertex(name = 'V_989',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_434,(0,1):C.GC_785})
V_990 = Vertex(name = 'V_990',
particles = [ P.t__tilde__, P.b, P.a, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4214,(0,1):C.GC_3893})
V_991 = Vertex(name = 'V_991',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_520})
V_992 = Vertex(name = 'V_992',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_2725})
V_993 = Vertex(name = 'V_993',
particles = [ P.s__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3030,(0,1):C.GC_3075})
V_994 = Vertex(name = 'V_994',
particles = [ P.b__tilde__, P.d, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3031,(0,1):C.GC_3000})
V_995 = Vertex(name = 'V_995',
particles = [ P.d__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3467,(0,1):C.GC_3425})
V_996 = Vertex(name = 'V_996',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_693})
V_997 = Vertex(name = 'V_997',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_3468})
V_998 = Vertex(name = 'V_998',
particles = [ P.b__tilde__, P.s, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3469,(0,1):C.GC_3402})
V_999 = Vertex(name = 'V_999',
particles = [ P.d__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4184,(0,1):C.GC_4242})
V_1000 = Vertex(name = 'V_1000',
particles = [ P.s__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4185,(0,1):C.GC_4273})
V_1001 = Vertex(name = 'V_1001',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_423})
V_1002 = Vertex(name = 'V_1002',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_4186})
V_1003 = Vertex(name = 'V_1003',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_533})
V_1004 = Vertex(name = 'V_1004',
particles = [ P.d__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_2738})
V_1005 = Vertex(name = 'V_1005',
particles = [ P.s__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV3, L.FFVV5 ],
couplings = {(0,0):C.GC_3052,(0,1):C.GC_3086,(0,2):C.GC_1950})
V_1006 = Vertex(name = 'V_1006',
particles = [ P.b__tilde__, P.d, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3053,(0,1):C.GC_3011})
V_1007 = Vertex(name = 'V_1007',
particles = [ P.d__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3502,(0,1):C.GC_3436})
V_1008 = Vertex(name = 'V_1008',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_706})
V_1009 = Vertex(name = 'V_1009',
particles = [ P.s__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_3503})
V_1010 = Vertex(name = 'V_1010',
particles = [ P.b__tilde__, P.s, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3504,(0,1):C.GC_3413})
V_1011 = Vertex(name = 'V_1011',
particles = [ P.d__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4219,(0,1):C.GC_4253})
V_1012 = Vertex(name = 'V_1012',
particles = [ P.s__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4220,(0,1):C.GC_4284})
V_1013 = Vertex(name = 'V_1013',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_436})
V_1014 = Vertex(name = 'V_1014',
particles = [ P.b__tilde__, P.b, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_4221})
V_1015 = Vertex(name = 'V_1015',
particles = [ P.e__plus__, P.e__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_572})
V_1016 = Vertex(name = 'V_1016',
particles = [ P.e__plus__, P.e__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_581})
V_1017 = Vertex(name = 'V_1017',
particles = [ P.mu__plus__, P.mu__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_629})
V_1018 = Vertex(name = 'V_1018',
particles = [ P.mu__plus__, P.mu__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_638})
V_1019 = Vertex(name = 'V_1019',
particles = [ P.ta__plus__, P.ta__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_900})
V_1020 = Vertex(name = 'V_1020',
particles = [ P.ta__plus__, P.ta__minus__, P.a, P.H ],
color = [ '1' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_909})
V_1021 = Vertex(name = 'V_1021',
particles = [ P.ve__tilde__, P.e__minus__, P.a, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_575})
V_1022 = Vertex(name = 'V_1022',
particles = [ P.vm__tilde__, P.mu__minus__, P.a, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_632})
V_1023 = Vertex(name = 'V_1023',
particles = [ P.vt__tilde__, P.ta__minus__, P.a, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_903})
V_1024 = Vertex(name = 'V_1024',
particles = [ P.ve__tilde__, P.e__minus__, P.a, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_587})
V_1025 = Vertex(name = 'V_1025',
particles = [ P.vm__tilde__, P.mu__minus__, P.a, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_644})
V_1026 = Vertex(name = 'V_1026',
particles = [ P.vt__tilde__, P.ta__minus__, P.a, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_915})
V_1027 = Vertex(name = 'V_1027',
particles = [ P.e__plus__, P.e__minus__, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_577})
V_1028 = Vertex(name = 'V_1028',
particles = [ P.mu__plus__, P.mu__minus__, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_634})
V_1029 = Vertex(name = 'V_1029',
particles = [ P.ta__plus__, P.ta__minus__, P.W__minus__, P.W__plus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_905})
V_1030 = Vertex(name = 'V_1030',
particles = [ P.e__plus__, P.e__minus__, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_588})
V_1031 = Vertex(name = 'V_1031',
particles = [ P.mu__plus__, P.mu__minus__, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_645})
V_1032 = Vertex(name = 'V_1032',
particles = [ P.ta__plus__, P.ta__minus__, P.W__minus__, P.W__plus__ ],
color = [ '1' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_916})
V_1033 = Vertex(name = 'V_1033',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_987})
V_1034 = Vertex(name = 'V_1034',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_1000})
V_1035 = Vertex(name = 'V_1035',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4007})
V_1036 = Vertex(name = 'V_1036',
particles = [ P.u__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4029})
V_1037 = Vertex(name = 'V_1037',
particles = [ P.c__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2645,(0,1):C.GC_2478})
V_1038 = Vertex(name = 'V_1038',
particles = [ P.c__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2655,(0,1):C.GC_2510})
V_1039 = Vertex(name = 'V_1039',
particles = [ P.t__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4006,(0,1):C.GC_3841})
V_1040 = Vertex(name = 'V_1040',
particles = [ P.t__tilde__, P.u, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4028,(0,1):C.GC_3873})
V_1041 = Vertex(name = 'V_1041',
particles = [ P.u__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1673,(0,1):C.GC_1749})
V_1042 = Vertex(name = 'V_1042',
particles = [ P.u__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1683,(0,1):C.GC_1769})
V_1043 = Vertex(name = 'V_1043',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_456})
V_1044 = Vertex(name = 'V_1044',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_469})
V_1045 = Vertex(name = 'V_1045',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2479})
V_1046 = Vertex(name = 'V_1046',
particles = [ P.c__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2511})
V_1047 = Vertex(name = 'V_1047',
particles = [ P.t__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3801,(0,1):C.GC_3842})
V_1048 = Vertex(name = 'V_1048',
particles = [ P.t__tilde__, P.c, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3811,(0,1):C.GC_3874})
V_1049 = Vertex(name = 'V_1049',
particles = [ P.u__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1706,(0,1):C.GC_1750})
V_1050 = Vertex(name = 'V_1050',
particles = [ P.u__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1716,(0,1):C.GC_1770})
V_1051 = Vertex(name = 'V_1051',
particles = [ P.c__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5, L.FFVS6 ],
couplings = {(0,0):C.GC_2584,(0,1):C.GC_2480,(0,2):C.GC_2291})
V_1052 = Vertex(name = 'V_1052',
particles = [ P.c__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2594,(0,1):C.GC_2512})
V_1053 = Vertex(name = 'V_1053',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_763})
V_1054 = Vertex(name = 'V_1054',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_778})
V_1055 = Vertex(name = 'V_1055',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3843})
V_1056 = Vertex(name = 'V_1056',
particles = [ P.t__tilde__, P.t, P.a, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3875})
V_1057 = Vertex(name = 'V_1057',
particles = [ P.c__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2656,(0,1):C.GC_2513})
V_1058 = Vertex(name = 'V_1058',
particles = [ P.c__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2666,(0,1):C.GC_2545})
V_1059 = Vertex(name = 'V_1059',
particles = [ P.t__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4030,(0,1):C.GC_3876})
V_1060 = Vertex(name = 'V_1060',
particles = [ P.t__tilde__, P.u, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4052,(0,1):C.GC_3908})
V_1061 = Vertex(name = 'V_1061',
particles = [ P.u__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1684,(0,1):C.GC_1771})
V_1062 = Vertex(name = 'V_1062',
particles = [ P.u__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1694,(0,1):C.GC_1791})
V_1063 = Vertex(name = 'V_1063',
particles = [ P.t__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3812,(0,1):C.GC_3877})
V_1064 = Vertex(name = 'V_1064',
particles = [ P.t__tilde__, P.c, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3822,(0,1):C.GC_3909})
V_1065 = Vertex(name = 'V_1065',
particles = [ P.u__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1717,(0,1):C.GC_1772})
V_1066 = Vertex(name = 'V_1066',
particles = [ P.u__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1727,(0,1):C.GC_1792})
V_1067 = Vertex(name = 'V_1067',
particles = [ P.c__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2595,(0,1):C.GC_2515})
V_1068 = Vertex(name = 'V_1068',
particles = [ P.c__tilde__, P.t, P.a ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2605,(0,1):C.GC_2547})
V_1069 = Vertex(name = 'V_1069',
particles = [ P.u__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_988})
V_1070 = Vertex(name = 'V_1070',
particles = [ P.u__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_4009})
V_1071 = Vertex(name = 'V_1071',
particles = [ P.c__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2646,(0,1):C.GC_2481})
V_1072 = Vertex(name = 'V_1072',
particles = [ P.t__tilde__, P.u, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_4008,(0,1):C.GC_3844})
V_1073 = Vertex(name = 'V_1073',
particles = [ P.u__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1674,(0,1):C.GC_1751})
V_1074 = Vertex(name = 'V_1074',
particles = [ P.c__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_457})
V_1075 = Vertex(name = 'V_1075',
particles = [ P.c__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_2482})
V_1076 = Vertex(name = 'V_1076',
particles = [ P.t__tilde__, P.c, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_3802,(0,1):C.GC_3845})
V_1077 = Vertex(name = 'V_1077',
particles = [ P.u__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_1707,(0,1):C.GC_1752})
V_1078 = Vertex(name = 'V_1078',
particles = [ P.c__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS2, L.FFVS5 ],
couplings = {(0,0):C.GC_2585,(0,1):C.GC_2483})
V_1079 = Vertex(name = 'V_1079',
particles = [ P.t__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_764})
V_1080 = Vertex(name = 'V_1080',
particles = [ P.t__tilde__, P.t, P.g, P.H ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFVS4 ],
couplings = {(0,0):C.GC_3846})
V_1081 = Vertex(name = 'V_1081',
particles = [ P.c__tilde__, P.u, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2657,(0,1):C.GC_2516})
V_1082 = Vertex(name = 'V_1082',
particles = [ P.t__tilde__, P.u, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_4032,(0,1):C.GC_3879})
V_1083 = Vertex(name = 'V_1083',
particles = [ P.u__tilde__, P.c, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1685,(0,1):C.GC_1773})
V_1084 = Vertex(name = 'V_1084',
particles = [ P.t__tilde__, P.c, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_3813,(0,1):C.GC_3880})
V_1085 = Vertex(name = 'V_1085',
particles = [ P.u__tilde__, P.t, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_1718,(0,1):C.GC_1774})
V_1086 = Vertex(name = 'V_1086',
particles = [ P.c__tilde__, P.t, P.g ],
color = [ 'T(3,2,1)' ],
lorentz = [ L.FFV5, L.FFV8 ],
couplings = {(0,0):C.GC_2596,(0,1):C.GC_2518})
V_1087 = Vertex(name = 'V_1087',
particles = [ P.u__tilde__, P.u, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_994})
V_1088 = Vertex(name = 'V_1088',
particles = [ P.u__tilde__, P.u, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_4020})
V_1089 = Vertex(name = 'V_1089',
particles = [ P.c__tilde__, P.u, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2651,(0,1):C.GC_2497})
V_1090 = Vertex(name = 'V_1090',
particles = [ P.t__tilde__, P.u, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4019,(0,1):C.GC_3860})
V_1091 = Vertex(name = 'V_1091',
particles = [ P.u__tilde__, P.c, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1679,(0,1):C.GC_1761})
V_1092 = Vertex(name = 'V_1092',
particles = [ P.c__tilde__, P.c, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_463})
V_1093 = Vertex(name = 'V_1093',
particles = [ P.c__tilde__, P.c, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_2498})
V_1094 = Vertex(name = 'V_1094',
particles = [ P.t__tilde__, P.c, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3807,(0,1):C.GC_3861})
V_1095 = Vertex(name = 'V_1095',
particles = [ P.u__tilde__, P.t, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1712,(0,1):C.GC_1762})
V_1096 = Vertex(name = 'V_1096',
particles = [ P.c__tilde__, P.t, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2590,(0,1):C.GC_2499})
V_1097 = Vertex(name = 'V_1097',
particles = [ P.t__tilde__, P.t, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_770})
V_1098 = Vertex(name = 'V_1098',
particles = [ P.t__tilde__, P.t, P.g, P.g, P.H ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_3862})
V_1099 = Vertex(name = 'V_1099',
particles = [ P.u__tilde__, P.u, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_1008})
V_1100 = Vertex(name = 'V_1100',
particles = [ P.u__tilde__, P.u, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_4044})
V_1101 = Vertex(name = 'V_1101',
particles = [ P.c__tilde__, P.u, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2662,(0,1):C.GC_2532})
V_1102 = Vertex(name = 'V_1102',
particles = [ P.t__tilde__, P.u, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4043,(0,1):C.GC_3895})
V_1103 = Vertex(name = 'V_1103',
particles = [ P.u__tilde__, P.c, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1690,(0,1):C.GC_1783})
V_1104 = Vertex(name = 'V_1104',
particles = [ P.c__tilde__, P.c, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_477})
V_1105 = Vertex(name = 'V_1105',
particles = [ P.c__tilde__, P.c, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_2533})
V_1106 = Vertex(name = 'V_1106',
particles = [ P.t__tilde__, P.c, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3818,(0,1):C.GC_3896})
V_1107 = Vertex(name = 'V_1107',
particles = [ P.u__tilde__, P.t, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1723,(0,1):C.GC_1784})
V_1108 = Vertex(name = 'V_1108',
particles = [ P.c__tilde__, P.t, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2601,(0,1):C.GC_2534})
V_1109 = Vertex(name = 'V_1109',
particles = [ P.t__tilde__, P.t, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_786})
V_1110 = Vertex(name = 'V_1110',
particles = [ P.t__tilde__, P.t, P.g, P.g ],
color = [ 'f(-1,3,4)*T(-1,2,1)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_3897})
# NOTE(review): auto-generated UFO model code — comments only; code tokens
# are byte-identical to the generated output.
# --- Quark photon--W__minus__(-Higgs) contact vertices, color-diagonal ---
V_1111 = Vertex(name = 'V_1111',
particles = [ P.d__tilde__, P.u, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_992,(0,1):C.GC_516})
V_1112 = Vertex(name = 'V_1112',
particles = [ P.d__tilde__, P.u, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4017,(0,1):C.GC_2722})
V_1113 = Vertex(name = 'V_1113',
particles = [ P.s__tilde__, P.u, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2650,(0,1):C.GC_3073})
V_1114 = Vertex(name = 'V_1114',
particles = [ P.b__tilde__, P.u, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4016,(0,1):C.GC_2998})
V_1115 = Vertex(name = 'V_1115',
particles = [ P.d__tilde__, P.c, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1678,(0,1):C.GC_3423})
V_1116 = Vertex(name = 'V_1116',
particles = [ P.s__tilde__, P.c, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_461,(0,1):C.GC_689})
V_1117 = Vertex(name = 'V_1117',
particles = [ P.s__tilde__, P.c, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2495,(0,1):C.GC_3462})
V_1118 = Vertex(name = 'V_1118',
particles = [ P.b__tilde__, P.c, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3806,(0,1):C.GC_3400})
V_1119 = Vertex(name = 'V_1119',
particles = [ P.d__tilde__, P.t, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1711,(0,1):C.GC_4240})
V_1120 = Vertex(name = 'V_1120',
particles = [ P.s__tilde__, P.t, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2589,(0,1):C.GC_4271})
V_1121 = Vertex(name = 'V_1121',
particles = [ P.b__tilde__, P.t, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_768,(0,1):C.GC_419})
V_1122 = Vertex(name = 'V_1122',
particles = [ P.b__tilde__, P.t, P.a, P.W__minus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3859,(0,1):C.GC_4180})
V_1123 = Vertex(name = 'V_1123',
particles = [ P.d__tilde__, P.u, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1006,(0,1):C.GC_530})
V_1124 = Vertex(name = 'V_1124',
particles = [ P.d__tilde__, P.u, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4041,(0,1):C.GC_2735})
V_1125 = Vertex(name = 'V_1125',
particles = [ P.s__tilde__, P.u, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2661,(0,1):C.GC_3084})
V_1126 = Vertex(name = 'V_1126',
particles = [ P.b__tilde__, P.u, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4040,(0,1):C.GC_3009})
V_1127 = Vertex(name = 'V_1127',
particles = [ P.d__tilde__, P.c, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1689,(0,1):C.GC_3434})
V_1128 = Vertex(name = 'V_1128',
particles = [ P.s__tilde__, P.c, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_475,(0,1):C.GC_703})
V_1129 = Vertex(name = 'V_1129',
particles = [ P.s__tilde__, P.c, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2530,(0,1):C.GC_3497})
V_1130 = Vertex(name = 'V_1130',
particles = [ P.b__tilde__, P.c, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3817,(0,1):C.GC_3411})
V_1131 = Vertex(name = 'V_1131',
particles = [ P.d__tilde__, P.t, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1722,(0,1):C.GC_4251})
V_1132 = Vertex(name = 'V_1132',
particles = [ P.s__tilde__, P.t, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2600,(0,1):C.GC_4282})
V_1133 = Vertex(name = 'V_1133',
particles = [ P.b__tilde__, P.t, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_784,(0,1):C.GC_433})
V_1134 = Vertex(name = 'V_1134',
particles = [ P.b__tilde__, P.t, P.a, P.W__minus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3894,(0,1):C.GC_4215})
# --- Up-type-quark W__minus__ W__plus__(-Higgs) contact vertices ---
V_1135 = Vertex(name = 'V_1135',
particles = [ P.u__tilde__, P.u, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_996})
V_1136 = Vertex(name = 'V_1136',
particles = [ P.u__tilde__, P.u, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_4022})
V_1137 = Vertex(name = 'V_1137',
particles = [ P.c__tilde__, P.u, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2652,(0,1):C.GC_2500})
V_1138 = Vertex(name = 'V_1138',
particles = [ P.t__tilde__, P.u, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4021,(0,1):C.GC_3863})
V_1139 = Vertex(name = 'V_1139',
particles = [ P.u__tilde__, P.c, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1680,(0,1):C.GC_1763})
V_1140 = Vertex(name = 'V_1140',
particles = [ P.c__tilde__, P.c, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_465})
V_1141 = Vertex(name = 'V_1141',
particles = [ P.c__tilde__, P.c, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_2501})
V_1142 = Vertex(name = 'V_1142',
particles = [ P.t__tilde__, P.c, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3808,(0,1):C.GC_3864})
V_1143 = Vertex(name = 'V_1143',
particles = [ P.u__tilde__, P.t, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1713,(0,1):C.GC_1764})
V_1144 = Vertex(name = 'V_1144',
particles = [ P.c__tilde__, P.t, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2591,(0,1):C.GC_2502})
V_1145 = Vertex(name = 'V_1145',
particles = [ P.t__tilde__, P.t, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_774})
V_1146 = Vertex(name = 'V_1146',
particles = [ P.t__tilde__, P.t, P.W__minus__, P.W__plus__, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS2 ],
couplings = {(0,0):C.GC_3865})
V_1147 = Vertex(name = 'V_1147',
particles = [ P.u__tilde__, P.u, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_1009})
V_1148 = Vertex(name = 'V_1148',
particles = [ P.u__tilde__, P.u, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_4046})
V_1149 = Vertex(name = 'V_1149',
particles = [ P.c__tilde__, P.u, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2663,(0,1):C.GC_2535})
V_1150 = Vertex(name = 'V_1150',
particles = [ P.t__tilde__, P.u, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4045,(0,1):C.GC_3898})
V_1151 = Vertex(name = 'V_1151',
particles = [ P.u__tilde__, P.c, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1691,(0,1):C.GC_1785})
V_1152 = Vertex(name = 'V_1152',
particles = [ P.c__tilde__, P.c, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_478})
V_1153 = Vertex(name = 'V_1153',
particles = [ P.c__tilde__, P.c, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_2536})
V_1154 = Vertex(name = 'V_1154',
particles = [ P.t__tilde__, P.c, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3819,(0,1):C.GC_3899})
V_1155 = Vertex(name = 'V_1155',
particles = [ P.u__tilde__, P.t, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1724,(0,1):C.GC_1786})
V_1156 = Vertex(name = 'V_1156',
particles = [ P.c__tilde__, P.t, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2602,(0,1):C.GC_2537})
V_1157 = Vertex(name = 'V_1157',
particles = [ P.t__tilde__, P.t, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_787})
V_1158 = Vertex(name = 'V_1158',
particles = [ P.t__tilde__, P.t, P.W__minus__, P.W__plus__ ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV2 ],
couplings = {(0,0):C.GC_3900})
# NOTE(review): auto-generated UFO model code — comments only; code tokens
# are byte-identical to the generated output.
# --- Quark W__plus__ Z(-Higgs) contact vertices ---
V_1159 = Vertex(name = 'V_1159',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_521,(0,1):C.GC_997})
V_1160 = Vertex(name = 'V_1160',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2726,(0,1):C.GC_4024})
V_1161 = Vertex(name = 'V_1161',
particles = [ P.c__tilde__, P.d, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3032,(0,1):C.GC_2503})
V_1162 = Vertex(name = 'V_1162',
particles = [ P.t__tilde__, P.d, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3033,(0,1):C.GC_3866})
V_1163 = Vertex(name = 'V_1163',
particles = [ P.u__tilde__, P.s, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3470,(0,1):C.GC_1765})
V_1164 = Vertex(name = 'V_1164',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_694,(0,1):C.GC_466})
V_1165 = Vertex(name = 'V_1165',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3472,(0,1):C.GC_2505})
V_1166 = Vertex(name = 'V_1166',
particles = [ P.t__tilde__, P.s, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3473,(0,1):C.GC_3867})
V_1167 = Vertex(name = 'V_1167',
particles = [ P.u__tilde__, P.b, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4187,(0,1):C.GC_1766})
V_1168 = Vertex(name = 'V_1168',
particles = [ P.c__tilde__, P.b, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4188,(0,1):C.GC_2506})
V_1169 = Vertex(name = 'V_1169',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_424,(0,1):C.GC_775})
V_1170 = Vertex(name = 'V_1170',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4190,(0,1):C.GC_3869})
V_1171 = Vertex(name = 'V_1171',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_534,(0,1):C.GC_1010})
V_1172 = Vertex(name = 'V_1172',
particles = [ P.u__tilde__, P.d, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2739,(0,1):C.GC_4048})
V_1173 = Vertex(name = 'V_1173',
particles = [ P.c__tilde__, P.d, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3054,(0,1):C.GC_2538})
V_1174 = Vertex(name = 'V_1174',
particles = [ P.t__tilde__, P.d, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3055,(0,1):C.GC_3901})
V_1175 = Vertex(name = 'V_1175',
particles = [ P.u__tilde__, P.s, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3505,(0,1):C.GC_1787})
V_1176 = Vertex(name = 'V_1176',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_707,(0,1):C.GC_479})
V_1177 = Vertex(name = 'V_1177',
particles = [ P.c__tilde__, P.s, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3507,(0,1):C.GC_2540})
V_1178 = Vertex(name = 'V_1178',
particles = [ P.t__tilde__, P.s, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3508,(0,1):C.GC_3902})
V_1179 = Vertex(name = 'V_1179',
particles = [ P.u__tilde__, P.b, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4222,(0,1):C.GC_1788})
V_1180 = Vertex(name = 'V_1180',
particles = [ P.c__tilde__, P.b, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4223,(0,1):C.GC_2541})
V_1181 = Vertex(name = 'V_1181',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_437,(0,1):C.GC_788})
V_1182 = Vertex(name = 'V_1182',
particles = [ P.t__tilde__, P.b, P.W__plus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4225,(0,1):C.GC_3904})
# --- Lepton W__plus__ Z(-Higgs) contact vertices, color singlet '1' ---
V_1183 = Vertex(name = 'V_1183',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_578})
V_1184 = Vertex(name = 'V_1184',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_635})
V_1185 = Vertex(name = 'V_1185',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS1 ],
couplings = {(0,0):C.GC_906})
V_1186 = Vertex(name = 'V_1186',
particles = [ P.ve__tilde__, P.e__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_589})
V_1187 = Vertex(name = 'V_1187',
particles = [ P.vm__tilde__, P.mu__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_646})
V_1188 = Vertex(name = 'V_1188',
particles = [ P.vt__tilde__, P.ta__minus__, P.W__plus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFVV1 ],
couplings = {(0,0):C.GC_917})
# --- Conjugate quark W__minus__ Z(-Higgs) contact vertices ---
V_1189 = Vertex(name = 'V_1189',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_998,(0,1):C.GC_522})
V_1190 = Vertex(name = 'V_1190',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4025,(0,1):C.GC_2727})
V_1191 = Vertex(name = 'V_1191',
particles = [ P.s__tilde__, P.u, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2653,(0,1):C.GC_3076})
V_1192 = Vertex(name = 'V_1192',
particles = [ P.b__tilde__, P.u, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_4023,(0,1):C.GC_3001})
V_1193 = Vertex(name = 'V_1193',
particles = [ P.d__tilde__, P.c, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1681,(0,1):C.GC_3426})
V_1194 = Vertex(name = 'V_1194',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_467,(0,1):C.GC_695})
V_1195 = Vertex(name = 'V_1195',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2504,(0,1):C.GC_3471})
V_1196 = Vertex(name = 'V_1196',
particles = [ P.b__tilde__, P.c, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3809,(0,1):C.GC_3403})
V_1197 = Vertex(name = 'V_1197',
particles = [ P.d__tilde__, P.t, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_1714,(0,1):C.GC_4243})
V_1198 = Vertex(name = 'V_1198',
particles = [ P.s__tilde__, P.t, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_2592,(0,1):C.GC_4274})
V_1199 = Vertex(name = 'V_1199',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_776,(0,1):C.GC_425})
V_1200 = Vertex(name = 'V_1200',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.Z, P.H ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVVS1, L.FFVVS3 ],
couplings = {(0,0):C.GC_3868,(0,1):C.GC_4189})
V_1201 = Vertex(name = 'V_1201',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1011,(0,1):C.GC_535})
V_1202 = Vertex(name = 'V_1202',
particles = [ P.d__tilde__, P.u, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4049,(0,1):C.GC_2740})
V_1203 = Vertex(name = 'V_1203',
particles = [ P.s__tilde__, P.u, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2664,(0,1):C.GC_3087})
V_1204 = Vertex(name = 'V_1204',
particles = [ P.b__tilde__, P.u, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_4047,(0,1):C.GC_3012})
V_1205 = Vertex(name = 'V_1205',
particles = [ P.d__tilde__, P.c, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1692,(0,1):C.GC_3437})
V_1206 = Vertex(name = 'V_1206',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_480,(0,1):C.GC_708})
V_1207 = Vertex(name = 'V_1207',
particles = [ P.s__tilde__, P.c, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2539,(0,1):C.GC_3506})
V_1208 = Vertex(name = 'V_1208',
particles = [ P.b__tilde__, P.c, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3820,(0,1):C.GC_3414})
V_1209 = Vertex(name = 'V_1209',
particles = [ P.d__tilde__, P.t, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_1725,(0,1):C.GC_4254})
V_1210 = Vertex(name = 'V_1210',
particles = [ P.s__tilde__, P.t, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_2603,(0,1):C.GC_4285})
V_1211 = Vertex(name = 'V_1211',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_789,(0,1):C.GC_438})
V_1212 = Vertex(name = 'V_1212',
particles = [ P.b__tilde__, P.t, P.W__minus__, P.Z ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFVV1, L.FFVV4 ],
couplings = {(0,0):C.GC_3903,(0,1):C.GC_4224})
# --- Charged-lepton / neutrino a or Z with W__minus__(-Higgs) vertices ---
V_1213 = Vertex(name = 'V_1213',
particles = [ P.e__plus__, P.ve, P.a, P.W__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS3 ],
couplings = {(0,0):C.GC_574})
V_1214 = Vertex(name = 'V_1214',
particles = [ P.mu__plus__, P.vm, P.a, P.W__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS3 ],
couplings = {(0,0):C.GC_631})
V_1215 = Vertex(name = 'V_1215',
particles = [ P.ta__plus__, P.vt, P.a, P.W__minus__, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS3 ],
couplings = {(0,0):C.GC_902})
V_1216 = Vertex(name = 'V_1216',
particles = [ P.e__plus__, P.ve, P.a, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFVV4 ],
couplings = {(0,0):C.GC_586})
V_1217 = Vertex(name = 'V_1217',
particles = [ P.mu__plus__, P.vm, P.a, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFVV4 ],
couplings = {(0,0):C.GC_643})
V_1218 = Vertex(name = 'V_1218',
particles = [ P.ta__plus__, P.vt, P.a, P.W__minus__ ],
color = [ '1' ],
lorentz = [ L.FFVV4 ],
couplings = {(0,0):C.GC_914})
V_1219 = Vertex(name = 'V_1219',
particles = [ P.e__plus__, P.ve, P.W__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS3 ],
couplings = {(0,0):C.GC_579})
V_1220 = Vertex(name = 'V_1220',
particles = [ P.mu__plus__, P.vm, P.W__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS3 ],
couplings = {(0,0):C.GC_636})
V_1221 = Vertex(name = 'V_1221',
particles = [ P.ta__plus__, P.vt, P.W__minus__, P.Z, P.H ],
color = [ '1' ],
lorentz = [ L.FFVVS3 ],
couplings = {(0,0):C.GC_907})
V_1222 = Vertex(name = 'V_1222',
particles = [ P.e__plus__, P.ve, P.W__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFVV4 ],
couplings = {(0,0):C.GC_590})
V_1223 = Vertex(name = 'V_1223',
particles = [ P.mu__plus__, P.vm, P.W__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFVV4 ],
couplings = {(0,0):C.GC_647})
V_1224 = Vertex(name = 'V_1224',
particles = [ P.ta__plus__, P.vt, P.W__minus__, P.Z ],
color = [ '1' ],
lorentz = [ L.FFVV4 ],
couplings = {(0,0):C.GC_918})
V_1225 = Vertex(name = 'V_1225',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_41,(0,0):C.GC_38,(2,0):C.GC_39,(1,3):C.GC_38,(3,3):C.GC_39,(1,1):C.GC_38,(3,1):C.GC_39,(1,2):C.GC_10,(0,4):C.GC_38,(2,4):C.GC_39,(0,5):C.GC_10})
V_1226 = Vertex(name = 'V_1226',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_42,(0,7):C.GC_42,(0,0):C.GC_2677,(2,0):C.GC_2678,(1,3):C.GC_2677,(3,3):C.GC_2678,(1,1):C.GC_2677,(3,1):C.GC_2678,(1,2):C.GC_11,(0,4):C.GC_2677,(2,4):C.GC_2678,(0,5):C.GC_11})
V_1227 = Vertex(name = 'V_1227',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_44,(0,7):C.GC_44,(0,0):C.GC_1151,(2,0):C.GC_1152,(1,3):C.GC_1151,(3,3):C.GC_1152,(1,1):C.GC_1151,(3,1):C.GC_1152,(1,2):C.GC_554,(0,4):C.GC_1151,(2,4):C.GC_1152,(0,5):C.GC_554})
V_1228 = Vertex(name = 'V_1228',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF14, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_45,(0,3):C.GC_45,(1,0):C.GC_555,(0,1):C.GC_555})
V_1229 = Vertex(name = 'V_1229',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2691,(0,1):C.GC_2691})
# --- Four-fermion contact vertices: (d~ d)(d~ d), (s~ d)(d~ d), (b~ d)(d~ d) ---
# Each Vertex pairs a color basis (products of 'Identity(i,j)' and/or color
# generators 'T(-1,i,j)') with a list of Lorentz structures L.FFFFn; the
# couplings dict maps (color_index, lorentz_index) -> coupling constant C.GC_n.
# NOTE(review): this file appears to be auto-generated UFO model output —
# prefer regenerating from the model source over hand-editing couplings.
V_1230 = Vertex(name = 'V_1230',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2697,(0,1):C.GC_2697})
V_1231 = Vertex(name = 'V_1231',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2703,(0,1):C.GC_2703})
V_1232 = Vertex(name = 'V_1232',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2709,(0,1):C.GC_2709})
# Flavor-changing (s~ d) blocks below carry both singlet (Identity*Identity)
# and octet (T*T) color structures with distinct couplings per structure.
V_1233 = Vertex(name = 'V_1233',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_2965,(0,5):C.GC_2965,(0,0):C.GC_1951,(2,0):C.GC_1954,(1,2):C.GC_1210,(3,2):C.GC_1212,(1,1):C.GC_1951,(3,1):C.GC_1954,(0,3):C.GC_1210,(2,3):C.GC_1212})
V_1234 = Vertex(name = 'V_1234',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2969,(0,3):C.GC_2969,(1,0):C.GC_2949,(3,0):C.GC_2951,(0,1):C.GC_2949,(2,1):C.GC_2951})
V_1235 = Vertex(name = 'V_1235',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2973,(0,1):C.GC_2973})
V_1236 = Vertex(name = 'V_1236',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2979,(0,1):C.GC_2979})
V_1237 = Vertex(name = 'V_1237',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_2966,(0,5):C.GC_2966,(0,0):C.GC_2791,(2,0):C.GC_2794,(1,2):C.GC_1211,(3,2):C.GC_1213,(1,1):C.GC_2791,(3,1):C.GC_2794,(0,3):C.GC_1211,(2,3):C.GC_1213})
V_1238 = Vertex(name = 'V_1238',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2970,(0,3):C.GC_2970,(1,0):C.GC_2950,(3,0):C.GC_2952,(0,1):C.GC_2950,(2,1):C.GC_2952})
V_1239 = Vertex(name = 'V_1239',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2975,(0,1):C.GC_2975})
V_1240 = Vertex(name = 'V_1240',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2980,(0,1):C.GC_2980})
# --- Four-fermion contact vertices with a (d~ s) or (d~ b) current ---
# Auto-generated UFO model data: each Vertex lists its four external fermions,
# a color basis (color-singlet Identity products and/or color-octet T*T
# products), the Lorentz structures used, and a couplings dict keyed by
# (color_index, lorentz_index). Do not hand-edit individual GC_ constants;
# regenerate from the model source instead.
V_1241 = Vertex(name = 'V_1241',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_3348,(0,5):C.GC_3348,(0,0):C.GC_1353,(2,0):C.GC_1356,(1,2):C.GC_1353,(3,2):C.GC_1356,(1,1):C.GC_1216,(3,1):C.GC_1219,(0,3):C.GC_1216,(2,3):C.GC_1219})
V_1242 = Vertex(name = 'V_1242',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3356,(0,3):C.GC_3356,(0,0):C.GC_3318,(2,0):C.GC_3322,(1,1):C.GC_3318,(3,1):C.GC_3322})
V_1243 = Vertex(name = 'V_1243',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3363,(0,1):C.GC_3363})
V_1244 = Vertex(name = 'V_1244',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3372,(0,1):C.GC_3372})
V_1245 = Vertex(name = 'V_1245',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_42,(0,0):C.GC_2134,(2,0):C.GC_2137,(1,3):C.GC_38,(3,3):C.GC_39,(1,1):C.GC_38,(3,1):C.GC_39,(1,2):C.GC_10,(0,4):C.GC_1217,(2,4):C.GC_1220,(0,5):C.GC_11})
V_1246 = Vertex(name = 'V_1246',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_44,(0,5):C.GC_45,(1,2):C.GC_3260,(2,2):C.GC_3261,(1,0):C.GC_2893,(2,0):C.GC_2895,(1,1):C.GC_745,(0,3):C.GC_747})
V_1247 = Vertex(name = 'V_1247',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_4113,(0,3):C.GC_4121,(1,1):C.GC_1354,(2,1):C.GC_1357,(1,0):C.GC_1957,(2,0):C.GC_1960})
V_1248 = Vertex(name = 'V_1248',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4131,(0,1):C.GC_4139})
V_1249 = Vertex(name = 'V_1249',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_3352,(0,5):C.GC_3359,(0,0):C.GC_3141,(2,0):C.GC_3144,(1,2):C.GC_1355,(3,2):C.GC_1358,(1,1):C.GC_2807,(3,1):C.GC_2810,(0,3):C.GC_1218,(2,3):C.GC_1221})
V_1250 = Vertex(name = 'V_1250',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3367,(0,2):C.GC_3376,(1,0):C.GC_3321,(2,0):C.GC_3325})
V_1251 = Vertex(name = 'V_1251',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_4108,(0,5):C.GC_4108,(0,0):C.GC_1540,(2,0):C.GC_1543,(1,2):C.GC_1540,(3,2):C.GC_1543,(1,1):C.GC_1196,(3,1):C.GC_1199,(0,3):C.GC_1196,(2,3):C.GC_1199})
V_1252 = Vertex(name = 'V_1252',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_4119,(0,3):C.GC_4119,(0,0):C.GC_4078,(2,0):C.GC_4082,(1,1):C.GC_4078,(3,1):C.GC_4082})
V_1253 = Vertex(name = 'V_1253',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4127,(0,1):C.GC_4127})
V_1254 = Vertex(name = 'V_1254',
particles = [ P.d__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4137,(0,1):C.GC_4137})
V_1255 = Vertex(name = 'V_1255',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_4109,(0,5):C.GC_4120,(0,0):C.GC_2336,(2,0):C.GC_2339,(1,2):C.GC_1541,(3,2):C.GC_1544,(1,1):C.GC_1932,(3,1):C.GC_1935,(0,3):C.GC_1197,(2,3):C.GC_1200})
V_1256 = Vertex(name = 'V_1256',
particles = [ P.s__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_4129,(0,2):C.GC_4138,(1,0):C.GC_4079,(2,0):C.GC_4083})
V_1257 = Vertex(name = 'V_1257',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_42,(0,0):C.GC_3585,(2,0):C.GC_3588,(1,3):C.GC_38,(3,3):C.GC_39,(1,1):C.GC_38,(3,1):C.GC_39,(1,2):C.GC_10,(0,4):C.GC_1198,(2,4):C.GC_1201,(0,5):C.GC_11})
V_1258 = Vertex(name = 'V_1258',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_44,(0,5):C.GC_45,(1,2):C.GC_3932,(2,2):C.GC_3933,(1,0):C.GC_2892,(2,0):C.GC_2894,(1,1):C.GC_561,(0,3):C.GC_562})
V_1259 = Vertex(name = 'V_1259',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3350,(0,3):C.GC_3357,(1,1):C.GC_1542,(2,1):C.GC_1545,(1,0):C.GC_2785,(2,0):C.GC_2788})
V_1260 = Vertex(name = 'V_1260',
particles = [ P.b__tilde__, P.d, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3365,(0,1):C.GC_3373})
# --- Four-fermion contact vertices with a (s~ s), (s~ b) or (b~ b) current
# paired against a (q~ d) current ---
# Auto-generated UFO model data. couplings maps
# (color_index, lorentz_index) -> C.GC_n; regenerate rather than hand-edit.
V_1261 = Vertex(name = 'V_1261',
particles = [ P.s__tilde__, P.d, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_2965,(0,5):C.GC_2965,(0,0):C.GC_2135,(2,0):C.GC_2138,(1,2):C.GC_2135,(3,2):C.GC_2138,(1,1):C.GC_2949,(3,1):C.GC_2951,(0,3):C.GC_2949,(2,3):C.GC_2951})
V_1262 = Vertex(name = 'V_1262',
particles = [ P.s__tilde__, P.d, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2969,(0,3):C.GC_2969,(1,0):C.GC_1958,(3,0):C.GC_1961,(0,1):C.GC_1958,(2,1):C.GC_1961})
V_1263 = Vertex(name = 'V_1263',
particles = [ P.s__tilde__, P.d, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2973,(0,1):C.GC_2973})
V_1264 = Vertex(name = 'V_1264',
particles = [ P.s__tilde__, P.d, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2979,(0,1):C.GC_2979})
V_1265 = Vertex(name = 'V_1265',
particles = [ P.b__tilde__, P.d, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_2970,(0,5):C.GC_2966,(0,0):C.GC_3142,(2,0):C.GC_3145,(1,2):C.GC_2136,(3,2):C.GC_2139,(1,1):C.GC_2808,(3,1):C.GC_2811,(0,3):C.GC_2950,(2,3):C.GC_2952})
V_1266 = Vertex(name = 'V_1266',
particles = [ P.b__tilde__, P.d, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)' ],
lorentz = [ L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2980,(0,2):C.GC_2975,(0,0):C.GC_1959,(2,0):C.GC_1962})
V_1267 = Vertex(name = 'V_1267',
particles = [ P.b__tilde__, P.d, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_2965,(0,5):C.GC_2969,(0,0):C.GC_3586,(2,0):C.GC_3589,(1,2):C.GC_2338,(3,2):C.GC_2341,(1,1):C.GC_2949,(3,1):C.GC_2951,(0,3):C.GC_1934,(2,3):C.GC_1937})
V_1268 = Vertex(name = 'V_1268',
particles = [ P.b__tilde__, P.d, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2973,(0,2):C.GC_2979,(1,0):C.GC_2786,(2,0):C.GC_2789})
V_1269 = Vertex(name = 'V_1269',
particles = [ P.b__tilde__, P.d, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_2966,(0,5):C.GC_2966,(0,0):C.GC_3587,(2,0):C.GC_3590,(1,2):C.GC_3587,(3,2):C.GC_3590,(1,1):C.GC_2950,(3,1):C.GC_2952,(0,3):C.GC_2950,(2,3):C.GC_2952})
V_1270 = Vertex(name = 'V_1270',
particles = [ P.b__tilde__, P.d, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2970,(0,3):C.GC_2970,(1,0):C.GC_2787,(3,0):C.GC_2790,(0,1):C.GC_2787,(2,1):C.GC_2790})
V_1271 = Vertex(name = 'V_1271',
particles = [ P.b__tilde__, P.d, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2975,(0,1):C.GC_2975})
V_1272 = Vertex(name = 'V_1272',
particles = [ P.b__tilde__, P.d, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2980,(0,1):C.GC_2980})
# --- Four-fermion contact vertices whose second particle is an s quark ---
# Auto-generated UFO model data (vertices V_1273..V_1304), including the
# four-identical-strange vertices (s~ s)(s~ s). Each Vertex couples its color
# basis (Identity products / T*T products) and Lorentz structures through a
# couplings dict keyed by (color_index, lorentz_index); prefer regenerating
# from the model source over hand-editing.
V_1273 = Vertex(name = 'V_1273',
particles = [ P.s__tilde__, P.s, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_3348,(0,5):C.GC_3348,(0,0):C.GC_3318,(2,0):C.GC_3322,(1,2):C.GC_1372,(3,2):C.GC_1375,(1,1):C.GC_3318,(3,1):C.GC_3322,(0,3):C.GC_1372,(2,3):C.GC_1375})
V_1274 = Vertex(name = 'V_1274',
particles = [ P.s__tilde__, P.s, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3356,(0,3):C.GC_3356,(0,0):C.GC_2142,(2,0):C.GC_2145,(1,1):C.GC_2142,(3,1):C.GC_2145})
V_1275 = Vertex(name = 'V_1275',
particles = [ P.s__tilde__, P.s, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3363,(0,1):C.GC_3363})
V_1276 = Vertex(name = 'V_1276',
particles = [ P.s__tilde__, P.s, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3372,(0,1):C.GC_3372})
V_1277 = Vertex(name = 'V_1277',
particles = [ P.s__tilde__, P.s, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_4119,(0,5):C.GC_4108,(0,0):C.GC_4078,(2,0):C.GC_4082,(1,2):C.GC_1549,(3,2):C.GC_1552,(1,1):C.GC_2105,(3,1):C.GC_2108,(0,3):C.GC_1348,(2,3):C.GC_1351})
V_1278 = Vertex(name = 'V_1278',
particles = [ P.s__tilde__, P.s, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)' ],
lorentz = [ L.FFFF12, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_4137,(0,2):C.GC_4127,(0,0):C.GC_2342,(2,0):C.GC_2345})
V_1279 = Vertex(name = 'V_1279',
particles = [ P.b__tilde__, P.s, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_3348,(0,5):C.GC_3356,(0,0):C.GC_3593,(2,0):C.GC_3596,(1,2):C.GC_1550,(3,2):C.GC_1553,(1,1):C.GC_3318,(3,1):C.GC_3322,(0,3):C.GC_1349,(2,3):C.GC_1352})
V_1280 = Vertex(name = 'V_1280',
particles = [ P.b__tilde__, P.s, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3363,(0,2):C.GC_3372,(1,0):C.GC_3135,(2,0):C.GC_3138})
V_1281 = Vertex(name = 'V_1281',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_41,(0,0):C.GC_38,(2,0):C.GC_39,(1,3):C.GC_38,(3,3):C.GC_39,(1,1):C.GC_38,(3,1):C.GC_39,(1,2):C.GC_10,(0,4):C.GC_38,(2,4):C.GC_39,(0,5):C.GC_10})
V_1282 = Vertex(name = 'V_1282',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_42,(0,7):C.GC_42,(0,0):C.GC_3320,(2,0):C.GC_3324,(1,3):C.GC_3320,(3,3):C.GC_3324,(1,1):C.GC_3320,(3,1):C.GC_3324,(1,2):C.GC_11,(0,4):C.GC_3320,(2,4):C.GC_3324,(0,5):C.GC_11})
V_1283 = Vertex(name = 'V_1283',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_44,(0,7):C.GC_44,(0,0):C.GC_2143,(2,0):C.GC_2146,(1,3):C.GC_2143,(3,3):C.GC_2146,(1,1):C.GC_2143,(3,1):C.GC_2146,(1,2):C.GC_737,(0,4):C.GC_2143,(2,4):C.GC_2146,(0,5):C.GC_737})
V_1284 = Vertex(name = 'V_1284',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF14, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_45,(0,3):C.GC_45,(1,0):C.GC_738,(0,1):C.GC_738})
V_1285 = Vertex(name = 'V_1285',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3351,(0,1):C.GC_3351})
V_1286 = Vertex(name = 'V_1286',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3358,(0,1):C.GC_3358})
V_1287 = Vertex(name = 'V_1287',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3366,(0,1):C.GC_3366})
V_1288 = Vertex(name = 'V_1288',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3374,(0,1):C.GC_3374})
V_1289 = Vertex(name = 'V_1289',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_3352,(0,5):C.GC_3352,(0,0):C.GC_3148,(2,0):C.GC_3151,(1,2):C.GC_3321,(3,2):C.GC_3325,(1,1):C.GC_3148,(3,1):C.GC_3151,(0,3):C.GC_3321,(2,3):C.GC_3325})
V_1290 = Vertex(name = 'V_1290',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3359,(0,3):C.GC_3359,(1,0):C.GC_2144,(3,0):C.GC_2147,(0,1):C.GC_2144,(2,1):C.GC_2147})
V_1291 = Vertex(name = 'V_1291',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3367,(0,1):C.GC_3367})
V_1292 = Vertex(name = 'V_1292',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3376,(0,1):C.GC_3376})
V_1293 = Vertex(name = 'V_1293',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_4109,(0,5):C.GC_4109,(0,0):C.GC_4079,(2,0):C.GC_4083,(1,2):C.GC_4079,(3,2):C.GC_4083,(1,1):C.GC_2106,(3,1):C.GC_2109,(0,3):C.GC_2106,(2,3):C.GC_2109})
V_1294 = Vertex(name = 'V_1294',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_4120,(0,3):C.GC_4120,(0,0):C.GC_2343,(2,0):C.GC_2346,(1,1):C.GC_2343,(3,1):C.GC_2346})
V_1295 = Vertex(name = 'V_1295',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4129,(0,1):C.GC_4129})
V_1296 = Vertex(name = 'V_1296',
particles = [ P.s__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4138,(0,1):C.GC_4138})
V_1297 = Vertex(name = 'V_1297',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_42,(0,0):C.GC_3594,(2,0):C.GC_3597,(1,3):C.GC_38,(3,3):C.GC_39,(1,1):C.GC_38,(3,1):C.GC_39,(1,2):C.GC_10,(0,4):C.GC_2107,(2,4):C.GC_2110,(0,5):C.GC_11})
V_1298 = Vertex(name = 'V_1298',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_44,(0,5):C.GC_45,(1,2):C.GC_4081,(2,2):C.GC_4085,(1,0):C.GC_3319,(2,0):C.GC_3323,(1,1):C.GC_744,(0,3):C.GC_746})
V_1299 = Vertex(name = 'V_1299',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2689,(0,3):C.GC_2696,(1,1):C.GC_2344,(2,1):C.GC_2347,(1,0):C.GC_3136,(2,0):C.GC_3139})
V_1300 = Vertex(name = 'V_1300',
particles = [ P.b__tilde__, P.s, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2702,(0,1):C.GC_2708})
V_1301 = Vertex(name = 'V_1301',
particles = [ P.b__tilde__, P.s, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_3352,(0,5):C.GC_3352,(0,0):C.GC_3595,(2,0):C.GC_3598,(1,2):C.GC_3595,(3,2):C.GC_3598,(1,1):C.GC_3321,(3,1):C.GC_3325,(0,3):C.GC_3321,(2,3):C.GC_3325})
V_1302 = Vertex(name = 'V_1302',
particles = [ P.b__tilde__, P.s, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3359,(0,3):C.GC_3359,(1,0):C.GC_3137,(3,0):C.GC_3140,(0,1):C.GC_3137,(2,1):C.GC_3140})
V_1303 = Vertex(name = 'V_1303',
particles = [ P.b__tilde__, P.s, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3367,(0,1):C.GC_3367})
V_1304 = Vertex(name = 'V_1304',
particles = [ P.b__tilde__, P.s, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3376,(0,1):C.GC_3376})
# --- Four-fermion contact vertices whose second particle is a b quark ---
# Auto-generated UFO model data (vertices V_1305..V_1320), ending with the
# four-identical-bottom vertices (b~ b)(b~ b). couplings maps
# (color_index, lorentz_index) -> C.GC_n; regenerate rather than hand-edit.
V_1305 = Vertex(name = 'V_1305',
particles = [ P.b__tilde__, P.b, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_4108,(0,5):C.GC_4108,(0,0):C.GC_4078,(2,0):C.GC_4082,(1,2):C.GC_1536,(3,2):C.GC_1539,(1,1):C.GC_4078,(3,1):C.GC_4082,(0,3):C.GC_1536,(2,3):C.GC_1539})
V_1306 = Vertex(name = 'V_1306',
particles = [ P.b__tilde__, P.b, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_4119,(0,3):C.GC_4119,(0,0):C.GC_3567,(2,0):C.GC_3570,(1,1):C.GC_3567,(3,1):C.GC_3570})
V_1307 = Vertex(name = 'V_1307',
particles = [ P.b__tilde__, P.b, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4127,(0,1):C.GC_4127})
V_1308 = Vertex(name = 'V_1308',
particles = [ P.b__tilde__, P.b, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4137,(0,1):C.GC_4137})
V_1309 = Vertex(name = 'V_1309',
particles = [ P.b__tilde__, P.b, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_4109,(0,5):C.GC_4109,(0,0):C.GC_4079,(2,0):C.GC_4083,(1,2):C.GC_2320,(3,2):C.GC_2323,(1,1):C.GC_4079,(3,1):C.GC_4083,(0,3):C.GC_2320,(2,3):C.GC_2323})
V_1310 = Vertex(name = 'V_1310',
particles = [ P.b__tilde__, P.b, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_4120,(0,3):C.GC_4120,(0,0):C.GC_3568,(2,0):C.GC_3571,(1,1):C.GC_3568,(3,1):C.GC_3571})
V_1311 = Vertex(name = 'V_1311',
particles = [ P.b__tilde__, P.b, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4129,(0,1):C.GC_4129})
V_1312 = Vertex(name = 'V_1312',
particles = [ P.b__tilde__, P.b, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4138,(0,1):C.GC_4138})
V_1313 = Vertex(name = 'V_1313',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_41,(0,0):C.GC_38,(2,0):C.GC_39,(1,3):C.GC_38,(3,3):C.GC_39,(1,1):C.GC_38,(3,1):C.GC_39,(1,2):C.GC_10,(0,4):C.GC_38,(2,4):C.GC_39,(0,5):C.GC_10})
V_1314 = Vertex(name = 'V_1314',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_42,(0,7):C.GC_42,(0,0):C.GC_4080,(2,0):C.GC_4084,(1,3):C.GC_4080,(3,3):C.GC_4084,(1,1):C.GC_4080,(3,1):C.GC_4084,(1,2):C.GC_11,(0,4):C.GC_4080,(2,4):C.GC_4084,(0,5):C.GC_11})
V_1315 = Vertex(name = 'V_1315',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_44,(0,7):C.GC_44,(0,0):C.GC_3569,(2,0):C.GC_3572,(1,3):C.GC_3569,(3,3):C.GC_3572,(1,1):C.GC_3569,(3,1):C.GC_3572,(1,2):C.GC_448,(0,4):C.GC_3569,(2,4):C.GC_3572,(0,5):C.GC_448})
V_1316 = Vertex(name = 'V_1316',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF14, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_45,(0,3):C.GC_45,(1,0):C.GC_449,(0,1):C.GC_449})
V_1317 = Vertex(name = 'V_1317',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4114,(0,1):C.GC_4114})
V_1318 = Vertex(name = 'V_1318',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4122,(0,1):C.GC_4122})
V_1319 = Vertex(name = 'V_1319',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4132,(0,1):C.GC_4132})
V_1320 = Vertex(name = 'V_1320',
particles = [ P.b__tilde__, P.b, P.b__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4140,(0,1):C.GC_4140})
# --- Four-lepton contact vertices (e, mu, tau pairs) ---
# Auto-generated UFO model data. Leptons are color singlets, hence the
# trivial color basis ['1']; couplings maps (color_index, lorentz_index)
# -> C.GC_n. The e/mu/tau blocks are structurally identical (lepton
# universality of the couplings GC_13/22/23/24 as generated).
V_1321 = Vertex(name = 'V_1321',
particles = [ P.e__plus__, P.e__minus__, P.e__plus__, P.e__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(0,6):C.GC_23,(0,7):C.GC_23,(0,0):C.GC_22,(0,3):C.GC_22,(0,1):C.GC_22,(0,2):C.GC_13,(0,4):C.GC_22,(0,5):C.GC_13})
V_1322 = Vertex(name = 'V_1322',
particles = [ P.e__plus__, P.e__minus__, P.e__plus__, P.e__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_24,(0,1):C.GC_24})
V_1323 = Vertex(name = 'V_1323',
particles = [ P.mu__plus__, P.e__minus__, P.e__plus__, P.mu__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(0,3):C.GC_23,(0,4):C.GC_24,(0,2):C.GC_22,(0,0):C.GC_22,(0,1):C.GC_13})
V_1324 = Vertex(name = 'V_1324',
particles = [ P.ta__plus__, P.e__minus__, P.e__plus__, P.ta__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(0,3):C.GC_23,(0,4):C.GC_24,(0,2):C.GC_22,(0,0):C.GC_22,(0,1):C.GC_13})
V_1325 = Vertex(name = 'V_1325',
particles = [ P.mu__plus__, P.mu__minus__, P.mu__plus__, P.mu__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(0,6):C.GC_23,(0,7):C.GC_23,(0,0):C.GC_22,(0,3):C.GC_22,(0,1):C.GC_22,(0,2):C.GC_13,(0,4):C.GC_22,(0,5):C.GC_13})
V_1326 = Vertex(name = 'V_1326',
particles = [ P.mu__plus__, P.mu__minus__, P.mu__plus__, P.mu__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_24,(0,1):C.GC_24})
V_1327 = Vertex(name = 'V_1327',
particles = [ P.ta__plus__, P.mu__minus__, P.mu__plus__, P.ta__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(0,3):C.GC_23,(0,4):C.GC_24,(0,2):C.GC_22,(0,0):C.GC_22,(0,1):C.GC_13})
V_1328 = Vertex(name = 'V_1328',
particles = [ P.ta__plus__, P.ta__minus__, P.ta__plus__, P.ta__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(0,6):C.GC_23,(0,7):C.GC_23,(0,0):C.GC_22,(0,3):C.GC_22,(0,1):C.GC_22,(0,2):C.GC_13,(0,4):C.GC_22,(0,5):C.GC_13})
V_1329 = Vertex(name = 'V_1329',
particles = [ P.ta__plus__, P.ta__minus__, P.ta__plus__, P.ta__minus__ ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_24,(0,1):C.GC_24})
# --- Lepton-pair x down-quark-pair contact vertices: (l+ l-)(d~ d) ---
# Auto-generated UFO model data. The quark pair carries the only color
# structure ('Identity(3,4)', positions 3-4); couplings maps
# (color_index, lorentz_index) -> C.GC_n. The e/mu/tau variants differ only
# in the lepton-flavor-specific couplings (e.g. GC_614/671/942).
V_1330 = Vertex(name = 'V_1330',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_614,(0,0):C.GC_614})
V_1331 = Vertex(name = 'V_1331',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_2688,(0,2):C.GC_557,(0,3):C.GC_556,(0,5):C.GC_2744,(0,0):C.GC_2744})
V_1332 = Vertex(name = 'V_1332',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2685})
V_1333 = Vertex(name = 'V_1333',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2687})
V_1334 = Vertex(name = 'V_1334',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_671,(0,0):C.GC_671})
V_1335 = Vertex(name = 'V_1335',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_2688,(0,2):C.GC_557,(0,3):C.GC_556,(0,5):C.GC_2745,(0,0):C.GC_2745})
V_1336 = Vertex(name = 'V_1336',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2685})
V_1337 = Vertex(name = 'V_1337',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2687})
V_1338 = Vertex(name = 'V_1338',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_942,(0,0):C.GC_942})
V_1339 = Vertex(name = 'V_1339',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_2688,(0,2):C.GC_557,(0,3):C.GC_556,(0,5):C.GC_2746,(0,0):C.GC_2746})
V_1340 = Vertex(name = 'V_1340',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2685})
V_1341 = Vertex(name = 'V_1341',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2687})
V_1342 = Vertex(name = 'V_1342',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_2957,(0,1):C.GC_2963,(0,3):C.GC_3063,(0,0):C.GC_3091})
V_1343 = Vertex(name = 'V_1343',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2959})
V_1344 = Vertex(name = 'V_1344',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_2957,(0,1):C.GC_2963,(0,3):C.GC_3066,(0,0):C.GC_3092})
V_1345 = Vertex(name = 'V_1345',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2959})
V_1346 = Vertex(name = 'V_1346',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_2957,(0,1):C.GC_2963,(0,3):C.GC_3094,(0,0):C.GC_3096})
V_1347 = Vertex(name = 'V_1347',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2959})
V_1348 = Vertex(name = 'V_1348',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_2958,(0,1):C.GC_2964,(0,3):C.GC_3064,(0,0):C.GC_3062})
V_1349 = Vertex(name = 'V_1349',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2961})
V_1350 = Vertex(name = 'V_1350',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_2958,(0,1):C.GC_2964,(0,3):C.GC_3067,(0,0):C.GC_3065})
V_1351 = Vertex(name = 'V_1351',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2961})
V_1352 = Vertex(name = 'V_1352',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_2958,(0,1):C.GC_2964,(0,3):C.GC_3095,(0,0):C.GC_3093})
V_1353 = Vertex(name = 'V_1353',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.d ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2961})
V_1354 = Vertex(name = 'V_1354',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_3336,(0,1):C.GC_3345,(0,3):C.GC_3518,(0,0):C.GC_3442})
V_1355 = Vertex(name = 'V_1355',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3339})
V_1356 = Vertex(name = 'V_1356',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_3336,(0,1):C.GC_3345,(0,3):C.GC_3521,(0,0):C.GC_3444})
V_1357 = Vertex(name = 'V_1357',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3339})
V_1358 = Vertex(name = 'V_1358',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_3336,(0,1):C.GC_3345,(0,3):C.GC_3526,(0,0):C.GC_3525})
V_1359 = Vertex(name = 'V_1359',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3339})
V_1360 = Vertex(name = 'V_1360',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_727,(0,0):C.GC_727})
V_1361 = Vertex(name = 'V_1361',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_3346,(0,2):C.GC_740,(0,3):C.GC_739,(0,5):C.GC_3519,(0,0):C.GC_3519})
V_1362 = Vertex(name = 'V_1362',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3337})
V_1363 = Vertex(name = 'V_1363',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3341})
V_1364 = Vertex(name = 'V_1364',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_731,(0,0):C.GC_731})
V_1365 = Vertex(name = 'V_1365',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_3346,(0,2):C.GC_740,(0,3):C.GC_739,(0,5):C.GC_3522,(0,0):C.GC_3522})
V_1366 = Vertex(name = 'V_1366',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3337})
V_1367 = Vertex(name = 'V_1367',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3341})
V_1368 = Vertex(name = 'V_1368',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_946,(0,0):C.GC_946})
V_1369 = Vertex(name = 'V_1369',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_3346,(0,2):C.GC_740,(0,3):C.GC_739,(0,5):C.GC_3527,(0,0):C.GC_3527})
V_1370 = Vertex(name = 'V_1370',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3337})
V_1371 = Vertex(name = 'V_1371',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3341})
V_1372 = Vertex(name = 'V_1372',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_3338,(0,1):C.GC_3347,(0,3):C.GC_3520,(0,0):C.GC_3441})
V_1373 = Vertex(name = 'V_1373',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3343})
V_1374 = Vertex(name = 'V_1374',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_3338,(0,1):C.GC_3347,(0,3):C.GC_3523,(0,0):C.GC_3443})
V_1375 = Vertex(name = 'V_1375',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3343})
V_1376 = Vertex(name = 'V_1376',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_3338,(0,1):C.GC_3347,(0,3):C.GC_3528,(0,0):C.GC_3524})
V_1377 = Vertex(name = 'V_1377',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.s ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3343})
V_1378 = Vertex(name = 'V_1378',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_4096,(0,1):C.GC_4105,(0,3):C.GC_4258,(0,0):C.GC_4261})
V_1379 = Vertex(name = 'V_1379',
particles = [ P.e__plus__, P.e__minus__, P.d__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4099})
V_1380 = Vertex(name = 'V_1380',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_4096,(0,1):C.GC_4105,(0,3):C.GC_4262,(0,0):C.GC_4265})
V_1381 = Vertex(name = 'V_1381',
particles = [ P.mu__plus__, P.mu__minus__, P.d__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4099})
V_1382 = Vertex(name = 'V_1382',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_4096,(0,1):C.GC_4105,(0,3):C.GC_4291,(0,0):C.GC_4294})
V_1383 = Vertex(name = 'V_1383',
particles = [ P.ta__plus__, P.ta__minus__, P.d__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4099})
V_1384 = Vertex(name = 'V_1384',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_4097,(0,1):C.GC_4106,(0,3):C.GC_4259,(0,0):C.GC_4289})
V_1385 = Vertex(name = 'V_1385',
particles = [ P.e__plus__, P.e__minus__, P.s__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4101})
V_1386 = Vertex(name = 'V_1386',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_4097,(0,1):C.GC_4106,(0,3):C.GC_4263,(0,0):C.GC_4290})
V_1387 = Vertex(name = 'V_1387',
particles = [ P.mu__plus__, P.mu__minus__, P.s__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4101})
V_1388 = Vertex(name = 'V_1388',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF4, L.FFFF8 ],
couplings = {(0,2):C.GC_4097,(0,1):C.GC_4106,(0,3):C.GC_4292,(0,0):C.GC_4295})
V_1389 = Vertex(name = 'V_1389',
particles = [ P.ta__plus__, P.ta__minus__, P.s__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4101})
V_1390 = Vertex(name = 'V_1390',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_598,(0,0):C.GC_598})
V_1391 = Vertex(name = 'V_1391',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_4107,(0,2):C.GC_451,(0,3):C.GC_450,(0,5):C.GC_4260,(0,0):C.GC_4260})
V_1392 = Vertex(name = 'V_1392',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4098})
V_1393 = Vertex(name = 'V_1393',
particles = [ P.e__plus__, P.e__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4103})
V_1394 = Vertex(name = 'V_1394',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_655,(0,0):C.GC_655})
V_1395 = Vertex(name = 'V_1395',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_4107,(0,2):C.GC_451,(0,3):C.GC_450,(0,5):C.GC_4264,(0,0):C.GC_4264})
V_1396 = Vertex(name = 'V_1396',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4098})
V_1397 = Vertex(name = 'V_1397',
particles = [ P.mu__plus__, P.mu__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4103})
V_1398 = Vertex(name = 'V_1398',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_25,(0,1):C.GC_40,(0,2):C.GC_21,(0,3):C.GC_12,(0,5):C.GC_926,(0,0):C.GC_926})
V_1399 = Vertex(name = 'V_1399',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF4, L.FFFF8 ],
couplings = {(0,4):C.GC_27,(0,1):C.GC_4107,(0,2):C.GC_451,(0,3):C.GC_450,(0,5):C.GC_4293,(0,0):C.GC_4293})
V_1400 = Vertex(name = 'V_1400',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4098})
V_1401 = Vertex(name = 'V_1401',
particles = [ P.ta__plus__, P.ta__minus__, P.b__tilde__, P.b ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4103})
# --- Charged-current lepton-quark vertices (V_1402..V_1455) ---
# Auto-generated UFO Vertex declarations: antineutrino + charged lepton
# (ve/vm/vt with e/mu/ta) paired with a down-type antiquark (d/s/b) and an
# up-type quark (u/c/t). All share the colour delta 'Identity(3,4)' and the
# same six Lorentz structures; only the coupling constants differ per
# lepton generation and quark-flavour combination.
# NOTE(review): generator output — do not hand-edit; regenerate instead.
# ve~ e- blocks (quark combinations d/s/b x u/c/t)
V_1402 = Vertex(name = 'V_1402',
particles = [ P.ve__tilde__, P.e__minus__, P.d__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_28,(0,5):C.GC_1042,(0,3):C.GC_1041,(0,4):C.GC_1041,(0,1):C.GC_1036,(0,0):C.GC_615})
V_1403 = Vertex(name = 'V_1403',
particles = [ P.ve__tilde__, P.e__minus__, P.d__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1093,(0,5):C.GC_1124,(0,3):C.GC_1123,(0,4):C.GC_1123,(0,1):C.GC_1120,(0,0):C.GC_1112})
V_1404 = Vertex(name = 'V_1404',
particles = [ P.ve__tilde__, P.e__minus__, P.d__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_31,(0,5):C.GC_609,(0,3):C.GC_608,(0,4):C.GC_608,(0,1):C.GC_603,(0,0):C.GC_616})
V_1405 = Vertex(name = 'V_1405',
particles = [ P.ve__tilde__, P.e__minus__, P.d__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_563,(0,5):C.GC_624,(0,3):C.GC_623,(0,4):C.GC_623,(0,1):C.GC_622,(0,0):C.GC_625})
V_1406 = Vertex(name = 'V_1406',
particles = [ P.ve__tilde__, P.e__minus__, P.d__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_34,(0,5):C.GC_820,(0,3):C.GC_819,(0,4):C.GC_819,(0,1):C.GC_814,(0,0):C.GC_617})
V_1407 = Vertex(name = 'V_1407',
particles = [ P.ve__tilde__, P.e__minus__, P.d__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_855,(0,5):C.GC_883,(0,3):C.GC_882,(0,4):C.GC_882,(0,1):C.GC_879,(0,0):C.GC_874})
V_1408 = Vertex(name = 'V_1408',
particles = [ P.ve__tilde__, P.e__minus__, P.s__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_29,(0,5):C.GC_1044,(0,3):C.GC_1043,(0,4):C.GC_1043,(0,1):C.GC_1037,(0,0):C.GC_728})
V_1409 = Vertex(name = 'V_1409',
particles = [ P.ve__tilde__, P.e__minus__, P.s__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1094,(0,5):C.GC_1126,(0,3):C.GC_1125,(0,4):C.GC_1125,(0,1):C.GC_1121,(0,0):C.GC_1115})
V_1410 = Vertex(name = 'V_1410',
particles = [ P.ve__tilde__, P.e__minus__, P.s__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_32,(0,5):C.GC_611,(0,3):C.GC_610,(0,4):C.GC_610,(0,1):C.GC_604,(0,0):C.GC_729})
V_1411 = Vertex(name = 'V_1411',
particles = [ P.ve__tilde__, P.e__minus__, P.s__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_748,(0,5):C.GC_756,(0,3):C.GC_755,(0,4):C.GC_755,(0,1):C.GC_754,(0,0):C.GC_760})
V_1412 = Vertex(name = 'V_1412',
particles = [ P.ve__tilde__, P.e__minus__, P.s__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_35,(0,5):C.GC_822,(0,3):C.GC_821,(0,4):C.GC_821,(0,1):C.GC_815,(0,0):C.GC_730})
V_1413 = Vertex(name = 'V_1413',
particles = [ P.ve__tilde__, P.e__minus__, P.s__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_856,(0,5):C.GC_885,(0,3):C.GC_884,(0,4):C.GC_884,(0,1):C.GC_880,(0,0):C.GC_877})
V_1414 = Vertex(name = 'V_1414',
particles = [ P.ve__tilde__, P.e__minus__, P.b__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_30,(0,5):C.GC_1046,(0,3):C.GC_1045,(0,4):C.GC_1045,(0,1):C.GC_1038,(0,0):C.GC_599})
V_1415 = Vertex(name = 'V_1415',
particles = [ P.ve__tilde__, P.e__minus__, P.b__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1095,(0,5):C.GC_1128,(0,3):C.GC_1127,(0,4):C.GC_1127,(0,1):C.GC_1122,(0,0):C.GC_1111})
V_1416 = Vertex(name = 'V_1416',
particles = [ P.ve__tilde__, P.e__minus__, P.b__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_33,(0,5):C.GC_613,(0,3):C.GC_612,(0,4):C.GC_612,(0,1):C.GC_605,(0,0):C.GC_600})
V_1417 = Vertex(name = 'V_1417',
particles = [ P.ve__tilde__, P.e__minus__, P.b__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_504,(0,5):C.GC_621,(0,3):C.GC_620,(0,4):C.GC_620,(0,1):C.GC_619,(0,0):C.GC_618})
V_1418 = Vertex(name = 'V_1418',
particles = [ P.ve__tilde__, P.e__minus__, P.b__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_36,(0,5):C.GC_824,(0,3):C.GC_823,(0,4):C.GC_823,(0,1):C.GC_816,(0,0):C.GC_601})
V_1419 = Vertex(name = 'V_1419',
particles = [ P.ve__tilde__, P.e__minus__, P.b__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_857,(0,5):C.GC_887,(0,3):C.GC_886,(0,4):C.GC_886,(0,1):C.GC_881,(0,0):C.GC_873})
# vm~ mu- blocks (same quark combinations, second lepton generation)
V_1420 = Vertex(name = 'V_1420',
particles = [ P.vm__tilde__, P.mu__minus__, P.d__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_28,(0,5):C.GC_1054,(0,3):C.GC_1053,(0,4):C.GC_1053,(0,1):C.GC_1048,(0,0):C.GC_672})
V_1421 = Vertex(name = 'V_1421',
particles = [ P.vm__tilde__, P.mu__minus__, P.d__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1093,(0,5):C.GC_1133,(0,3):C.GC_1132,(0,4):C.GC_1132,(0,1):C.GC_1129,(0,0):C.GC_1114})
V_1422 = Vertex(name = 'V_1422',
particles = [ P.vm__tilde__, P.mu__minus__, P.d__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_31,(0,5):C.GC_666,(0,3):C.GC_665,(0,4):C.GC_665,(0,1):C.GC_660,(0,0):C.GC_673})
V_1423 = Vertex(name = 'V_1423',
particles = [ P.vm__tilde__, P.mu__minus__, P.d__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_563,(0,5):C.GC_681,(0,3):C.GC_680,(0,4):C.GC_680,(0,1):C.GC_679,(0,0):C.GC_682})
V_1424 = Vertex(name = 'V_1424',
particles = [ P.vm__tilde__, P.mu__minus__, P.d__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_34,(0,5):C.GC_832,(0,3):C.GC_831,(0,4):C.GC_831,(0,1):C.GC_826,(0,0):C.GC_674})
V_1425 = Vertex(name = 'V_1425',
particles = [ P.vm__tilde__, P.mu__minus__, P.d__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_855,(0,5):C.GC_892,(0,3):C.GC_891,(0,4):C.GC_891,(0,1):C.GC_888,(0,0):C.GC_876})
V_1426 = Vertex(name = 'V_1426',
particles = [ P.vm__tilde__, P.mu__minus__, P.s__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_29,(0,5):C.GC_1056,(0,3):C.GC_1055,(0,4):C.GC_1055,(0,1):C.GC_1049,(0,0):C.GC_732})
V_1427 = Vertex(name = 'V_1427',
particles = [ P.vm__tilde__, P.mu__minus__, P.s__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1094,(0,5):C.GC_1135,(0,3):C.GC_1134,(0,4):C.GC_1134,(0,1):C.GC_1130,(0,0):C.GC_1116})
V_1428 = Vertex(name = 'V_1428',
particles = [ P.vm__tilde__, P.mu__minus__, P.s__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_32,(0,5):C.GC_668,(0,3):C.GC_667,(0,4):C.GC_667,(0,1):C.GC_661,(0,0):C.GC_733})
V_1429 = Vertex(name = 'V_1429',
particles = [ P.vm__tilde__, P.mu__minus__, P.s__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_748,(0,5):C.GC_759,(0,3):C.GC_758,(0,4):C.GC_758,(0,1):C.GC_757,(0,0):C.GC_761})
V_1430 = Vertex(name = 'V_1430',
particles = [ P.vm__tilde__, P.mu__minus__, P.s__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_35,(0,5):C.GC_834,(0,3):C.GC_833,(0,4):C.GC_833,(0,1):C.GC_827,(0,0):C.GC_734})
V_1431 = Vertex(name = 'V_1431',
particles = [ P.vm__tilde__, P.mu__minus__, P.s__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_856,(0,5):C.GC_894,(0,3):C.GC_893,(0,4):C.GC_893,(0,1):C.GC_889,(0,0):C.GC_878})
V_1432 = Vertex(name = 'V_1432',
particles = [ P.vm__tilde__, P.mu__minus__, P.b__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_30,(0,5):C.GC_1058,(0,3):C.GC_1057,(0,4):C.GC_1057,(0,1):C.GC_1050,(0,0):C.GC_656})
V_1433 = Vertex(name = 'V_1433',
particles = [ P.vm__tilde__, P.mu__minus__, P.b__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1095,(0,5):C.GC_1137,(0,3):C.GC_1136,(0,4):C.GC_1136,(0,1):C.GC_1131,(0,0):C.GC_1113})
V_1434 = Vertex(name = 'V_1434',
particles = [ P.vm__tilde__, P.mu__minus__, P.b__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_33,(0,5):C.GC_670,(0,3):C.GC_669,(0,4):C.GC_669,(0,1):C.GC_662,(0,0):C.GC_657})
V_1435 = Vertex(name = 'V_1435',
particles = [ P.vm__tilde__, P.mu__minus__, P.b__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_504,(0,5):C.GC_678,(0,3):C.GC_677,(0,4):C.GC_677,(0,1):C.GC_676,(0,0):C.GC_675})
V_1436 = Vertex(name = 'V_1436',
particles = [ P.vm__tilde__, P.mu__minus__, P.b__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_36,(0,5):C.GC_836,(0,3):C.GC_835,(0,4):C.GC_835,(0,1):C.GC_828,(0,0):C.GC_658})
V_1437 = Vertex(name = 'V_1437',
particles = [ P.vm__tilde__, P.mu__minus__, P.b__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_857,(0,5):C.GC_896,(0,3):C.GC_895,(0,4):C.GC_895,(0,1):C.GC_890,(0,0):C.GC_875})
# vt~ ta- blocks (same quark combinations, third lepton generation)
V_1438 = Vertex(name = 'V_1438',
particles = [ P.vt__tilde__, P.ta__minus__, P.d__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_28,(0,5):C.GC_1075,(0,3):C.GC_1074,(0,4):C.GC_1074,(0,1):C.GC_1069,(0,0):C.GC_943})
V_1439 = Vertex(name = 'V_1439',
particles = [ P.vt__tilde__, P.ta__minus__, P.d__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1093,(0,5):C.GC_1142,(0,3):C.GC_1141,(0,4):C.GC_1141,(0,1):C.GC_1138,(0,0):C.GC_1118})
V_1440 = Vertex(name = 'V_1440',
particles = [ P.vt__tilde__, P.ta__minus__, P.d__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_31,(0,5):C.GC_937,(0,3):C.GC_936,(0,4):C.GC_936,(0,1):C.GC_931,(0,0):C.GC_944})
V_1441 = Vertex(name = 'V_1441',
particles = [ P.vt__tilde__, P.ta__minus__, P.d__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_563,(0,5):C.GC_968,(0,3):C.GC_967,(0,4):C.GC_967,(0,1):C.GC_966,(0,0):C.GC_969})
V_1442 = Vertex(name = 'V_1442',
particles = [ P.vt__tilde__, P.ta__minus__, P.d__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_34,(0,5):C.GC_957,(0,3):C.GC_956,(0,4):C.GC_956,(0,1):C.GC_951,(0,0):C.GC_945})
V_1443 = Vertex(name = 'V_1443',
particles = [ P.vt__tilde__, P.ta__minus__, P.d__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_855,(0,5):C.GC_981,(0,3):C.GC_980,(0,4):C.GC_980,(0,1):C.GC_977,(0,0):C.GC_975})
V_1444 = Vertex(name = 'V_1444',
particles = [ P.vt__tilde__, P.ta__minus__, P.s__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_29,(0,5):C.GC_1077,(0,3):C.GC_1076,(0,4):C.GC_1076,(0,1):C.GC_1070,(0,0):C.GC_947})
V_1445 = Vertex(name = 'V_1445',
particles = [ P.vt__tilde__, P.ta__minus__, P.s__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1094,(0,5):C.GC_1144,(0,3):C.GC_1143,(0,4):C.GC_1143,(0,1):C.GC_1139,(0,0):C.GC_1119})
V_1446 = Vertex(name = 'V_1446',
particles = [ P.vt__tilde__, P.ta__minus__, P.s__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_32,(0,5):C.GC_939,(0,3):C.GC_938,(0,4):C.GC_938,(0,1):C.GC_932,(0,0):C.GC_948})
V_1447 = Vertex(name = 'V_1447',
particles = [ P.vt__tilde__, P.ta__minus__, P.s__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_748,(0,5):C.GC_972,(0,3):C.GC_971,(0,4):C.GC_971,(0,1):C.GC_970,(0,0):C.GC_973})
V_1448 = Vertex(name = 'V_1448',
particles = [ P.vt__tilde__, P.ta__minus__, P.s__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_35,(0,5):C.GC_959,(0,3):C.GC_958,(0,4):C.GC_958,(0,1):C.GC_952,(0,0):C.GC_949})
V_1449 = Vertex(name = 'V_1449',
particles = [ P.vt__tilde__, P.ta__minus__, P.s__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_856,(0,5):C.GC_983,(0,3):C.GC_982,(0,4):C.GC_982,(0,1):C.GC_978,(0,0):C.GC_976})
V_1450 = Vertex(name = 'V_1450',
particles = [ P.vt__tilde__, P.ta__minus__, P.b__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_30,(0,5):C.GC_1079,(0,3):C.GC_1078,(0,4):C.GC_1078,(0,1):C.GC_1071,(0,0):C.GC_927})
V_1451 = Vertex(name = 'V_1451',
particles = [ P.vt__tilde__, P.ta__minus__, P.b__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_1095,(0,5):C.GC_1146,(0,3):C.GC_1145,(0,4):C.GC_1145,(0,1):C.GC_1140,(0,0):C.GC_1117})
V_1452 = Vertex(name = 'V_1452',
particles = [ P.vt__tilde__, P.ta__minus__, P.b__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_33,(0,5):C.GC_941,(0,3):C.GC_940,(0,4):C.GC_940,(0,1):C.GC_933,(0,0):C.GC_928})
V_1453 = Vertex(name = 'V_1453',
particles = [ P.vt__tilde__, P.ta__minus__, P.b__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_504,(0,5):C.GC_965,(0,3):C.GC_964,(0,4):C.GC_964,(0,1):C.GC_963,(0,0):C.GC_962})
V_1454 = Vertex(name = 'V_1454',
particles = [ P.vt__tilde__, P.ta__minus__, P.b__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_36,(0,5):C.GC_961,(0,3):C.GC_960,(0,4):C.GC_960,(0,1):C.GC_953,(0,0):C.GC_929})
V_1455 = Vertex(name = 'V_1455',
particles = [ P.vt__tilde__, P.ta__minus__, P.b__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF10, L.FFFF2, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,2):C.GC_857,(0,5):C.GC_985,(0,3):C.GC_984,(0,4):C.GC_984,(0,1):C.GC_979,(0,0):C.GC_974})
# Auto-generated (UFO-format) four-quark contact vertices with a (d, d~) pair
# as the second fermion bilinear. Color structures mix singlet-singlet
# (Identity*Identity) and octet-exchange (T*T) factors; coupling keys are
# (color_index, lorentz_index) into the color/lorentz lists. Do not hand-edit —
# regenerate from the model source. TODO confirm provenance.
#
# --- u~ d d~ u (V_1456-V_1463) ---
V_1456 = Vertex(name = 'V_1456',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_1028,(3,0):C.GC_1030,(0,5):C.GC_1157,(2,5):C.GC_1158,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_1028,(3,8):C.GC_1030,(0,1):C.GC_1157,(2,1):C.GC_1158})
V_1457 = Vertex(name = 'V_1457',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_3775,(1,0):C.GC_1245,(3,0):C.GC_1246,(0,5):C.GC_1029,(2,5):C.GC_1031,(1,4):C.GC_3704,(3,4):C.GC_3712,(1,2):C.GC_2681,(3,2):C.GC_2684,(1,3):C.GC_1097,(3,3):C.GC_1100,(1,8):C.GC_1245,(3,8):C.GC_1246,(0,1):C.GC_1029,(2,1):C.GC_1031})
# Later vertices for the same particle content carry progressively fewer color
# and Lorentz structures — presumably higher-order / subleading contributions.
V_1458 = Vertex(name = 'V_1458',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3746,(0,3):C.GC_1169,(1,1):C.GC_552,(2,1):C.GC_553,(1,0):C.GC_1273,(2,0):C.GC_1276})
V_1459 = Vertex(name = 'V_1459',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2904,(0,1):C.GC_1285})
V_1460 = Vertex(name = 'V_1460',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1160})
V_1461 = Vertex(name = 'V_1461',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1178})
V_1462 = Vertex(name = 'V_1462',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1280})
V_1463 = Vertex(name = 'V_1463',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1290})
# --- c~ d d~ u (V_1464-V_1469) ---
V_1464 = Vertex(name = 'V_1464',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1857,(0,6):C.GC_1866,(1,0):C.GC_2642,(3,0):C.GC_2644,(0,4):C.GC_2641,(2,4):C.GC_2643,(1,3):C.GC_2409,(3,3):C.GC_2414,(1,2):C.GC_1981,(3,2):C.GC_1984,(1,7):C.GC_2403,(3,7):C.GC_2407,(0,1):C.GC_2401,(2,1):C.GC_2405})
V_1465 = Vertex(name = 'V_1465',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1875,(0,1):C.GC_2700})
V_1466 = Vertex(name = 'V_1466',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2694,(0,1):C.GC_2392})
V_1467 = Vertex(name = 'V_1467',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2390})
V_1468 = Vertex(name = 'V_1468',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2707})
V_1469 = Vertex(name = 'V_1469',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2439})
# --- t~ d d~ u (V_1470-V_1475) ---
V_1470 = Vertex(name = 'V_1470',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2748,(0,6):C.GC_2757,(1,0):C.GC_4000,(3,0):C.GC_4004,(0,4):C.GC_3998,(2,4):C.GC_4002,(1,3):C.GC_3698,(3,3):C.GC_3706,(1,2):C.GC_2880,(3,2):C.GC_2883,(1,7):C.GC_3835,(3,7):C.GC_3839,(0,1):C.GC_3833,(2,1):C.GC_3837})
V_1471 = Vertex(name = 'V_1471',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2766,(0,1):C.GC_4125})
V_1472 = Vertex(name = 'V_1472',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_4117,(0,1):C.GC_3692})
V_1473 = Vertex(name = 'V_1473',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3688})
V_1474 = Vertex(name = 'V_1474',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4136})
V_1475 = Vertex(name = 'V_1475',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3740})
# --- u~ d d~ c (V_1476-V_1481) ---
V_1476 = Vertex(name = 'V_1476',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1163,(0,6):C.GC_1172,(1,0):C.GC_1632,(3,0):C.GC_1634,(0,4):C.GC_1631,(2,4):C.GC_1633,(1,3):C.GC_1635,(3,3):C.GC_1637,(1,2):C.GC_1239,(3,2):C.GC_1242,(1,7):C.GC_1743,(3,7):C.GC_1747,(0,1):C.GC_1741,(2,1):C.GC_1745})
V_1477 = Vertex(name = 'V_1477',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1181,(0,1):C.GC_1826})
V_1478 = Vertex(name = 'V_1478',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1817,(0,1):C.GC_1629})
V_1479 = Vertex(name = 'V_1479',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1627})
V_1480 = Vertex(name = 'V_1480',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1836})
V_1481 = Vertex(name = 'V_1481',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1651})
# --- c~ d d~ c (V_1482-V_1489) ---
V_1482 = Vertex(name = 'V_1482',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(0,7):C.GC_46,(1,6):C.GC_41,(1,0):C.GC_545,(3,0):C.GC_547,(0,5):C.GC_1147,(2,5):C.GC_1148,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_545,(3,8):C.GC_547,(0,1):C.GC_1147,(2,1):C.GC_1148})
V_1483 = Vertex(name = 'V_1483',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(0,7):C.GC_2710,(1,6):C.GC_43,(1,0):C.GC_1913,(3,0):C.GC_1918,(0,5):C.GC_546,(2,5):C.GC_548,(1,4):C.GC_2411,(3,4):C.GC_2416,(1,2):C.GC_2679,(3,2):C.GC_2682,(1,3):C.GC_564,(3,3):C.GC_565,(1,8):C.GC_1913,(3,8):C.GC_1918,(0,1):C.GC_546,(2,1):C.GC_548})
V_1484 = Vertex(name = 'V_1484',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(0,2):C.GC_1869,(1,1):C.GC_2690,(1,0):C.GC_1905,(2,0):C.GC_1908})
V_1485 = Vertex(name = 'V_1485',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,1):C.GC_1998,(1,0):C.GC_2905})
V_1486 = Vertex(name = 'V_1486',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1860})
V_1487 = Vertex(name = 'V_1487',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1878})
V_1488 = Vertex(name = 'V_1488',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1996})
V_1489 = Vertex(name = 'V_1489',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2000})
# --- t~ d d~ c (V_1490-V_1495) ---
V_1490 = Vertex(name = 'V_1490',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2751,(0,6):C.GC_2760,(1,0):C.GC_3695,(3,0):C.GC_3697,(0,4):C.GC_3694,(2,4):C.GC_3696,(1,3):C.GC_3699,(3,3):C.GC_3707,(1,2):C.GC_2825,(3,2):C.GC_2828,(1,7):C.GC_3836,(3,7):C.GC_3840,(0,1):C.GC_3834,(2,1):C.GC_3838})
V_1491 = Vertex(name = 'V_1491',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2769,(0,1):C.GC_3944})
V_1492 = Vertex(name = 'V_1492',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3937,(0,1):C.GC_3693})
V_1493 = Vertex(name = 'V_1493',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3689})
V_1494 = Vertex(name = 'V_1494',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3957})
V_1495 = Vertex(name = 'V_1495',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3741})
# --- u~ d d~ t (V_1496-V_1501) ---
V_1496 = Vertex(name = 'V_1496',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1166,(0,6):C.GC_1175,(1,0):C.GC_1703,(3,0):C.GC_1705,(0,4):C.GC_1702,(2,4):C.GC_1704,(1,3):C.GC_1636,(3,3):C.GC_1638,(1,2):C.GC_1264,(3,2):C.GC_1267,(1,7):C.GC_1744,(3,7):C.GC_1748,(0,1):C.GC_1742,(2,1):C.GC_1746})
V_1497 = Vertex(name = 'V_1497',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1184,(0,1):C.GC_1830})
V_1498 = Vertex(name = 'V_1498',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1821,(0,1):C.GC_1630})
V_1499 = Vertex(name = 'V_1499',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1628})
V_1500 = Vertex(name = 'V_1500',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1840})
V_1501 = Vertex(name = 'V_1501',
particles = [ P.u__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1652})
# --- c~ d d~ t (V_1502-V_1507) ---
V_1502 = Vertex(name = 'V_1502',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1863,(0,6):C.GC_1872,(1,0):C.GC_2581,(3,0):C.GC_2583,(0,4):C.GC_2580,(2,4):C.GC_2582,(1,3):C.GC_2413,(3,3):C.GC_2418,(1,2):C.GC_1963,(3,2):C.GC_1966,(1,7):C.GC_2404,(3,7):C.GC_2408,(0,1):C.GC_2402,(2,1):C.GC_2406})
V_1503 = Vertex(name = 'V_1503',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1881,(0,1):C.GC_2619})
V_1504 = Vertex(name = 'V_1504',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2615,(0,1):C.GC_2393})
V_1505 = Vertex(name = 'V_1505',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2391})
V_1506 = Vertex(name = 'V_1506',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2624})
V_1507 = Vertex(name = 'V_1507',
particles = [ P.c__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2442})
# --- t~ d d~ t (V_1508-V_1515) ---
V_1508 = Vertex(name = 'V_1508',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_806,(3,0):C.GC_808,(0,5):C.GC_1153,(2,5):C.GC_1154,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_806,(3,8):C.GC_808,(0,1):C.GC_1153,(2,1):C.GC_1154})
V_1509 = Vertex(name = 'V_1509',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_3772,(1,0):C.GC_2833,(3,0):C.GC_2838,(0,5):C.GC_807,(2,5):C.GC_809,(1,4):C.GC_3701,(3,4):C.GC_3709,(1,2):C.GC_2680,(3,2):C.GC_2683,(1,3):C.GC_859,(3,3):C.GC_862,(1,8):C.GC_2833,(3,8):C.GC_2838,(0,1):C.GC_807,(2,1):C.GC_809})
V_1510 = Vertex(name = 'V_1510',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3743,(0,2):C.GC_2763,(1,0):C.GC_2862,(2,0):C.GC_2865})
V_1511 = Vertex(name = 'V_1511',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3690,(0,1):C.GC_2906})
V_1512 = Vertex(name = 'V_1512',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2754})
V_1513 = Vertex(name = 'V_1513',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2772})
V_1514 = Vertex(name = 'V_1514',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2899})
V_1515 = Vertex(name = 'V_1515',
particles = [ P.t__tilde__, P.d, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2913})
# Auto-generated (UFO-format) four-quark contact vertices with a (d, s~) pair —
# flavor-changing counterparts of the d d~ block above. Same conventions:
# coupling keys are (color_index, lorentz_index). Do not hand-edit; regenerate
# from the model source. TODO confirm provenance.
#
# --- u~ d s~ u (V_1516-V_1521) ---
V_1516 = Vertex(name = 'V_1516',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1161,(0,5):C.GC_1170,(1,0):C.GC_2939,(3,0):C.GC_2943,(0,3):C.GC_2937,(2,3):C.GC_2941,(1,2):C.GC_1274,(3,2):C.GC_1277,(1,6):C.GC_2946,(3,6):C.GC_2948,(0,1):C.GC_2945,(2,1):C.GC_2947})
V_1517 = Vertex(name = 'V_1517',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1179,(0,2):C.GC_2890,(1,0):C.GC_2953,(2,0):C.GC_2955})
V_1518 = Vertex(name = 'V_1518',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2889,(0,1):C.GC_2915})
V_1519 = Vertex(name = 'V_1519',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2907})
V_1520 = Vertex(name = 'V_1520',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2891})
V_1521 = Vertex(name = 'V_1521',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2965})
# --- c~ d s~ u (V_1522-V_1525) ---
V_1522 = Vertex(name = 'V_1522',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1858,(0,5):C.GC_1867,(1,0):C.GC_1989,(3,0):C.GC_1993,(0,3):C.GC_1987,(2,3):C.GC_1991,(1,2):C.GC_1982,(3,2):C.GC_1985,(1,6):C.GC_1941,(3,6):C.GC_1947,(0,1):C.GC_1938,(2,1):C.GC_1944})
V_1523 = Vertex(name = 'V_1523',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1876,(0,1):C.GC_2028})
V_1524 = Vertex(name = 'V_1524',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2026})
V_1525 = Vertex(name = 'V_1525',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2030})
# --- t~ d s~ u (V_1526-V_1529) ---
V_1526 = Vertex(name = 'V_1526',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2749,(0,5):C.GC_2758,(1,0):C.GC_2874,(3,0):C.GC_2878,(0,3):C.GC_2872,(2,3):C.GC_2876,(1,2):C.GC_2881,(3,2):C.GC_2884,(1,6):C.GC_2853,(3,6):C.GC_2859,(0,1):C.GC_2850,(2,1):C.GC_2856})
V_1527 = Vertex(name = 'V_1527',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2767,(0,1):C.GC_2971})
V_1528 = Vertex(name = 'V_1528',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2967})
V_1529 = Vertex(name = 'V_1529',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2977})
# --- u~ d s~ c (V_1530-V_1533) ---
V_1530 = Vertex(name = 'V_1530',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1164,(0,6):C.GC_1173,(1,0):C.GC_1204,(3,0):C.GC_1208,(0,4):C.GC_1202,(2,4):C.GC_1206,(1,3):C.GC_725,(3,3):C.GC_726,(1,2):C.GC_1240,(3,2):C.GC_1243,(1,7):C.GC_1258,(3,7):C.GC_1262,(0,1):C.GC_1256,(2,1):C.GC_1260})
V_1531 = Vertex(name = 'V_1531',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1182,(0,1):C.GC_1286})
V_1532 = Vertex(name = 'V_1532',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1281})
V_1533 = Vertex(name = 'V_1533',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1291})
# --- c~ d s~ c (V_1534-V_1539) ---
V_1534 = Vertex(name = 'V_1534',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1861,(0,5):C.GC_2915,(1,0):C.GC_1914,(3,0):C.GC_1919,(0,3):C.GC_1911,(2,3):C.GC_1916,(1,2):C.GC_1906,(3,2):C.GC_1909,(1,6):C.GC_1942,(3,6):C.GC_1948,(0,1):C.GC_1939,(2,1):C.GC_1945})
V_1535 = Vertex(name = 'V_1535',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1879,(0,2):C.GC_1870,(1,0):C.GC_2953,(2,0):C.GC_2955})
V_1536 = Vertex(name = 'V_1536',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2965,(0,1):C.GC_2014})
V_1537 = Vertex(name = 'V_1537',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2013})
V_1538 = Vertex(name = 'V_1538',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2974})
V_1539 = Vertex(name = 'V_1539',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2015})
# --- t~ d s~ c (V_1540-V_1543) ---
V_1540 = Vertex(name = 'V_1540',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2752,(0,5):C.GC_2761,(1,0):C.GC_2799,(3,0):C.GC_2803,(0,3):C.GC_2797,(2,3):C.GC_2801,(1,2):C.GC_2826,(3,2):C.GC_2829,(1,6):C.GC_2854,(3,6):C.GC_2860,(0,1):C.GC_2851,(2,1):C.GC_2857})
V_1541 = Vertex(name = 'V_1541',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2770,(0,1):C.GC_2902})
V_1542 = Vertex(name = 'V_1542',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2897})
V_1543 = Vertex(name = 'V_1543',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2911})
# --- u~ d s~ t (V_1544-V_1547) ---
V_1544 = Vertex(name = 'V_1544',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1167,(0,5):C.GC_1176,(1,0):C.GC_1224,(3,0):C.GC_1228,(0,3):C.GC_1222,(2,3):C.GC_1226,(1,2):C.GC_1265,(3,2):C.GC_1268,(1,6):C.GC_1259,(3,6):C.GC_1263,(0,1):C.GC_1257,(2,1):C.GC_1261})
V_1545 = Vertex(name = 'V_1545',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1185,(0,1):C.GC_1288})
V_1546 = Vertex(name = 'V_1546',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1283})
V_1547 = Vertex(name = 'V_1547',
particles = [ P.u__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1293})
# --- c~ d s~ t (V_1548-V_1551) ---
V_1548 = Vertex(name = 'V_1548',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1864,(0,5):C.GC_1873,(1,0):C.GC_1971,(3,0):C.GC_1975,(0,3):C.GC_1969,(2,3):C.GC_1973,(1,2):C.GC_1964,(3,2):C.GC_1967,(1,6):C.GC_1943,(3,6):C.GC_1949,(0,1):C.GC_1940,(2,1):C.GC_1946})
V_1549 = Vertex(name = 'V_1549',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1882,(0,1):C.GC_2018})
V_1550 = Vertex(name = 'V_1550',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2016})
V_1551 = Vertex(name = 'V_1551',
particles = [ P.c__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2020})
# --- t~ d s~ t (V_1552-V_1557) ---
V_1552 = Vertex(name = 'V_1552',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2965,(0,5):C.GC_2915,(1,0):C.GC_2834,(3,0):C.GC_2839,(0,3):C.GC_2831,(2,3):C.GC_2836,(1,2):C.GC_2953,(3,2):C.GC_2955,(1,6):C.GC_2855,(3,6):C.GC_2861,(0,1):C.GC_2852,(2,1):C.GC_2858})
V_1553 = Vertex(name = 'V_1553',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2974,(0,2):C.GC_2764,(1,0):C.GC_2863,(2,0):C.GC_2866})
V_1554 = Vertex(name = 'V_1554',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2755,(0,1):C.GC_2908})
V_1555 = Vertex(name = 'V_1555',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2773})
V_1556 = Vertex(name = 'V_1556',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2900})
V_1557 = Vertex(name = 'V_1557',
particles = [ P.t__tilde__, P.d, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2914})
# Auto-generated (UFO-format) four-quark contact vertices with a (d, b~) pair.
# Same conventions as the preceding blocks: coupling keys are
# (color_index, lorentz_index). Do not hand-edit; regenerate from the model
# source. TODO confirm provenance. (V_1578, which follows, continues beyond
# this section.)
#
# --- u~ d b~ u (V_1558-V_1563) ---
V_1558 = Vertex(name = 'V_1558',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1162,(0,5):C.GC_1171,(1,0):C.GC_2940,(3,0):C.GC_2944,(0,3):C.GC_2938,(2,3):C.GC_2942,(1,2):C.GC_1275,(3,2):C.GC_1278,(1,6):C.GC_2934,(3,6):C.GC_2936,(0,1):C.GC_2933,(2,1):C.GC_2935})
V_1559 = Vertex(name = 'V_1559',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1180,(0,2):C.GC_2887,(1,0):C.GC_2954,(2,0):C.GC_2956})
V_1560 = Vertex(name = 'V_1560',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2886,(0,1):C.GC_2917})
V_1561 = Vertex(name = 'V_1561',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2909})
V_1562 = Vertex(name = 'V_1562',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2888})
V_1563 = Vertex(name = 'V_1563',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2966})
# --- c~ d b~ u (V_1564-V_1567) ---
V_1564 = Vertex(name = 'V_1564',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1859,(0,5):C.GC_1868,(1,0):C.GC_1990,(3,0):C.GC_1994,(0,3):C.GC_1988,(2,3):C.GC_1992,(1,2):C.GC_1983,(3,2):C.GC_1986,(1,6):C.GC_1896,(3,6):C.GC_1902,(0,1):C.GC_1893,(2,1):C.GC_1899})
V_1565 = Vertex(name = 'V_1565',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1877,(0,1):C.GC_2029})
V_1566 = Vertex(name = 'V_1566',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2027})
V_1567 = Vertex(name = 'V_1567',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2031})
# --- t~ d b~ u (V_1568-V_1571) ---
V_1568 = Vertex(name = 'V_1568',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2750,(0,5):C.GC_2759,(1,0):C.GC_2875,(3,0):C.GC_2879,(0,3):C.GC_2873,(2,3):C.GC_2877,(1,2):C.GC_2882,(3,2):C.GC_2885,(1,6):C.GC_2816,(3,6):C.GC_2822,(0,1):C.GC_2813,(2,1):C.GC_2819})
V_1569 = Vertex(name = 'V_1569',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2768,(0,1):C.GC_2972})
V_1570 = Vertex(name = 'V_1570',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2968})
V_1571 = Vertex(name = 'V_1571',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2978})
# --- u~ d b~ c (V_1572-V_1575) ---
V_1572 = Vertex(name = 'V_1572',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1165,(0,5):C.GC_1174,(1,0):C.GC_1205,(3,0):C.GC_1209,(0,3):C.GC_1203,(2,3):C.GC_1207,(1,2):C.GC_1241,(3,2):C.GC_1244,(1,6):C.GC_1233,(3,6):C.GC_1237,(0,1):C.GC_1231,(2,1):C.GC_1235})
V_1573 = Vertex(name = 'V_1573',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1183,(0,1):C.GC_1287})
V_1574 = Vertex(name = 'V_1574',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1282})
V_1575 = Vertex(name = 'V_1575',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1292})
# --- c~ d b~ c (V_1576-V_1577; V_1577 continues past this section) ---
V_1576 = Vertex(name = 'V_1576',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1862,(0,5):C.GC_2917,(1,0):C.GC_1915,(3,0):C.GC_1920,(0,3):C.GC_1912,(2,3):C.GC_1917,(1,2):C.GC_1907,(3,2):C.GC_1910,(1,6):C.GC_1897,(3,6):C.GC_1903,(0,1):C.GC_1894,(2,1):C.GC_1900})
V_1577 = Vertex(name = 'V_1577',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1880,(0,2):C.GC_1871,(1,0):C.GC_2954,(2,0):C.GC_2956})
V_1578 = Vertex(name = 'V_1578',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1997,(0,1):C.GC_1999})
V_1579 = Vertex(name = 'V_1579',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2001})
V_1580 = Vertex(name = 'V_1580',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2966})
V_1581 = Vertex(name = 'V_1581',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2976})
# Four-quark contact vertices, continued (auto-generated UFO data — keys of
# `couplings` are (color_index, lorentz_index) into the two lists above them).
# --- t~ d b~ c four-quark contact vertices ---
V_1582 = Vertex(name = 'V_1582',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2753,(0,5):C.GC_2762,(1,0):C.GC_2800,(3,0):C.GC_2804,(0,3):C.GC_2798,(2,3):C.GC_2802,(1,2):C.GC_2827,(3,2):C.GC_2830,(1,6):C.GC_2817,(3,6):C.GC_2823,(0,1):C.GC_2814,(2,1):C.GC_2820})
V_1583 = Vertex(name = 'V_1583',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2771,(0,1):C.GC_2903})
V_1584 = Vertex(name = 'V_1584',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2898})
V_1585 = Vertex(name = 'V_1585',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2912})
# --- u~ d b~ t four-quark contact vertices (this group also carries L.FFFF15) ---
V_1586 = Vertex(name = 'V_1586',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1168,(0,6):C.GC_1177,(1,0):C.GC_1225,(3,0):C.GC_1229,(0,4):C.GC_1223,(2,4):C.GC_1227,(1,3):C.GC_543,(3,3):C.GC_544,(1,2):C.GC_1266,(3,2):C.GC_1269,(1,7):C.GC_1234,(3,7):C.GC_1238,(0,1):C.GC_1232,(2,1):C.GC_1236})
V_1587 = Vertex(name = 'V_1587',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1186,(0,1):C.GC_1289})
V_1588 = Vertex(name = 'V_1588',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1284})
V_1589 = Vertex(name = 'V_1589',
particles = [ P.u__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1294})
# --- c~ d b~ t four-quark contact vertices ---
V_1590 = Vertex(name = 'V_1590',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1865,(0,5):C.GC_1874,(1,0):C.GC_1972,(3,0):C.GC_1976,(0,3):C.GC_1970,(2,3):C.GC_1974,(1,2):C.GC_1965,(3,2):C.GC_1968,(1,6):C.GC_1898,(3,6):C.GC_1904,(0,1):C.GC_1895,(2,1):C.GC_1901})
V_1591 = Vertex(name = 'V_1591',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1883,(0,1):C.GC_2019})
V_1592 = Vertex(name = 'V_1592',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2017})
V_1593 = Vertex(name = 'V_1593',
particles = [ P.c__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2021})
# --- t~ d b~ t four-quark contact vertices ---
V_1594 = Vertex(name = 'V_1594',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2966,(0,5):C.GC_2917,(1,0):C.GC_2835,(3,0):C.GC_2840,(0,3):C.GC_2832,(2,3):C.GC_2837,(1,2):C.GC_2954,(3,2):C.GC_2956,(1,6):C.GC_2818,(3,6):C.GC_2824,(0,1):C.GC_2815,(2,1):C.GC_2821})
V_1595 = Vertex(name = 'V_1595',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2976,(0,2):C.GC_2765,(1,0):C.GC_2864,(2,0):C.GC_2867})
V_1596 = Vertex(name = 'V_1596',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2756,(0,1):C.GC_2910})
V_1597 = Vertex(name = 'V_1597',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2774})
V_1598 = Vertex(name = 'V_1598',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2901})
V_1599 = Vertex(name = 'V_1599',
particles = [ P.t__tilde__, P.d, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2916})
# Four-quark contact vertices with an s / d~ pair (auto-generated UFO data —
# `couplings` keys are (color_index, lorentz_index) into the two lists above).
# --- u~ s d~ u four-quark contact vertices ---
V_1600 = Vertex(name = 'V_1600',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1311,(0,5):C.GC_1320,(1,0):C.GC_3312,(3,0):C.GC_3316,(0,3):C.GC_3310,(2,3):C.GC_3314,(1,2):C.GC_1442,(3,2):C.GC_1445,(1,6):C.GC_3307,(3,6):C.GC_3309,(0,1):C.GC_3306,(2,1):C.GC_3308})
V_1601 = Vertex(name = 'V_1601',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1329,(0,2):C.GC_3256,(1,0):C.GC_3326,(2,0):C.GC_3331})
V_1602 = Vertex(name = 'V_1602',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3254,(0,1):C.GC_3283})
V_1603 = Vertex(name = 'V_1603',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3272})
V_1604 = Vertex(name = 'V_1604',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3258})
V_1605 = Vertex(name = 'V_1605',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3348})
# --- c~ s d~ u four-quark contact vertices (includes L.FFFF15) ---
V_1606 = Vertex(name = 'V_1606',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2033,(0,6):C.GC_2042,(1,0):C.GC_2179,(3,0):C.GC_2185,(0,4):C.GC_2176,(2,4):C.GC_2182,(1,3):C.GC_725,(3,3):C.GC_726,(1,2):C.GC_2170,(3,2):C.GC_2173,(1,7):C.GC_2090,(3,7):C.GC_2096,(0,1):C.GC_2087,(2,1):C.GC_2093})
V_1607 = Vertex(name = 'V_1607',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2051,(0,1):C.GC_2225})
V_1608 = Vertex(name = 'V_1608',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2222})
V_1609 = Vertex(name = 'V_1609',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2228})
# --- t~ s d~ u four-quark contact vertices ---
V_1610 = Vertex(name = 'V_1610',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3098,(0,5):C.GC_3107,(1,0):C.GC_3239,(3,0):C.GC_3245,(0,3):C.GC_3236,(2,3):C.GC_3242,(1,2):C.GC_3248,(3,2):C.GC_3251,(1,6):C.GC_3188,(3,6):C.GC_3194,(0,1):C.GC_3185,(2,1):C.GC_3191})
V_1611 = Vertex(name = 'V_1611',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3116,(0,1):C.GC_3360})
V_1612 = Vertex(name = 'V_1612',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3353})
V_1613 = Vertex(name = 'V_1613',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3369})
# --- u~ s d~ c four-quark contact vertices ---
V_1614 = Vertex(name = 'V_1614',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1314,(0,5):C.GC_1323,(1,0):C.GC_1362,(3,0):C.GC_1368,(0,3):C.GC_1359,(2,3):C.GC_1365,(1,2):C.GC_1400,(3,2):C.GC_1403,(1,6):C.GC_1408,(3,6):C.GC_1412,(0,1):C.GC_1406,(2,1):C.GC_1410})
V_1615 = Vertex(name = 'V_1615',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1332,(0,1):C.GC_1457})
V_1616 = Vertex(name = 'V_1616',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1450})
V_1617 = Vertex(name = 'V_1617',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1464})
# --- c~ s d~ c four-quark contact vertices ---
V_1618 = Vertex(name = 'V_1618',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2036,(0,5):C.GC_3283,(1,0):C.GC_2117,(3,0):C.GC_2127,(0,3):C.GC_2112,(2,3):C.GC_2122,(1,2):C.GC_2081,(3,2):C.GC_2084,(1,6):C.GC_2091,(3,6):C.GC_2097,(0,1):C.GC_2088,(2,1):C.GC_2094})
V_1619 = Vertex(name = 'V_1619',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2054,(0,2):C.GC_2045,(1,0):C.GC_3326,(2,0):C.GC_3331})
V_1620 = Vertex(name = 'V_1620',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3348,(0,1):C.GC_2192})
V_1621 = Vertex(name = 'V_1621',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2189})
V_1622 = Vertex(name = 'V_1622',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3364})
V_1623 = Vertex(name = 'V_1623',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2195})
# Four-quark contact vertices, continued (auto-generated UFO data —
# `couplings` keys are (color_index, lorentz_index) into the two lists above).
# --- t~ s d~ c four-quark contact vertices ---
V_1624 = Vertex(name = 'V_1624',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3101,(0,5):C.GC_3110,(1,0):C.GC_3156,(3,0):C.GC_3162,(0,3):C.GC_3153,(2,3):C.GC_3159,(1,2):C.GC_3179,(3,2):C.GC_3182,(1,6):C.GC_3189,(3,6):C.GC_3195,(0,1):C.GC_3186,(2,1):C.GC_3192})
V_1625 = Vertex(name = 'V_1625',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3119,(0,1):C.GC_3269})
V_1626 = Vertex(name = 'V_1626',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3263})
V_1627 = Vertex(name = 'V_1627',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3279})
# --- u~ s d~ t four-quark contact vertices ---
V_1628 = Vertex(name = 'V_1628',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1317,(0,5):C.GC_1326,(1,0):C.GC_1382,(3,0):C.GC_1388,(0,3):C.GC_1379,(2,3):C.GC_1385,(1,2):C.GC_1433,(3,2):C.GC_1436,(1,6):C.GC_1409,(3,6):C.GC_1413,(0,1):C.GC_1407,(2,1):C.GC_1411})
V_1629 = Vertex(name = 'V_1629',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1335,(0,1):C.GC_1460})
V_1630 = Vertex(name = 'V_1630',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1453})
V_1631 = Vertex(name = 'V_1631',
particles = [ P.u__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1467})
# --- c~ s d~ t four-quark contact vertices ---
V_1632 = Vertex(name = 'V_1632',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2039,(0,5):C.GC_2048,(1,0):C.GC_2157,(3,0):C.GC_2163,(0,3):C.GC_2154,(2,3):C.GC_2160,(1,2):C.GC_2148,(3,2):C.GC_2151,(1,6):C.GC_2092,(3,6):C.GC_2098,(0,1):C.GC_2089,(2,1):C.GC_2095})
V_1633 = Vertex(name = 'V_1633',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2057,(0,1):C.GC_2212})
V_1634 = Vertex(name = 'V_1634',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2209})
V_1635 = Vertex(name = 'V_1635',
particles = [ P.c__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2215})
# --- t~ s d~ t four-quark contact vertices ---
V_1636 = Vertex(name = 'V_1636',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3348,(0,5):C.GC_3283,(1,0):C.GC_3210,(3,0):C.GC_3220,(0,3):C.GC_3205,(2,3):C.GC_3215,(1,2):C.GC_3326,(3,2):C.GC_3331,(1,6):C.GC_3190,(3,6):C.GC_3196,(0,1):C.GC_3187,(2,1):C.GC_3193})
V_1637 = Vertex(name = 'V_1637',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3364,(0,2):C.GC_3113,(1,0):C.GC_3226,(2,0):C.GC_3229})
V_1638 = Vertex(name = 'V_1638',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3104,(0,1):C.GC_3273})
V_1639 = Vertex(name = 'V_1639',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3122})
V_1640 = Vertex(name = 'V_1640',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3266})
V_1641 = Vertex(name = 'V_1641',
particles = [ P.t__tilde__, P.s, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3282})
# Four-quark contact vertices with an s / s~ pair (auto-generated UFO data —
# `couplings` keys are (color_index, lorentz_index) into the two lists above).
# --- u~ s s~ u four-quark contact vertices (nine Lorentz structures here) ---
V_1642 = Vertex(name = 'V_1642',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_1059,(3,0):C.GC_1061,(0,5):C.GC_1149,(2,5):C.GC_1150,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_1059,(3,8):C.GC_1061,(0,1):C.GC_1149,(2,1):C.GC_1150})
V_1643 = Vertex(name = 'V_1643',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_3776,(1,0):C.GC_1422,(3,0):C.GC_1427,(0,5):C.GC_1060,(2,5):C.GC_1062,(1,4):C.GC_3705,(3,4):C.GC_3713,(1,2):C.GC_3329,(3,2):C.GC_3334,(1,3):C.GC_1098,(3,3):C.GC_1101,(1,8):C.GC_1422,(3,8):C.GC_1427,(0,1):C.GC_1060,(2,1):C.GC_1062})
V_1644 = Vertex(name = 'V_1644',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3747,(0,2):C.GC_1321,(1,0):C.GC_1443,(2,0):C.GC_1446})
V_1645 = Vertex(name = 'V_1645',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3274,(0,1):C.GC_1456})
V_1646 = Vertex(name = 'V_1646',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1312})
V_1647 = Vertex(name = 'V_1647',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1330})
V_1648 = Vertex(name = 'V_1648',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1449})
V_1649 = Vertex(name = 'V_1649',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1463})
# --- c~ s s~ u four-quark contact vertices ---
V_1650 = Vertex(name = 'V_1650',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2034,(0,6):C.GC_2392,(1,0):C.GC_2180,(3,0):C.GC_2186,(0,4):C.GC_2177,(2,4):C.GC_2183,(1,3):C.GC_2409,(3,3):C.GC_2414,(1,2):C.GC_2171,(3,2):C.GC_2174,(1,7):C.GC_2116,(3,7):C.GC_2126,(0,1):C.GC_2111,(2,1):C.GC_2121})
V_1651 = Vertex(name = 'V_1651',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2052,(0,1):C.GC_2043})
V_1652 = Vertex(name = 'V_1652',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2223,(0,1):C.GC_2226})
V_1653 = Vertex(name = 'V_1653',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2229})
V_1654 = Vertex(name = 'V_1654',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2439})
V_1655 = Vertex(name = 'V_1655',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2449})
# --- t~ s s~ u four-quark contact vertices ---
V_1656 = Vertex(name = 'V_1656',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_3099,(0,6):C.GC_3692,(1,0):C.GC_3240,(3,0):C.GC_3246,(0,4):C.GC_3237,(2,4):C.GC_3243,(1,3):C.GC_3698,(3,3):C.GC_3706,(1,2):C.GC_3249,(3,2):C.GC_3252,(1,7):C.GC_3208,(3,7):C.GC_3218,(0,1):C.GC_3203,(2,1):C.GC_3213})
V_1657 = Vertex(name = 'V_1657',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3117,(0,1):C.GC_3108})
V_1658 = Vertex(name = 'V_1658',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3740,(0,1):C.GC_3361})
V_1659 = Vertex(name = 'V_1659',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3758})
V_1660 = Vertex(name = 'V_1660',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3354})
V_1661 = Vertex(name = 'V_1661',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3370})
# --- u~ s s~ c four-quark contact vertices ---
V_1662 = Vertex(name = 'V_1662',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1315,(0,6):C.GC_1629,(1,0):C.GC_1363,(3,0):C.GC_1369,(0,4):C.GC_1360,(2,4):C.GC_1366,(1,3):C.GC_1635,(3,3):C.GC_1637,(1,2):C.GC_1401,(3,2):C.GC_1404,(1,7):C.GC_1423,(3,7):C.GC_1428,(0,1):C.GC_1420,(2,1):C.GC_1425})
V_1663 = Vertex(name = 'V_1663',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1333,(0,1):C.GC_1324})
V_1664 = Vertex(name = 'V_1664',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1451,(0,1):C.GC_1458})
V_1665 = Vertex(name = 'V_1665',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1465})
V_1666 = Vertex(name = 'V_1666',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1651})
V_1667 = Vertex(name = 'V_1667',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1656})
# Four-quark contact vertices, continued (auto-generated UFO data —
# `couplings` keys are (color_index, lorentz_index) into the two lists above).
# --- c~ s s~ c four-quark contact vertices (nine Lorentz structures here) ---
V_1668 = Vertex(name = 'V_1668',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_718,(3,0):C.GC_720,(0,5):C.GC_719,(2,5):C.GC_721,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_718,(3,8):C.GC_720,(0,1):C.GC_719,(2,1):C.GC_721})
V_1669 = Vertex(name = 'V_1669',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_3375,(1,0):C.GC_2118,(3,0):C.GC_2128,(0,5):C.GC_2113,(2,5):C.GC_2123,(1,4):C.GC_2412,(3,4):C.GC_2417,(1,2):C.GC_3327,(3,2):C.GC_3332,(1,3):C.GC_749,(3,3):C.GC_750,(1,8):C.GC_2118,(3,8):C.GC_2128,(0,1):C.GC_2113,(2,1):C.GC_2123})
V_1670 = Vertex(name = 'V_1670',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3349,(0,3):C.GC_2046,(1,1):C.GC_735,(2,1):C.GC_736,(1,0):C.GC_2082,(2,0):C.GC_2085})
V_1671 = Vertex(name = 'V_1671',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3275,(0,1):C.GC_2193})
V_1672 = Vertex(name = 'V_1672',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2037})
V_1673 = Vertex(name = 'V_1673',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2055})
V_1674 = Vertex(name = 'V_1674',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2190})
V_1675 = Vertex(name = 'V_1675',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2196})
# --- t~ s s~ c four-quark contact vertices ---
V_1676 = Vertex(name = 'V_1676',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_3102,(0,6):C.GC_3693,(1,0):C.GC_3157,(3,0):C.GC_3163,(0,4):C.GC_3154,(2,4):C.GC_3160,(1,3):C.GC_3699,(3,3):C.GC_3707,(1,2):C.GC_3180,(3,2):C.GC_3183,(1,7):C.GC_3209,(3,7):C.GC_3219,(0,1):C.GC_3204,(2,1):C.GC_3214})
V_1677 = Vertex(name = 'V_1677',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3120,(0,1):C.GC_3111})
V_1678 = Vertex(name = 'V_1678',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3264,(0,1):C.GC_3270})
V_1679 = Vertex(name = 'V_1679',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3280})
V_1680 = Vertex(name = 'V_1680',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3741})
V_1681 = Vertex(name = 'V_1681',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3760})
# --- u~ s s~ t four-quark contact vertices ---
V_1682 = Vertex(name = 'V_1682',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1318,(0,6):C.GC_1630,(1,0):C.GC_1383,(3,0):C.GC_1389,(0,4):C.GC_1380,(2,4):C.GC_1386,(1,3):C.GC_1636,(3,3):C.GC_1638,(1,2):C.GC_1434,(3,2):C.GC_1437,(1,7):C.GC_1424,(3,7):C.GC_1429,(0,1):C.GC_1421,(2,1):C.GC_1426})
V_1683 = Vertex(name = 'V_1683',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1336,(0,1):C.GC_1327})
V_1684 = Vertex(name = 'V_1684',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1652,(0,1):C.GC_1461})
V_1685 = Vertex(name = 'V_1685',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1658})
V_1686 = Vertex(name = 'V_1686',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1454})
V_1687 = Vertex(name = 'V_1687',
particles = [ P.u__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1468})
# --- c~ s s~ t four-quark contact vertices ---
V_1688 = Vertex(name = 'V_1688',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2040,(0,6):C.GC_2393,(1,0):C.GC_2158,(3,0):C.GC_2164,(0,4):C.GC_2155,(2,4):C.GC_2161,(1,3):C.GC_2413,(3,3):C.GC_2418,(1,2):C.GC_2149,(3,2):C.GC_2152,(1,7):C.GC_2120,(3,7):C.GC_2130,(0,1):C.GC_2115,(2,1):C.GC_2125})
V_1689 = Vertex(name = 'V_1689',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2058,(0,1):C.GC_2049})
V_1690 = Vertex(name = 'V_1690',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2210,(0,1):C.GC_2213})
V_1691 = Vertex(name = 'V_1691',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2216})
V_1692 = Vertex(name = 'V_1692',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2442})
V_1693 = Vertex(name = 'V_1693',
particles = [ P.c__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2454})
# --- t~ s s~ t four-quark contact vertex (nine Lorentz structures) ---
V_1694 = Vertex(name = 'V_1694',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_837,(3,0):C.GC_839,(0,5):C.GC_838,(2,5):C.GC_840,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_837,(3,8):C.GC_839,(0,1):C.GC_838,(2,1):C.GC_840})
V_1695 = Vertex(name = 'V_1695',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_3773,(1,0):C.GC_3211,(3,0):C.GC_3221,(0,5):C.GC_3206,(2,5):C.GC_3216,(1,4):C.GC_3702,(3,4):C.GC_3710,(1,2):C.GC_3328,(3,2):C.GC_3333,(1,3):C.GC_860,(3,3):C.GC_863,(1,8):C.GC_3211,(3,8):C.GC_3221,(0,1):C.GC_3206,(2,1):C.GC_3216})
V_1696 = Vertex(name = 'V_1696',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3744,(0,2):C.GC_3114,(1,0):C.GC_3227,(2,0):C.GC_3230})
V_1697 = Vertex(name = 'V_1697',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3691,(0,1):C.GC_3276})
V_1698 = Vertex(name = 'V_1698',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3105})
V_1699 = Vertex(name = 'V_1699',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3123})
V_1700 = Vertex(name = 'V_1700',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3267})
V_1701 = Vertex(name = 'V_1701',
particles = [ P.t__tilde__, P.s, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3284})
V_1702 = Vertex(name = 'V_1702',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1313,(0,5):C.GC_1322,(1,0):C.GC_3313,(3,0):C.GC_3317,(0,3):C.GC_3311,(2,3):C.GC_3315,(1,2):C.GC_1444,(3,2):C.GC_1447,(1,6):C.GC_3303,(3,6):C.GC_3305,(0,1):C.GC_3302,(2,1):C.GC_3304})
V_1703 = Vertex(name = 'V_1703',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1331,(0,2):C.GC_3257,(1,0):C.GC_3330,(2,0):C.GC_3335})
V_1704 = Vertex(name = 'V_1704',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3255,(0,1):C.GC_3286})
V_1705 = Vertex(name = 'V_1705',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3277})
V_1706 = Vertex(name = 'V_1706',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3259})
V_1707 = Vertex(name = 'V_1707',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3352})
V_1708 = Vertex(name = 'V_1708',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2035,(0,5):C.GC_2044,(1,0):C.GC_2181,(3,0):C.GC_2187,(0,3):C.GC_2178,(2,3):C.GC_2184,(1,2):C.GC_2172,(3,2):C.GC_2175,(1,6):C.GC_2072,(3,6):C.GC_2078,(0,1):C.GC_2069,(2,1):C.GC_2075})
V_1709 = Vertex(name = 'V_1709',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2053,(0,1):C.GC_2227})
V_1710 = Vertex(name = 'V_1710',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2224})
V_1711 = Vertex(name = 'V_1711',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2230})
V_1712 = Vertex(name = 'V_1712',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3100,(0,5):C.GC_3109,(1,0):C.GC_3241,(3,0):C.GC_3247,(0,3):C.GC_3238,(2,3):C.GC_3244,(1,2):C.GC_3250,(3,2):C.GC_3253,(1,6):C.GC_3170,(3,6):C.GC_3176,(0,1):C.GC_3167,(2,1):C.GC_3173})
V_1713 = Vertex(name = 'V_1713',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3118,(0,1):C.GC_3362})
V_1714 = Vertex(name = 'V_1714',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3355})
V_1715 = Vertex(name = 'V_1715',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3371})
V_1716 = Vertex(name = 'V_1716',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1316,(0,5):C.GC_1325,(1,0):C.GC_1364,(3,0):C.GC_1370,(0,3):C.GC_1361,(2,3):C.GC_1367,(1,2):C.GC_1402,(3,2):C.GC_1405,(1,6):C.GC_1394,(3,6):C.GC_1398,(0,1):C.GC_1392,(2,1):C.GC_1396})
V_1717 = Vertex(name = 'V_1717',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1334,(0,1):C.GC_1459})
V_1718 = Vertex(name = 'V_1718',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1452})
V_1719 = Vertex(name = 'V_1719',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1466})
V_1720 = Vertex(name = 'V_1720',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2038,(0,5):C.GC_3286,(1,0):C.GC_2119,(3,0):C.GC_2129,(0,3):C.GC_2114,(2,3):C.GC_2124,(1,2):C.GC_2083,(3,2):C.GC_2086,(1,6):C.GC_2073,(3,6):C.GC_2079,(0,1):C.GC_2070,(2,1):C.GC_2076})
V_1721 = Vertex(name = 'V_1721',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2056,(0,2):C.GC_2047,(1,0):C.GC_3330,(2,0):C.GC_3335})
V_1722 = Vertex(name = 'V_1722',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2191,(0,1):C.GC_2194})
V_1723 = Vertex(name = 'V_1723',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2197})
V_1724 = Vertex(name = 'V_1724',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3352})
V_1725 = Vertex(name = 'V_1725',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3368})
V_1726 = Vertex(name = 'V_1726',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3103,(0,5):C.GC_3112,(1,0):C.GC_3158,(3,0):C.GC_3164,(0,3):C.GC_3155,(2,3):C.GC_3161,(1,2):C.GC_3181,(3,2):C.GC_3184,(1,6):C.GC_3171,(3,6):C.GC_3177,(0,1):C.GC_3168,(2,1):C.GC_3174})
V_1727 = Vertex(name = 'V_1727',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3121,(0,1):C.GC_3271})
V_1728 = Vertex(name = 'V_1728',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3265})
V_1729 = Vertex(name = 'V_1729',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3281})
V_1730 = Vertex(name = 'V_1730',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1319,(0,5):C.GC_1328,(1,0):C.GC_1384,(3,0):C.GC_1390,(0,3):C.GC_1381,(2,3):C.GC_1387,(1,2):C.GC_1435,(3,2):C.GC_1438,(1,6):C.GC_1395,(3,6):C.GC_1399,(0,1):C.GC_1393,(2,1):C.GC_1397})
V_1731 = Vertex(name = 'V_1731',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1337,(0,1):C.GC_1462})
V_1732 = Vertex(name = 'V_1732',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1455})
V_1733 = Vertex(name = 'V_1733',
particles = [ P.u__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1469})
V_1734 = Vertex(name = 'V_1734',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2041,(0,6):C.GC_2050,(1,0):C.GC_2159,(3,0):C.GC_2165,(0,4):C.GC_2156,(2,4):C.GC_2162,(1,3):C.GC_716,(3,3):C.GC_717,(1,2):C.GC_2150,(3,2):C.GC_2153,(1,7):C.GC_2074,(3,7):C.GC_2080,(0,1):C.GC_2071,(2,1):C.GC_2077})
V_1735 = Vertex(name = 'V_1735',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2059,(0,1):C.GC_2214})
V_1736 = Vertex(name = 'V_1736',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2211})
V_1737 = Vertex(name = 'V_1737',
particles = [ P.c__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2217})
V_1738 = Vertex(name = 'V_1738',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3352,(0,5):C.GC_3286,(1,0):C.GC_3212,(3,0):C.GC_3222,(0,3):C.GC_3207,(2,3):C.GC_3217,(1,2):C.GC_3330,(3,2):C.GC_3335,(1,6):C.GC_3172,(3,6):C.GC_3178,(0,1):C.GC_3169,(2,1):C.GC_3175})
V_1739 = Vertex(name = 'V_1739',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3368,(0,2):C.GC_3115,(1,0):C.GC_3228,(2,0):C.GC_3231})
V_1740 = Vertex(name = 'V_1740',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3106,(0,1):C.GC_3278})
V_1741 = Vertex(name = 'V_1741',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3124})
V_1742 = Vertex(name = 'V_1742',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3268})
V_1743 = Vertex(name = 'V_1743',
particles = [ P.t__tilde__, P.s, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3285})
# Auto-generated UFO four-fermion vertices with external content (q~ b d~ q'):
# up-type anti-quark, b quark, anti-down quark, up-type quark.  `couplings`
# maps (colour_index, lorentz_index) pairs into the color/lorentz lists.
# NOTE(review): generated data — regenerate from the FeynRules model rather
# than editing coupling assignments by hand.
V_1744 = Vertex(name = 'V_1744',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1486,(0,5):C.GC_1495,(1,0):C.GC_3992,(3,0):C.GC_3996,(0,3):C.GC_3990,(2,3):C.GC_3994,(1,2):C.GC_1617,(3,2):C.GC_1620,(1,6):C.GC_4001,(3,6):C.GC_4005,(0,1):C.GC_3999,(2,1):C.GC_4003})
V_1745 = Vertex(name = 'V_1745',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1504,(0,2):C.GC_3761,(1,0):C.GC_4086,(2,0):C.GC_4091})
V_1746 = Vertex(name = 'V_1746',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3752,(0,1):C.GC_3959})
V_1747 = Vertex(name = 'V_1747',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3946})
V_1748 = Vertex(name = 'V_1748',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3768})
V_1749 = Vertex(name = 'V_1749',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4108})
# --- c~ b d~ u vertices ---
V_1750 = Vertex(name = 'V_1750',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2232,(0,5):C.GC_2241,(1,0):C.GC_2373,(3,0):C.GC_2379,(0,3):C.GC_2370,(2,3):C.GC_2376,(1,2):C.GC_2382,(3,2):C.GC_2385,(1,6):C.GC_2301,(3,6):C.GC_2307,(0,1):C.GC_2298,(2,1):C.GC_2304})
V_1751 = Vertex(name = 'V_1751',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2250,(0,1):C.GC_2698})
V_1752 = Vertex(name = 'V_1752',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2692})
V_1753 = Vertex(name = 'V_1753',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2704})
# --- t~ b d~ u vertices ---
V_1754 = Vertex(name = 'V_1754',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_3530,(0,6):C.GC_3539,(1,0):C.GC_3671,(3,0):C.GC_3677,(0,4):C.GC_3668,(2,4):C.GC_3674,(1,3):C.GC_543,(3,3):C.GC_544,(1,2):C.GC_3680,(3,2):C.GC_3683,(1,7):C.GC_3631,(3,7):C.GC_3637,(0,1):C.GC_3628,(2,1):C.GC_3634})
V_1755 = Vertex(name = 'V_1755',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3548,(0,1):C.GC_4123})
V_1756 = Vertex(name = 'V_1756',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4115})
V_1757 = Vertex(name = 'V_1757',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4133})
# --- u~ b d~ c vertices ---
V_1758 = Vertex(name = 'V_1758',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1489,(0,5):C.GC_1498,(1,0):C.GC_1525,(3,0):C.GC_1531,(0,3):C.GC_1522,(2,3):C.GC_1528,(1,2):C.GC_1580,(3,2):C.GC_1583,(1,6):C.GC_1588,(3,6):C.GC_1592,(0,1):C.GC_1586,(2,1):C.GC_1590})
V_1759 = Vertex(name = 'V_1759',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1507,(0,1):C.GC_1824})
V_1760 = Vertex(name = 'V_1760',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1815})
V_1761 = Vertex(name = 'V_1761',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1833})
# --- c~ b d~ c vertices ---
V_1762 = Vertex(name = 'V_1762',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2235,(0,5):C.GC_3959,(1,0):C.GC_2274,(3,0):C.GC_2284,(0,3):C.GC_2269,(2,3):C.GC_2279,(1,2):C.GC_2292,(3,2):C.GC_2295,(1,6):C.GC_2302,(3,6):C.GC_2308,(0,1):C.GC_2299,(2,1):C.GC_2305})
V_1763 = Vertex(name = 'V_1763',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2253,(0,2):C.GC_2244,(1,0):C.GC_4086,(2,0):C.GC_4091})
V_1764 = Vertex(name = 'V_1764',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2444,(0,1):C.GC_2450})
V_1765 = Vertex(name = 'V_1765',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2456})
V_1766 = Vertex(name = 'V_1766',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4108})
V_1767 = Vertex(name = 'V_1767',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4128})
# --- t~ b d~ c vertices ---
V_1768 = Vertex(name = 'V_1768',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3533,(0,5):C.GC_3542,(1,0):C.GC_3576,(3,0):C.GC_3582,(0,3):C.GC_3573,(2,3):C.GC_3579,(1,2):C.GC_3622,(3,2):C.GC_3625,(1,6):C.GC_3632,(3,6):C.GC_3638,(0,1):C.GC_3629,(2,1):C.GC_3635})
V_1769 = Vertex(name = 'V_1769',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3551,(0,1):C.GC_3942})
V_1770 = Vertex(name = 'V_1770',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3935})
V_1771 = Vertex(name = 'V_1771',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3954})
# --- u~ b d~ t vertices ---
V_1772 = Vertex(name = 'V_1772',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1492,(0,5):C.GC_1501,(1,0):C.GC_1557,(3,0):C.GC_1563,(0,3):C.GC_1554,(2,3):C.GC_1560,(1,2):C.GC_1608,(3,2):C.GC_1611,(1,6):C.GC_1589,(3,6):C.GC_1593,(0,1):C.GC_1587,(2,1):C.GC_1591})
V_1773 = Vertex(name = 'V_1773',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1510,(0,1):C.GC_1828})
V_1774 = Vertex(name = 'V_1774',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1819})
V_1775 = Vertex(name = 'V_1775',
particles = [ P.u__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1837})
# --- c~ b d~ t vertices ---
V_1776 = Vertex(name = 'V_1776',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2238,(0,5):C.GC_2247,(1,0):C.GC_2351,(3,0):C.GC_2357,(0,3):C.GC_2348,(2,3):C.GC_2354,(1,2):C.GC_2360,(3,2):C.GC_2363,(1,6):C.GC_2303,(3,6):C.GC_2309,(0,1):C.GC_2300,(2,1):C.GC_2306})
V_1777 = Vertex(name = 'V_1777',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2256,(0,1):C.GC_2617})
V_1778 = Vertex(name = 'V_1778',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2613})
V_1779 = Vertex(name = 'V_1779',
particles = [ P.c__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2621})
# --- t~ b d~ t vertices ---
V_1780 = Vertex(name = 'V_1780',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_4108,(0,5):C.GC_3959,(1,0):C.GC_3606,(3,0):C.GC_3616,(0,3):C.GC_3601,(2,3):C.GC_3611,(1,2):C.GC_4086,(3,2):C.GC_4091,(1,6):C.GC_3633,(3,6):C.GC_3639,(0,1):C.GC_3630,(2,1):C.GC_3636})
V_1781 = Vertex(name = 'V_1781',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_4128,(0,2):C.GC_3545,(1,0):C.GC_3658,(2,0):C.GC_3661})
V_1782 = Vertex(name = 'V_1782',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3536,(0,1):C.GC_3947})
V_1783 = Vertex(name = 'V_1783',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3554})
V_1784 = Vertex(name = 'V_1784',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3939})
V_1785 = Vertex(name = 'V_1785',
particles = [ P.t__tilde__, P.b, P.d__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3958})
# Auto-generated UFO four-fermion vertices with external content (q~ b s~ q'):
# up-type anti-quark, b quark, anti-strange quark, up-type quark.  `couplings`
# maps (colour_index, lorentz_index) pairs into the color/lorentz lists.
# NOTE(review): generated data — regenerate from the FeynRules model rather
# than editing coupling assignments by hand.
V_1786 = Vertex(name = 'V_1786',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1487,(0,5):C.GC_1496,(1,0):C.GC_3993,(3,0):C.GC_3997,(0,3):C.GC_3991,(2,3):C.GC_3995,(1,2):C.GC_1618,(3,2):C.GC_1621,(1,6):C.GC_4069,(3,6):C.GC_4071,(0,1):C.GC_4068,(2,1):C.GC_4070})
V_1787 = Vertex(name = 'V_1787',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1505,(0,2):C.GC_3831,(1,0):C.GC_4087,(2,0):C.GC_4092})
V_1788 = Vertex(name = 'V_1788',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3830,(0,1):C.GC_3961})
V_1789 = Vertex(name = 'V_1789',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3948})
V_1790 = Vertex(name = 'V_1790',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3832})
V_1791 = Vertex(name = 'V_1791',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4109})
# --- c~ b s~ u vertices ---
V_1792 = Vertex(name = 'V_1792',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2233,(0,5):C.GC_2242,(1,0):C.GC_2374,(3,0):C.GC_2380,(0,3):C.GC_2371,(2,3):C.GC_2377,(1,2):C.GC_2383,(3,2):C.GC_2386,(1,6):C.GC_2327,(3,6):C.GC_2333,(0,1):C.GC_2324,(2,1):C.GC_2330})
V_1793 = Vertex(name = 'V_1793',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2251,(0,1):C.GC_2699})
V_1794 = Vertex(name = 'V_1794',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2693})
V_1795 = Vertex(name = 'V_1795',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2705})
# --- t~ b s~ u vertices ---
V_1796 = Vertex(name = 'V_1796',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_3531,(0,5):C.GC_3540,(1,0):C.GC_3672,(3,0):C.GC_3678,(0,3):C.GC_3669,(2,3):C.GC_3675,(1,2):C.GC_3681,(3,2):C.GC_3684,(1,6):C.GC_3649,(3,6):C.GC_3655,(0,1):C.GC_3646,(2,1):C.GC_3652})
V_1797 = Vertex(name = 'V_1797',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3549,(0,1):C.GC_4124})
V_1798 = Vertex(name = 'V_1798',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4116})
V_1799 = Vertex(name = 'V_1799',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4134})
# --- u~ b s~ c vertices ---
V_1800 = Vertex(name = 'V_1800',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1490,(0,5):C.GC_1499,(1,0):C.GC_1526,(3,0):C.GC_1532,(0,3):C.GC_1523,(2,3):C.GC_1529,(1,2):C.GC_1581,(3,2):C.GC_1584,(1,6):C.GC_1602,(3,6):C.GC_1606,(0,1):C.GC_1600,(2,1):C.GC_1604})
V_1801 = Vertex(name = 'V_1801',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1508,(0,1):C.GC_1825})
V_1802 = Vertex(name = 'V_1802',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1816})
V_1803 = Vertex(name = 'V_1803',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1834})
# --- c~ b s~ c vertices ---
V_1804 = Vertex(name = 'V_1804',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2236,(0,5):C.GC_3961,(1,0):C.GC_2275,(3,0):C.GC_2285,(0,3):C.GC_2270,(2,3):C.GC_2280,(1,2):C.GC_2293,(3,2):C.GC_2296,(1,6):C.GC_2328,(3,6):C.GC_2334,(0,1):C.GC_2325,(2,1):C.GC_2331})
V_1805 = Vertex(name = 'V_1805',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2254,(0,2):C.GC_2245,(1,0):C.GC_4087,(2,0):C.GC_4092})
V_1806 = Vertex(name = 'V_1806',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2577,(0,1):C.GC_2578})
V_1807 = Vertex(name = 'V_1807',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2579})
V_1808 = Vertex(name = 'V_1808',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4109})
V_1809 = Vertex(name = 'V_1809',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4130})
# --- t~ b s~ c vertices ---
V_1810 = Vertex(name = 'V_1810',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_3534,(0,6):C.GC_3543,(1,0):C.GC_3577,(3,0):C.GC_3583,(0,4):C.GC_3574,(2,4):C.GC_3580,(1,3):C.GC_716,(3,3):C.GC_717,(1,2):C.GC_3623,(3,2):C.GC_3626,(1,7):C.GC_3650,(3,7):C.GC_3656,(0,1):C.GC_3647,(2,1):C.GC_3653})
V_1811 = Vertex(name = 'V_1811',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3552,(0,1):C.GC_3943})
V_1812 = Vertex(name = 'V_1812',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3936})
V_1813 = Vertex(name = 'V_1813',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3955})
V_1814 = Vertex(name = 'V_1814',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_1493,(0,5):C.GC_1502,(1,0):C.GC_1558,(3,0):C.GC_1564,(0,3):C.GC_1555,(2,3):C.GC_1561,(1,2):C.GC_1609,(3,2):C.GC_1612,(1,6):C.GC_1603,(3,6):C.GC_1607,(0,1):C.GC_1601,(2,1):C.GC_1605})
V_1815 = Vertex(name = 'V_1815',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1511,(0,1):C.GC_1829})
V_1816 = Vertex(name = 'V_1816',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1820})
V_1817 = Vertex(name = 'V_1817',
particles = [ P.u__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1838})
V_1818 = Vertex(name = 'V_1818',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_2239,(0,5):C.GC_2248,(1,0):C.GC_2352,(3,0):C.GC_2358,(0,3):C.GC_2349,(2,3):C.GC_2355,(1,2):C.GC_2361,(3,2):C.GC_2364,(1,6):C.GC_2329,(3,6):C.GC_2335,(0,1):C.GC_2326,(2,1):C.GC_2332})
V_1819 = Vertex(name = 'V_1819',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2257,(0,1):C.GC_2618})
V_1820 = Vertex(name = 'V_1820',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2614})
V_1821 = Vertex(name = 'V_1821',
particles = [ P.c__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2622})
V_1822 = Vertex(name = 'V_1822',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,4):C.GC_4109,(0,5):C.GC_3961,(1,0):C.GC_3607,(3,0):C.GC_3617,(0,3):C.GC_3602,(2,3):C.GC_3612,(1,2):C.GC_4087,(3,2):C.GC_4092,(1,6):C.GC_3651,(3,6):C.GC_3657,(0,1):C.GC_3648,(2,1):C.GC_3654})
V_1823 = Vertex(name = 'V_1823',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_4130,(0,2):C.GC_3546,(1,0):C.GC_3659,(2,0):C.GC_3662})
V_1824 = Vertex(name = 'V_1824',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3537,(0,1):C.GC_3949})
V_1825 = Vertex(name = 'V_1825',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3555})
V_1826 = Vertex(name = 'V_1826',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3940})
V_1827 = Vertex(name = 'V_1827',
particles = [ P.t__tilde__, P.b, P.s__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3960})
# ---------------------------------------------------------------------------
# Auto-generated UFO vertex table — four-quark contact interactions of the
# form (q~ b)(b~ q').  Each ``Vertex`` record pairs a colour-flow basis
# ('Identity(i,j)' deltas, 'T(a,i,j)' generators; legs numbered from 1) with
# Lorentz structures (L.FFFF*) and a couplings map
# {(color_index, lorentz_index): C.GC_*} using 0-based indices into those
# lists.  Machine-generated data — regenerate from the model source rather
# than hand-editing.
V_1828 = Vertex(name = 'V_1828',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_1019,(3,0):C.GC_1021,(0,5):C.GC_1155,(2,5):C.GC_1156,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_1019,(3,8):C.GC_1021,(0,1):C.GC_1155,(2,1):C.GC_1156})
V_1829 = Vertex(name = 'V_1829',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_4143,(1,0):C.GC_1569,(3,0):C.GC_1574,(0,5):C.GC_1020,(2,5):C.GC_1022,(1,4):C.GC_3703,(3,4):C.GC_3711,(1,2):C.GC_4090,(3,2):C.GC_4095,(1,3):C.GC_1096,(3,3):C.GC_1099,(1,8):C.GC_1569,(3,8):C.GC_1574,(0,1):C.GC_1020,(2,1):C.GC_1022})
V_1830 = Vertex(name = 'V_1830',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_4112,(0,2):C.GC_1497,(1,0):C.GC_1619,(2,0):C.GC_1622})
V_1831 = Vertex(name = 'V_1831',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3950,(0,1):C.GC_1823})
V_1832 = Vertex(name = 'V_1832',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1488})
V_1833 = Vertex(name = 'V_1833',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1506})
V_1834 = Vertex(name = 'V_1834',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1814})
V_1835 = Vertex(name = 'V_1835',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1832})
V_1836 = Vertex(name = 'V_1836',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2439,(0,6):C.GC_2392,(1,0):C.GC_2375,(3,0):C.GC_2381,(0,4):C.GC_2372,(2,4):C.GC_2378,(1,3):C.GC_2409,(3,3):C.GC_2414,(1,2):C.GC_2384,(3,2):C.GC_2387,(1,7):C.GC_2273,(3,7):C.GC_2283,(0,1):C.GC_2268,(2,1):C.GC_2278})
V_1837 = Vertex(name = 'V_1837',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2449,(0,1):C.GC_2243})
V_1838 = Vertex(name = 'V_1838',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2234,(0,1):C.GC_2701})
V_1839 = Vertex(name = 'V_1839',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2252})
V_1840 = Vertex(name = 'V_1840',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2695})
V_1841 = Vertex(name = 'V_1841',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2706})
V_1842 = Vertex(name = 'V_1842',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_3740,(0,6):C.GC_3692,(1,0):C.GC_3673,(3,0):C.GC_3679,(0,4):C.GC_3670,(2,4):C.GC_3676,(1,3):C.GC_3698,(3,3):C.GC_3706,(1,2):C.GC_3682,(3,2):C.GC_3685,(1,7):C.GC_3604,(3,7):C.GC_3614,(0,1):C.GC_3599,(2,1):C.GC_3609})
V_1843 = Vertex(name = 'V_1843',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3758,(0,1):C.GC_3541})
V_1844 = Vertex(name = 'V_1844',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3532,(0,1):C.GC_4126})
V_1845 = Vertex(name = 'V_1845',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3550})
V_1846 = Vertex(name = 'V_1846',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4118})
V_1847 = Vertex(name = 'V_1847',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.u ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_4135})
V_1848 = Vertex(name = 'V_1848',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1651,(0,6):C.GC_1629,(1,0):C.GC_1527,(3,0):C.GC_1533,(0,4):C.GC_1524,(2,4):C.GC_1530,(1,3):C.GC_1635,(3,3):C.GC_1637,(1,2):C.GC_1582,(3,2):C.GC_1585,(1,7):C.GC_1570,(3,7):C.GC_1575,(0,1):C.GC_1567,(2,1):C.GC_1572})
V_1849 = Vertex(name = 'V_1849',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1656,(0,1):C.GC_1500})
V_1850 = Vertex(name = 'V_1850',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1491,(0,1):C.GC_1827})
V_1851 = Vertex(name = 'V_1851',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1509})
V_1852 = Vertex(name = 'V_1852',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1818})
V_1853 = Vertex(name = 'V_1853',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1835})
V_1854 = Vertex(name = 'V_1854',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_488,(3,0):C.GC_490,(0,5):C.GC_489,(2,5):C.GC_491,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_488,(3,8):C.GC_490,(0,1):C.GC_489,(2,1):C.GC_491})
V_1855 = Vertex(name = 'V_1855',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_4141,(1,0):C.GC_2276,(3,0):C.GC_2286,(0,5):C.GC_2271,(2,5):C.GC_2281,(1,4):C.GC_2410,(3,4):C.GC_2415,(1,2):C.GC_4088,(3,2):C.GC_4093,(1,3):C.GC_505,(3,3):C.GC_506,(1,8):C.GC_2276,(3,8):C.GC_2286,(0,1):C.GC_2271,(2,1):C.GC_2281})
V_1856 = Vertex(name = 'V_1856',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_4110,(0,2):C.GC_2246,(1,0):C.GC_2294,(2,0):C.GC_2297})
V_1857 = Vertex(name = 'V_1857',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3951,(0,1):C.GC_2396})
V_1858 = Vertex(name = 'V_1858',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2237})
V_1859 = Vertex(name = 'V_1859',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2255})
V_1860 = Vertex(name = 'V_1860',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2395})
V_1861 = Vertex(name = 'V_1861',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2397})
V_1862 = Vertex(name = 'V_1862',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_3741,(0,6):C.GC_3693,(1,0):C.GC_3578,(3,0):C.GC_3584,(0,4):C.GC_3575,(2,4):C.GC_3581,(1,3):C.GC_3699,(3,3):C.GC_3707,(1,2):C.GC_3624,(3,2):C.GC_3627,(1,7):C.GC_3605,(3,7):C.GC_3615,(0,1):C.GC_3600,(2,1):C.GC_3610})
V_1863 = Vertex(name = 'V_1863',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3760,(0,1):C.GC_3544})
V_1864 = Vertex(name = 'V_1864',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3535,(0,1):C.GC_3945})
V_1865 = Vertex(name = 'V_1865',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3553})
V_1866 = Vertex(name = 'V_1866',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3938})
V_1867 = Vertex(name = 'V_1867',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.c ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3956})
V_1868 = Vertex(name = 'V_1868',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_1652,(0,6):C.GC_1630,(1,0):C.GC_1559,(3,0):C.GC_1565,(0,4):C.GC_1556,(2,4):C.GC_1562,(1,3):C.GC_1636,(3,3):C.GC_1638,(1,2):C.GC_1610,(3,2):C.GC_1613,(1,7):C.GC_1571,(3,7):C.GC_1576,(0,1):C.GC_1568,(2,1):C.GC_1573})
V_1869 = Vertex(name = 'V_1869',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1658,(0,1):C.GC_1503})
V_1870 = Vertex(name = 'V_1870',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1494,(0,1):C.GC_1831})
V_1871 = Vertex(name = 'V_1871',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1512})
V_1872 = Vertex(name = 'V_1872',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1822})
V_1873 = Vertex(name = 'V_1873',
particles = [ P.u__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_1839})
V_1874 = Vertex(name = 'V_1874',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,5):C.GC_2442,(0,6):C.GC_2393,(1,0):C.GC_2353,(3,0):C.GC_2359,(0,4):C.GC_2350,(2,4):C.GC_2356,(1,3):C.GC_2413,(3,3):C.GC_2418,(1,2):C.GC_2362,(3,2):C.GC_2365,(1,7):C.GC_2277,(3,7):C.GC_2287,(0,1):C.GC_2272,(2,1):C.GC_2282})
V_1875 = Vertex(name = 'V_1875',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2454,(0,1):C.GC_2249})
V_1876 = Vertex(name = 'V_1876',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2240,(0,1):C.GC_2620})
V_1877 = Vertex(name = 'V_1877',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2258})
V_1878 = Vertex(name = 'V_1878',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2616})
V_1879 = Vertex(name = 'V_1879',
particles = [ P.c__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_2623})
V_1880 = Vertex(name = 'V_1880',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_46,(1,0):C.GC_797,(3,0):C.GC_799,(0,5):C.GC_798,(2,5):C.GC_800,(1,4):C.GC_38,(3,4):C.GC_39,(1,2):C.GC_47,(3,2):C.GC_48,(1,3):C.GC_51,(3,3):C.GC_52,(1,8):C.GC_797,(3,8):C.GC_799,(0,1):C.GC_798,(2,1):C.GC_800})
V_1881 = Vertex(name = 'V_1881',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF1, L.FFFF11, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF2, L.FFFF3, L.FFFF4, L.FFFF9 ],
couplings = {(1,6):C.GC_43,(0,7):C.GC_4142,(1,0):C.GC_3608,(3,0):C.GC_3618,(0,5):C.GC_3603,(2,5):C.GC_3613,(1,4):C.GC_446,(3,4):C.GC_447,(1,2):C.GC_4089,(3,2):C.GC_4094,(1,3):C.GC_858,(3,3):C.GC_861,(1,8):C.GC_3608,(3,8):C.GC_3618,(0,1):C.GC_3603,(2,1):C.GC_3613})
V_1882 = Vertex(name = 'V_1882',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_4111,(0,3):C.GC_3547,(1,1):C.GC_3700,(2,1):C.GC_3708,(1,0):C.GC_3660,(2,0):C.GC_3663})
V_1883 = Vertex(name = 'V_1883',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3952,(0,1):C.GC_3953})
V_1884 = Vertex(name = 'V_1884',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3538})
V_1885 = Vertex(name = 'V_1885',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3556})
V_1886 = Vertex(name = 'V_1886',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3941})
V_1887 = Vertex(name = 'V_1887',
particles = [ P.t__tilde__, P.b, P.b__tilde__, P.t ],
color = [ 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3 ],
couplings = {(0,0):C.GC_3962})
# ---------------------------------------------------------------------------
# Auto-generated UFO vertex table — lepton-pair × quark-pair contact
# interactions (e+ e- q~ q' and mu+ mu- q~ q').  The single colour structure
# 'Identity(3,4)' ties the two quark legs (legs numbered from 1); leptons are
# colour singlets.  Lorentz structures are L.FFFF* and couplings map
# {(color_index, lorentz_index): C.GC_*} with 0-based indices into those
# lists.  Machine-generated data — regenerate from the model source rather
# than hand-editing.
V_1888 = Vertex(name = 'V_1888',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_1040,(0,9):C.GC_1039,(0,10):C.GC_1039,(0,6):C.GC_1035,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_1040,(0,4):C.GC_1039,(0,5):C.GC_1039,(0,0):C.GC_1035})
V_1889 = Vertex(name = 'V_1889',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_4061,(0,9):C.GC_4060,(0,10):C.GC_4060,(0,6):C.GC_4057,(0,1):C.GC_3739,(0,2):C.GC_1083,(0,3):C.GC_1082,(0,7):C.GC_4061,(0,4):C.GC_4060,(0,5):C.GC_4060,(0,0):C.GC_4057})
V_1890 = Vertex(name = 'V_1890',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3727})
V_1891 = Vertex(name = 'V_1891',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3733})
V_1892 = Vertex(name = 'V_1892',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_2427,(0,9):C.GC_2670,(0,7):C.GC_2669,(0,8):C.GC_2669,(0,4):C.GC_2668,(0,1):C.GC_2436,(0,5):C.GC_2559,(0,2):C.GC_2558,(0,3):C.GC_2558,(0,0):C.GC_2555})
V_1893 = Vertex(name = 'V_1893',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2431})
V_1894 = Vertex(name = 'V_1894',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_3724,(0,9):C.GC_4059,(0,7):C.GC_4058,(0,8):C.GC_4058,(0,4):C.GC_4056,(0,1):C.GC_3736,(0,5):C.GC_3918,(0,2):C.GC_3917,(0,3):C.GC_3917,(0,0):C.GC_3914})
V_1895 = Vertex(name = 'V_1895',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3729})
V_1896 = Vertex(name = 'V_1896',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_1643,(0,9):C.GC_1698,(0,7):C.GC_1697,(0,8):C.GC_1697,(0,4):C.GC_1696,(0,1):C.GC_1649,(0,5):C.GC_1798,(0,2):C.GC_1797,(0,3):C.GC_1797,(0,0):C.GC_1795})
V_1897 = Vertex(name = 'V_1897',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1646})
V_1898 = Vertex(name = 'V_1898',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_607,(0,9):C.GC_606,(0,10):C.GC_606,(0,6):C.GC_602,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_607,(0,4):C.GC_606,(0,5):C.GC_606,(0,0):C.GC_602})
V_1899 = Vertex(name = 'V_1899',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_2561,(0,9):C.GC_2560,(0,10):C.GC_2560,(0,6):C.GC_2556,(0,1):C.GC_2437,(0,2):C.GC_498,(0,3):C.GC_497,(0,7):C.GC_2561,(0,4):C.GC_2560,(0,5):C.GC_2560,(0,0):C.GC_2556})
V_1900 = Vertex(name = 'V_1900',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2428})
V_1901 = Vertex(name = 'V_1901',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2433})
V_1902 = Vertex(name = 'V_1902',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_3725,(0,9):C.GC_3826,(0,7):C.GC_3825,(0,8):C.GC_3825,(0,4):C.GC_3824,(0,1):C.GC_3737,(0,5):C.GC_3920,(0,2):C.GC_3919,(0,3):C.GC_3919,(0,0):C.GC_3915})
V_1903 = Vertex(name = 'V_1903',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3731})
V_1904 = Vertex(name = 'V_1904',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_1644,(0,9):C.GC_1731,(0,7):C.GC_1730,(0,8):C.GC_1730,(0,4):C.GC_1729,(0,1):C.GC_1650,(0,5):C.GC_1800,(0,2):C.GC_1799,(0,3):C.GC_1799,(0,0):C.GC_1796})
V_1905 = Vertex(name = 'V_1905',
particles = [ P.e__plus__, P.e__minus__, P.u__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1648})
V_1906 = Vertex(name = 'V_1906',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_2429,(0,9):C.GC_2609,(0,7):C.GC_2608,(0,8):C.GC_2608,(0,4):C.GC_2607,(0,1):C.GC_2438,(0,5):C.GC_2563,(0,2):C.GC_2562,(0,3):C.GC_2562,(0,0):C.GC_2557})
V_1907 = Vertex(name = 'V_1907',
particles = [ P.e__plus__, P.e__minus__, P.c__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2435})
V_1908 = Vertex(name = 'V_1908',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_818,(0,9):C.GC_817,(0,10):C.GC_817,(0,6):C.GC_813,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_818,(0,4):C.GC_817,(0,5):C.GC_817,(0,0):C.GC_813})
V_1909 = Vertex(name = 'V_1909',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_3922,(0,9):C.GC_3921,(0,10):C.GC_3921,(0,6):C.GC_3916,(0,1):C.GC_3738,(0,2):C.GC_847,(0,3):C.GC_846,(0,7):C.GC_3922,(0,4):C.GC_3921,(0,5):C.GC_3921,(0,0):C.GC_3916})
V_1910 = Vertex(name = 'V_1910',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3726})
V_1911 = Vertex(name = 'V_1911',
particles = [ P.e__plus__, P.e__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3734})
V_1912 = Vertex(name = 'V_1912',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_1052,(0,9):C.GC_1051,(0,10):C.GC_1051,(0,6):C.GC_1047,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_1052,(0,4):C.GC_1051,(0,5):C.GC_1051,(0,0):C.GC_1047})
V_1913 = Vertex(name = 'V_1913',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_4067,(0,9):C.GC_4066,(0,10):C.GC_4066,(0,6):C.GC_4063,(0,1):C.GC_3739,(0,2):C.GC_1083,(0,3):C.GC_1082,(0,7):C.GC_4067,(0,4):C.GC_4066,(0,5):C.GC_4066,(0,0):C.GC_4063})
V_1914 = Vertex(name = 'V_1914',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3727})
V_1915 = Vertex(name = 'V_1915',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3733})
V_1916 = Vertex(name = 'V_1916',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_2427,(0,9):C.GC_2673,(0,7):C.GC_2672,(0,8):C.GC_2672,(0,4):C.GC_2671,(0,1):C.GC_2436,(0,5):C.GC_2572,(0,2):C.GC_2571,(0,3):C.GC_2571,(0,0):C.GC_2568})
V_1917 = Vertex(name = 'V_1917',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2431})
V_1918 = Vertex(name = 'V_1918',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_3724,(0,9):C.GC_4065,(0,7):C.GC_4064,(0,8):C.GC_4064,(0,4):C.GC_4062,(0,1):C.GC_3736,(0,5):C.GC_3927,(0,2):C.GC_3926,(0,3):C.GC_3926,(0,0):C.GC_3923})
V_1919 = Vertex(name = 'V_1919',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3729})
V_1920 = Vertex(name = 'V_1920',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_1643,(0,9):C.GC_1701,(0,7):C.GC_1700,(0,8):C.GC_1700,(0,4):C.GC_1699,(0,1):C.GC_1649,(0,5):C.GC_1804,(0,2):C.GC_1803,(0,3):C.GC_1803,(0,0):C.GC_1801})
V_1921 = Vertex(name = 'V_1921',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1646})
V_1922 = Vertex(name = 'V_1922',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_664,(0,9):C.GC_663,(0,10):C.GC_663,(0,6):C.GC_659,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_664,(0,4):C.GC_663,(0,5):C.GC_663,(0,0):C.GC_659})
V_1923 = Vertex(name = 'V_1923',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_2574,(0,9):C.GC_2573,(0,10):C.GC_2573,(0,6):C.GC_2569,(0,1):C.GC_2437,(0,2):C.GC_498,(0,3):C.GC_497,(0,7):C.GC_2574,(0,4):C.GC_2573,(0,5):C.GC_2573,(0,0):C.GC_2569})
V_1924 = Vertex(name = 'V_1924',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2428})
V_1925 = Vertex(name = 'V_1925',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2433})
V_1926 = Vertex(name = 'V_1926',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_3725,(0,9):C.GC_3829,(0,7):C.GC_3828,(0,8):C.GC_3828,(0,4):C.GC_3827,(0,1):C.GC_3737,(0,5):C.GC_3929,(0,2):C.GC_3928,(0,3):C.GC_3928,(0,0):C.GC_3924})
V_1927 = Vertex(name = 'V_1927',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3731})
V_1928 = Vertex(name = 'V_1928',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_1644,(0,9):C.GC_1734,(0,7):C.GC_1733,(0,8):C.GC_1733,(0,4):C.GC_1732,(0,1):C.GC_1650,(0,5):C.GC_1806,(0,2):C.GC_1805,(0,3):C.GC_1805,(0,0):C.GC_1802})
V_1929 = Vertex(name = 'V_1929',
particles = [ P.mu__plus__, P.mu__minus__, P.u__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1648})
V_1930 = Vertex(name = 'V_1930',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_2429,(0,9):C.GC_2612,(0,7):C.GC_2611,(0,8):C.GC_2611,(0,4):C.GC_2610,(0,1):C.GC_2438,(0,5):C.GC_2576,(0,2):C.GC_2575,(0,3):C.GC_2575,(0,0):C.GC_2570})
V_1931 = Vertex(name = 'V_1931',
particles = [ P.mu__plus__, P.mu__minus__, P.c__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2435})
V_1932 = Vertex(name = 'V_1932',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_830,(0,9):C.GC_829,(0,10):C.GC_829,(0,6):C.GC_825,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_830,(0,4):C.GC_829,(0,5):C.GC_829,(0,0):C.GC_825})
V_1933 = Vertex(name = 'V_1933',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_3931,(0,9):C.GC_3930,(0,10):C.GC_3930,(0,6):C.GC_3925,(0,1):C.GC_3738,(0,2):C.GC_847,(0,3):C.GC_846,(0,7):C.GC_3931,(0,4):C.GC_3930,(0,5):C.GC_3930,(0,0):C.GC_3925})
V_1934 = Vertex(name = 'V_1934',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3726})
V_1935 = Vertex(name = 'V_1935',
particles = [ P.mu__plus__, P.mu__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3734})
# --------------------------------------------------------------------------
# Four-fermion contact vertices: (ta+ ta-) x (quark, antiquark).
# Structurally parallel to the preceding mu+ mu- family: same color basis
# ('Identity(3,4)', color-diagonal quark pair) and the same FFFF* Lorentz
# structure sets per flavor combination, but with tau-specific coupling
# constants (C.GC_n).
# NOTE(review): auto-generated model data — do not edit coupling indices
# by hand; regenerate from the model source instead.
# --------------------------------------------------------------------------
V_1936 = Vertex(name = 'V_1936',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_1073,(0,9):C.GC_1072,(0,10):C.GC_1072,(0,6):C.GC_1068,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_1073,(0,4):C.GC_1072,(0,5):C.GC_1072,(0,0):C.GC_1068})
V_1937 = Vertex(name = 'V_1937',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_4077,(0,9):C.GC_4076,(0,10):C.GC_4076,(0,6):C.GC_4073,(0,1):C.GC_3739,(0,2):C.GC_1083,(0,3):C.GC_1082,(0,7):C.GC_4077,(0,4):C.GC_4076,(0,5):C.GC_4076,(0,0):C.GC_4073})
V_1938 = Vertex(name = 'V_1938',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3727})
V_1939 = Vertex(name = 'V_1939',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3733})
V_1940 = Vertex(name = 'V_1940',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_2427,(0,9):C.GC_2676,(0,7):C.GC_2675,(0,8):C.GC_2675,(0,4):C.GC_2674,(0,1):C.GC_2436,(0,5):C.GC_2633,(0,2):C.GC_2632,(0,3):C.GC_2632,(0,0):C.GC_2629})
V_1941 = Vertex(name = 'V_1941',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2431})
V_1942 = Vertex(name = 'V_1942',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_3724,(0,9):C.GC_4075,(0,7):C.GC_4074,(0,8):C.GC_4074,(0,4):C.GC_4072,(0,1):C.GC_3736,(0,5):C.GC_3981,(0,2):C.GC_3980,(0,3):C.GC_3980,(0,0):C.GC_3977})
V_1943 = Vertex(name = 'V_1943',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.u ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3729})
V_1944 = Vertex(name = 'V_1944',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_1643,(0,9):C.GC_1737,(0,7):C.GC_1736,(0,8):C.GC_1736,(0,4):C.GC_1735,(0,1):C.GC_1649,(0,5):C.GC_1810,(0,2):C.GC_1809,(0,3):C.GC_1809,(0,0):C.GC_1807})
V_1945 = Vertex(name = 'V_1945',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1646})
V_1946 = Vertex(name = 'V_1946',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_935,(0,9):C.GC_934,(0,10):C.GC_934,(0,6):C.GC_930,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_935,(0,4):C.GC_934,(0,5):C.GC_934,(0,0):C.GC_930})
V_1947 = Vertex(name = 'V_1947',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_2635,(0,9):C.GC_2634,(0,10):C.GC_2634,(0,6):C.GC_2630,(0,1):C.GC_2437,(0,2):C.GC_498,(0,3):C.GC_497,(0,7):C.GC_2635,(0,4):C.GC_2634,(0,5):C.GC_2634,(0,0):C.GC_2630})
V_1948 = Vertex(name = 'V_1948',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2428})
V_1949 = Vertex(name = 'V_1949',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2433})
V_1950 = Vertex(name = 'V_1950',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_3725,(0,9):C.GC_3976,(0,7):C.GC_3975,(0,8):C.GC_3975,(0,4):C.GC_3974,(0,1):C.GC_3737,(0,5):C.GC_3983,(0,2):C.GC_3982,(0,3):C.GC_3982,(0,0):C.GC_3978})
V_1951 = Vertex(name = 'V_1951',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.c ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3731})
V_1952 = Vertex(name = 'V_1952',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_1644,(0,9):C.GC_1740,(0,7):C.GC_1739,(0,8):C.GC_1739,(0,4):C.GC_1738,(0,1):C.GC_1650,(0,5):C.GC_1812,(0,2):C.GC_1811,(0,3):C.GC_1811,(0,0):C.GC_1808})
V_1953 = Vertex(name = 'V_1953',
particles = [ P.ta__plus__, P.ta__minus__, P.u__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1648})
V_1954 = Vertex(name = 'V_1954',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,6):C.GC_2429,(0,9):C.GC_2640,(0,7):C.GC_2639,(0,8):C.GC_2639,(0,4):C.GC_2638,(0,1):C.GC_2438,(0,5):C.GC_2637,(0,2):C.GC_2636,(0,3):C.GC_2636,(0,0):C.GC_2631})
V_1955 = Vertex(name = 'V_1955',
particles = [ P.ta__plus__, P.ta__minus__, P.c__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2435})
V_1956 = Vertex(name = 'V_1956',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_25,(0,11):C.GC_955,(0,9):C.GC_954,(0,10):C.GC_954,(0,6):C.GC_950,(0,1):C.GC_40,(0,2):C.GC_37,(0,3):C.GC_14,(0,7):C.GC_955,(0,4):C.GC_954,(0,5):C.GC_954,(0,0):C.GC_950})
V_1957 = Vertex(name = 'V_1957',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF11, L.FFFF12, L.FFFF16, L.FFFF17, L.FFFF18, L.FFFF19, L.FFFF2, L.FFFF20, L.FFFF4, L.FFFF5, L.FFFF6, L.FFFF7 ],
couplings = {(0,8):C.GC_26,(0,11):C.GC_3985,(0,9):C.GC_3984,(0,10):C.GC_3984,(0,6):C.GC_3979,(0,1):C.GC_3738,(0,2):C.GC_847,(0,3):C.GC_846,(0,7):C.GC_3985,(0,4):C.GC_3984,(0,5):C.GC_3984,(0,0):C.GC_3979})
V_1958 = Vertex(name = 'V_1958',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3726})
V_1959 = Vertex(name = 'V_1959',
particles = [ P.ta__plus__, P.ta__minus__, P.t__tilde__, P.t ],
color = [ 'Identity(3,4)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3734})
V_1960 = Vertex(name = 'V_1960',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_41,(3,3):C.GC_48,(1,1):C.GC_47,(3,1):C.GC_48,(1,2):C.GC_53,(0,4):C.GC_47,(2,4):C.GC_48,(0,5):C.GC_53,(0,0):C.GC_47,(2,0):C.GC_48,(1,3):C.GC_47})
V_1961 = Vertex(name = 'V_1961',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_42,(0,7):C.GC_42,(3,3):C.GC_3723,(1,1):C.GC_3718,(3,1):C.GC_3723,(1,2):C.GC_54,(0,4):C.GC_3718,(2,4):C.GC_3723,(0,5):C.GC_54,(0,0):C.GC_3718,(2,0):C.GC_3723,(1,3):C.GC_3718})
V_1962 = Vertex(name = 'V_1962',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_44,(0,7):C.GC_44,(3,3):C.GC_1081,(1,1):C.GC_1080,(3,1):C.GC_1081,(1,2):C.GC_1084,(0,4):C.GC_1080,(2,4):C.GC_1081,(0,5):C.GC_1084,(0,0):C.GC_1080,(2,0):C.GC_1081,(1,3):C.GC_1080})
V_1963 = Vertex(name = 'V_1963',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF14, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_45,(0,3):C.GC_45,(1,0):C.GC_1085,(0,1):C.GC_1085})
V_1964 = Vertex(name = 'V_1964',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3749,(0,1):C.GC_3749})
V_1965 = Vertex(name = 'V_1965',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3756,(0,1):C.GC_3756})
V_1966 = Vertex(name = 'V_1966',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3765,(0,1):C.GC_3765})
V_1967 = Vertex(name = 'V_1967',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3774,(0,1):C.GC_3774})
V_1968 = Vertex(name = 'V_1968',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2439,(0,3):C.GC_2439,(1,0):C.GC_2419,(3,0):C.GC_2423,(0,1):C.GC_2419,(2,1):C.GC_2423})
V_1969 = Vertex(name = 'V_1969',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2443,(0,1):C.GC_2443})
V_1970 = Vertex(name = 'V_1970',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2448,(0,1):C.GC_2448})
V_1971 = Vertex(name = 'V_1971',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2455,(0,1):C.GC_2455})
V_1972 = Vertex(name = 'V_1972',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3740,(0,3):C.GC_3740,(1,0):C.GC_3714,(3,0):C.GC_3719,(0,1):C.GC_3714,(2,1):C.GC_3719})
V_1973 = Vertex(name = 'V_1973',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3750,(0,1):C.GC_3750})
V_1974 = Vertex(name = 'V_1974',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3757,(0,1):C.GC_3757})
V_1975 = Vertex(name = 'V_1975',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.u ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3766,(0,1):C.GC_3766})
V_1976 = Vertex(name = 'V_1976',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_1651,(0,3):C.GC_1651,(0,0):C.GC_1639,(2,0):C.GC_1641,(1,1):C.GC_1639,(3,1):C.GC_1641})
V_1977 = Vertex(name = 'V_1977',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1653,(0,1):C.GC_1653})
V_1978 = Vertex(name = 'V_1978',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1655,(0,1):C.GC_1655})
V_1979 = Vertex(name = 'V_1979',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1659,(0,1):C.GC_1659})
V_1980 = Vertex(name = 'V_1980',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_42,(0,0):C.GC_1026,(2,0):C.GC_1027,(1,3):C.GC_47,(3,3):C.GC_48,(1,1):C.GC_47,(3,1):C.GC_48,(1,2):C.GC_53,(0,4):C.GC_1026,(2,4):C.GC_1027,(0,5):C.GC_54})
V_1981 = Vertex(name = 'V_1981',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_44,(0,5):C.GC_45,(1,2):C.GC_2388,(2,2):C.GC_2389,(1,0):C.GC_1623,(2,0):C.GC_1625,(1,1):C.GC_1089,(0,3):C.GC_1091})
V_1982 = Vertex(name = 'V_1982',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3745,(0,1):C.GC_3754})
V_1983 = Vertex(name = 'V_1983',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3763,(0,1):C.GC_3770})
V_1984 = Vertex(name = 'V_1984',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3741,(0,2):C.GC_3751,(1,0):C.GC_3715,(2,0):C.GC_3720})
V_1985 = Vertex(name = 'V_1985',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3759,(0,1):C.GC_3767})
V_1986 = Vertex(name = 'V_1986',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_1652,(0,3):C.GC_1652,(0,0):C.GC_1640,(2,0):C.GC_1642,(1,1):C.GC_1640,(3,1):C.GC_1642})
V_1987 = Vertex(name = 'V_1987',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1654,(0,1):C.GC_1654})
V_1988 = Vertex(name = 'V_1988',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1657,(0,1):C.GC_1657})
V_1989 = Vertex(name = 'V_1989',
particles = [ P.u__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1660,(0,1):C.GC_1660})
V_1990 = Vertex(name = 'V_1990',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2442,(0,2):C.GC_2447,(1,0):C.GC_2422,(2,0):C.GC_2426})
V_1991 = Vertex(name = 'V_1991',
particles = [ P.c__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2453,(0,1):C.GC_2459})
V_1992 = Vertex(name = 'V_1992',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_42,(0,0):C.GC_1066,(2,0):C.GC_1067,(1,3):C.GC_47,(3,3):C.GC_48,(1,1):C.GC_47,(3,1):C.GC_48,(1,2):C.GC_53,(0,4):C.GC_1066,(2,4):C.GC_1067,(0,5):C.GC_54})
V_1993 = Vertex(name = 'V_1993',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_44,(0,5):C.GC_45,(1,2):C.GC_3686,(2,2):C.GC_3687,(1,0):C.GC_1624,(2,0):C.GC_1626,(1,1):C.GC_1090,(0,3):C.GC_1092})
V_1994 = Vertex(name = 'V_1994',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2440,(0,1):C.GC_2445})
V_1995 = Vertex(name = 'V_1995',
particles = [ P.t__tilde__, P.u, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2451,(0,1):C.GC_2457})
V_1996 = Vertex(name = 'V_1996',
particles = [ P.c__tilde__, P.u, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2439,(0,3):C.GC_2439,(1,0):C.GC_2419,(3,0):C.GC_2423,(0,1):C.GC_2419,(2,1):C.GC_2423})
V_1997 = Vertex(name = 'V_1997',
particles = [ P.c__tilde__, P.u, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2443,(0,1):C.GC_2443})
V_1998 = Vertex(name = 'V_1998',
particles = [ P.c__tilde__, P.u, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2448,(0,1):C.GC_2448})
V_1999 = Vertex(name = 'V_1999',
particles = [ P.c__tilde__, P.u, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2455,(0,1):C.GC_2455})
V_2000 = Vertex(name = 'V_2000',
particles = [ P.t__tilde__, P.u, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)' ],
lorentz = [ L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_3750,(0,2):C.GC_3740,(0,0):C.GC_3714,(2,0):C.GC_3719})
V_2001 = Vertex(name = 'V_2001',
particles = [ P.t__tilde__, P.u, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3766,(0,1):C.GC_3757})
V_2002 = Vertex(name = 'V_2002',
particles = [ P.t__tilde__, P.u, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_2439,(0,2):C.GC_2443,(1,0):C.GC_2419,(2,0):C.GC_2423})
V_2003 = Vertex(name = 'V_2003',
particles = [ P.t__tilde__, P.u, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2448,(0,1):C.GC_2455})
V_2004 = Vertex(name = 'V_2004',
particles = [ P.t__tilde__, P.u, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3740,(0,3):C.GC_3740,(1,0):C.GC_3714,(3,0):C.GC_3719,(0,1):C.GC_3714,(2,1):C.GC_3719})
V_2005 = Vertex(name = 'V_2005',
particles = [ P.t__tilde__, P.u, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3750,(0,1):C.GC_3750})
V_2006 = Vertex(name = 'V_2006',
particles = [ P.t__tilde__, P.u, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3757,(0,1):C.GC_3757})
V_2007 = Vertex(name = 'V_2007',
particles = [ P.t__tilde__, P.u, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3766,(0,1):C.GC_3766})
V_2008 = Vertex(name = 'V_2008',
particles = [ P.c__tilde__, P.c, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_1651,(0,3):C.GC_1651,(0,0):C.GC_1639,(2,0):C.GC_1641,(1,1):C.GC_1639,(3,1):C.GC_1641})
V_2009 = Vertex(name = 'V_2009',
particles = [ P.c__tilde__, P.c, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1653,(0,1):C.GC_1653})
V_2010 = Vertex(name = 'V_2010',
particles = [ P.c__tilde__, P.c, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1655,(0,1):C.GC_1655})
V_2011 = Vertex(name = 'V_2011',
particles = [ P.c__tilde__, P.c, P.u__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1659,(0,1):C.GC_1659})
V_2012 = Vertex(name = 'V_2012',
particles = [ P.c__tilde__, P.c, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)' ],
lorentz = [ L.FFFF12, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1654,(0,2):C.GC_1652,(0,0):C.GC_1640,(2,0):C.GC_1642})
V_2013 = Vertex(name = 'V_2013',
particles = [ P.c__tilde__, P.c, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1660,(0,1):C.GC_1657})
V_2014 = Vertex(name = 'V_2014',
particles = [ P.t__tilde__, P.c, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,1):C.GC_1651,(0,2):C.GC_1653,(1,0):C.GC_1639,(2,0):C.GC_1641})
V_2015 = Vertex(name = 'V_2015',
particles = [ P.t__tilde__, P.c, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1655,(0,1):C.GC_1659})
V_2016 = Vertex(name = 'V_2016',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_41,(0,0):C.GC_47,(2,0):C.GC_48,(1,3):C.GC_47,(3,3):C.GC_48,(1,1):C.GC_47,(3,1):C.GC_48,(1,2):C.GC_53,(0,4):C.GC_47,(2,4):C.GC_48,(0,5):C.GC_53})
V_2017 = Vertex(name = 'V_2017',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_42,(0,7):C.GC_42,(0,0):C.GC_495,(2,0):C.GC_496,(1,3):C.GC_495,(3,3):C.GC_496,(1,1):C.GC_495,(3,1):C.GC_496,(1,2):C.GC_54,(0,4):C.GC_495,(2,4):C.GC_496,(0,5):C.GC_54})
V_2018 = Vertex(name = 'V_2018',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_44,(0,7):C.GC_44,(0,0):C.GC_2420,(2,0):C.GC_2424,(1,3):C.GC_2420,(3,3):C.GC_2424,(1,1):C.GC_2420,(3,1):C.GC_2424,(1,2):C.GC_499,(0,4):C.GC_2420,(2,4):C.GC_2424,(0,5):C.GC_499})
# --- Four-quark contact vertices V_2019 .. V_2055 -------------------------
# NOTE(review): this section looks like auto-generated UFO model output
# (Vertex objects carrying particles / color / lorentz / couplings); if so,
# it should be regenerated from the model source rather than hand-edited —
# TODO confirm with the model's generator.
# Coupling dict keys are (color_index, lorentz_index) pairs into the color
# and lorentz lists of the same Vertex; values are coupling constants from
# the model's couplings module C.
# c~ c c~ c vertices (color-singlet structures only).
V_2019 = Vertex(name = 'V_2019',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF14, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_45,(0,3):C.GC_45,(1,0):C.GC_500,(0,1):C.GC_500})
V_2020 = Vertex(name = 'V_2020',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2441,(0,1):C.GC_2441})
V_2021 = Vertex(name = 'V_2021',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2446,(0,1):C.GC_2446})
V_2022 = Vertex(name = 'V_2022',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2452,(0,1):C.GC_2452})
V_2023 = Vertex(name = 'V_2023',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2458,(0,1):C.GC_2458})
# t~ c c~ c vertices — flavor-changing, with octet (T*T) color structures
# in the first variant.
V_2024 = Vertex(name = 'V_2024',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF15, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3741,(0,3):C.GC_3741,(1,0):C.GC_3715,(3,0):C.GC_3720,(0,1):C.GC_3715,(2,1):C.GC_3720})
V_2025 = Vertex(name = 'V_2025',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3751,(0,1):C.GC_3751})
V_2026 = Vertex(name = 'V_2026',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3759,(0,1):C.GC_3759})
V_2027 = Vertex(name = 'V_2027',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.c ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3767,(0,1):C.GC_3767})
# c~ c c~ t vertices.
V_2028 = Vertex(name = 'V_2028',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF15, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2442,(0,3):C.GC_2442,(0,0):C.GC_2422,(2,0):C.GC_2426,(1,1):C.GC_2422,(3,1):C.GC_2426})
V_2029 = Vertex(name = 'V_2029',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2447,(0,1):C.GC_2447})
V_2030 = Vertex(name = 'V_2030',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2453,(0,1):C.GC_2453})
V_2031 = Vertex(name = 'V_2031',
particles = [ P.c__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2459,(0,1):C.GC_2459})
# t~ c c~ t vertices — full set of eight Lorentz structures in V_2032.
V_2032 = Vertex(name = 'V_2032',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_42,(0,0):C.GC_804,(2,0):C.GC_805,(1,3):C.GC_47,(3,3):C.GC_48,(1,1):C.GC_47,(3,1):C.GC_48,(1,2):C.GC_53,(0,4):C.GC_804,(2,4):C.GC_805,(0,5):C.GC_54})
V_2033 = Vertex(name = 'V_2033',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,4):C.GC_44,(0,5):C.GC_45,(1,2):C.GC_3716,(2,2):C.GC_3721,(1,0):C.GC_2421,(2,0):C.GC_2425,(1,1):C.GC_853,(0,3):C.GC_854})
V_2034 = Vertex(name = 'V_2034',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3742,(0,1):C.GC_3753})
V_2035 = Vertex(name = 'V_2035',
particles = [ P.t__tilde__, P.c, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3762,(0,1):C.GC_3769})
# t~ c t~ t vertices.
V_2036 = Vertex(name = 'V_2036',
particles = [ P.t__tilde__, P.c, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF13, L.FFFF16, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_3741,(0,3):C.GC_3741,(1,0):C.GC_3715,(3,0):C.GC_3720,(0,1):C.GC_3715,(2,1):C.GC_3720})
V_2037 = Vertex(name = 'V_2037',
particles = [ P.t__tilde__, P.c, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3751,(0,1):C.GC_3751})
V_2038 = Vertex(name = 'V_2038',
particles = [ P.t__tilde__, P.c, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3759,(0,1):C.GC_3759})
V_2039 = Vertex(name = 'V_2039',
particles = [ P.t__tilde__, P.c, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3767,(0,1):C.GC_3767})
# t~ t u~ t vertices.
V_2040 = Vertex(name = 'V_2040',
particles = [ P.t__tilde__, P.t, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_1652,(0,3):C.GC_1652,(0,0):C.GC_1640,(2,0):C.GC_1642,(1,1):C.GC_1640,(3,1):C.GC_1642})
V_2041 = Vertex(name = 'V_2041',
particles = [ P.t__tilde__, P.t, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1654,(0,1):C.GC_1654})
V_2042 = Vertex(name = 'V_2042',
particles = [ P.t__tilde__, P.t, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1657,(0,1):C.GC_1657})
V_2043 = Vertex(name = 'V_2043',
particles = [ P.t__tilde__, P.t, P.u__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_1660,(0,1):C.GC_1660})
# t~ t c~ t vertices.
V_2044 = Vertex(name = 'V_2044',
particles = [ P.t__tilde__, P.t, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_2442,(0,3):C.GC_2442,(0,0):C.GC_2422,(2,0):C.GC_2426,(1,1):C.GC_2422,(3,1):C.GC_2426})
V_2045 = Vertex(name = 'V_2045',
particles = [ P.t__tilde__, P.t, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2447,(0,1):C.GC_2447})
V_2046 = Vertex(name = 'V_2046',
particles = [ P.t__tilde__, P.t, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2453,(0,1):C.GC_2453})
V_2047 = Vertex(name = 'V_2047',
particles = [ P.t__tilde__, P.t, P.c__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_2459,(0,1):C.GC_2459})
# t~ t t~ t vertices — identical-flavor four-top operators.
V_2048 = Vertex(name = 'V_2048',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_41,(0,7):C.GC_41,(0,0):C.GC_47,(2,0):C.GC_48,(1,3):C.GC_47,(3,3):C.GC_48,(1,1):C.GC_47,(3,1):C.GC_48,(1,2):C.GC_53,(0,4):C.GC_47,(2,4):C.GC_48,(0,5):C.GC_53})
V_2049 = Vertex(name = 'V_2049',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_42,(0,7):C.GC_42,(0,0):C.GC_3717,(2,0):C.GC_3722,(1,3):C.GC_3717,(3,3):C.GC_3722,(1,1):C.GC_3717,(3,1):C.GC_3722,(1,2):C.GC_54,(0,4):C.GC_3717,(2,4):C.GC_3722,(0,5):C.GC_54})
V_2050 = Vertex(name = 'V_2050',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF14, L.FFFF15, L.FFFF16, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,6):C.GC_44,(0,7):C.GC_44,(0,0):C.GC_844,(2,0):C.GC_845,(1,3):C.GC_844,(3,3):C.GC_845,(1,1):C.GC_844,(3,1):C.GC_845,(1,2):C.GC_848,(0,4):C.GC_844,(2,4):C.GC_845,(0,5):C.GC_848})
V_2051 = Vertex(name = 'V_2051',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF14, L.FFFF17, L.FFFF3, L.FFFF4 ],
couplings = {(1,2):C.GC_45,(0,3):C.GC_45,(1,0):C.GC_849,(0,1):C.GC_849})
V_2052 = Vertex(name = 'V_2052',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3748,(0,1):C.GC_3748})
V_2053 = Vertex(name = 'V_2053',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3755,(0,1):C.GC_3755})
V_2054 = Vertex(name = 'V_2054',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3764,(0,1):C.GC_3764})
V_2055 = Vertex(name = 'V_2055',
particles = [ P.t__tilde__, P.t, P.t__tilde__, P.t ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(1,0):C.GC_3771,(0,1):C.GC_3771})
# --- Charged-lepton / neutrino four-fermion vertices V_2056 .. V_2082 -----
# NOTE(review): auto-generated UFO output (see generator) — color is the
# trivial singlet '1' for these purely leptonic vertices.
V_2056 = Vertex(name = 'V_2056',
particles = [ P.e__plus__, P.e__minus__, P.ve__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2057 = Vertex(name = 'V_2057',
particles = [ P.e__plus__, P.e__minus__, P.ve__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2058 = Vertex(name = 'V_2058',
particles = [ P.e__plus__, P.e__minus__, P.vm__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2059 = Vertex(name = 'V_2059',
particles = [ P.e__plus__, P.e__minus__, P.vt__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
# Lepton-flavor mixed combinations (e.g. mu+ e- ve~ vm) use the single
# FFFF4 structure with GC_24.
V_2060 = Vertex(name = 'V_2060',
particles = [ P.mu__plus__, P.e__minus__, P.ve__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2061 = Vertex(name = 'V_2061',
particles = [ P.ta__plus__, P.e__minus__, P.ve__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2062 = Vertex(name = 'V_2062',
particles = [ P.e__plus__, P.mu__minus__, P.vm__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2063 = Vertex(name = 'V_2063',
particles = [ P.mu__plus__, P.mu__minus__, P.ve__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2064 = Vertex(name = 'V_2064',
particles = [ P.mu__plus__, P.mu__minus__, P.vm__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2065 = Vertex(name = 'V_2065',
particles = [ P.mu__plus__, P.mu__minus__, P.vm__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2066 = Vertex(name = 'V_2066',
particles = [ P.mu__plus__, P.mu__minus__, P.vt__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2067 = Vertex(name = 'V_2067',
particles = [ P.ta__plus__, P.mu__minus__, P.vm__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2068 = Vertex(name = 'V_2068',
particles = [ P.e__plus__, P.ta__minus__, P.vt__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2069 = Vertex(name = 'V_2069',
particles = [ P.mu__plus__, P.ta__minus__, P.vt__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
V_2070 = Vertex(name = 'V_2070',
particles = [ P.ta__plus__, P.ta__minus__, P.ve__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2071 = Vertex(name = 'V_2071',
particles = [ P.ta__plus__, P.ta__minus__, P.vm__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2072 = Vertex(name = 'V_2072',
particles = [ P.ta__plus__, P.ta__minus__, P.vt__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_23,(0,0):C.GC_22})
V_2073 = Vertex(name = 'V_2073',
particles = [ P.ta__plus__, P.ta__minus__, P.vt__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_24})
# Pure four-neutrino vertices (same-flavor pairs use GC_23/GC_23 or
# GC_24/GC_24; mixed flavors use GC_23 with GC_24).
V_2074 = Vertex(name = 'V_2074',
particles = [ P.ve__tilde__, P.ve, P.ve__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_23,(0,1):C.GC_23})
V_2075 = Vertex(name = 'V_2075',
particles = [ P.ve__tilde__, P.ve, P.ve__tilde__, P.ve ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_24,(0,1):C.GC_24})
V_2076 = Vertex(name = 'V_2076',
particles = [ P.vm__tilde__, P.ve, P.ve__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_23,(0,1):C.GC_24})
V_2077 = Vertex(name = 'V_2077',
particles = [ P.vt__tilde__, P.ve, P.ve__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_23,(0,1):C.GC_24})
V_2078 = Vertex(name = 'V_2078',
particles = [ P.vm__tilde__, P.vm, P.vm__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_23,(0,1):C.GC_23})
V_2079 = Vertex(name = 'V_2079',
particles = [ P.vm__tilde__, P.vm, P.vm__tilde__, P.vm ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_24,(0,1):C.GC_24})
V_2080 = Vertex(name = 'V_2080',
particles = [ P.vt__tilde__, P.vm, P.vm__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_23,(0,1):C.GC_24})
V_2081 = Vertex(name = 'V_2081',
particles = [ P.vt__tilde__, P.vt, P.vt__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_23,(0,1):C.GC_23})
V_2082 = Vertex(name = 'V_2082',
particles = [ P.vt__tilde__, P.vt, P.vt__tilde__, P.vt ],
color = [ '1' ],
lorentz = [ L.FFFF3, L.FFFF4 ],
couplings = {(0,0):C.GC_24,(0,1):C.GC_24})
# --- Up-type-quark pair x neutrino pair vertices V_2083 .. V_2154 ---------
# NOTE(review): auto-generated UFO output. Each quark flavor combination
# (u~u, c~u, t~u, u~c, c~c, t~c, u~t, c~t, t~t) is repeated for all three
# neutrino flavors with identical couplings — the neutrino flavor does not
# change the coupling constants within a group.
V_2083 = Vertex(name = 'V_2083',
particles = [ P.u__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2084 = Vertex(name = 'V_2084',
particles = [ P.u__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_1083})
V_2085 = Vertex(name = 'V_2085',
particles = [ P.u__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3727})
V_2086 = Vertex(name = 'V_2086',
particles = [ P.u__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3735})
V_2087 = Vertex(name = 'V_2087',
particles = [ P.u__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2088 = Vertex(name = 'V_2088',
particles = [ P.u__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_1083})
V_2089 = Vertex(name = 'V_2089',
particles = [ P.u__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3727})
V_2090 = Vertex(name = 'V_2090',
particles = [ P.u__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3735})
V_2091 = Vertex(name = 'V_2091',
particles = [ P.u__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2092 = Vertex(name = 'V_2092',
particles = [ P.u__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_1083})
V_2093 = Vertex(name = 'V_2093',
particles = [ P.u__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3727})
V_2094 = Vertex(name = 'V_2094',
particles = [ P.u__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3735})
# c~ u (flavor-changing) with neutrino pairs.
V_2095 = Vertex(name = 'V_2095',
particles = [ P.c__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2427})
V_2096 = Vertex(name = 'V_2096',
particles = [ P.c__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2430})
V_2097 = Vertex(name = 'V_2097',
particles = [ P.c__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2427})
V_2098 = Vertex(name = 'V_2098',
particles = [ P.c__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2430})
V_2099 = Vertex(name = 'V_2099',
particles = [ P.c__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2427})
V_2100 = Vertex(name = 'V_2100',
particles = [ P.c__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2430})
# t~ u with neutrino pairs.
V_2101 = Vertex(name = 'V_2101',
particles = [ P.t__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3724})
V_2102 = Vertex(name = 'V_2102',
particles = [ P.t__tilde__, P.u, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3728})
V_2103 = Vertex(name = 'V_2103',
particles = [ P.t__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3724})
V_2104 = Vertex(name = 'V_2104',
particles = [ P.t__tilde__, P.u, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3728})
V_2105 = Vertex(name = 'V_2105',
particles = [ P.t__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3724})
V_2106 = Vertex(name = 'V_2106',
particles = [ P.t__tilde__, P.u, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3728})
# u~ c with neutrino pairs.
V_2107 = Vertex(name = 'V_2107',
particles = [ P.u__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1643})
V_2108 = Vertex(name = 'V_2108',
particles = [ P.u__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1645})
V_2109 = Vertex(name = 'V_2109',
particles = [ P.u__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1643})
V_2110 = Vertex(name = 'V_2110',
particles = [ P.u__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1645})
V_2111 = Vertex(name = 'V_2111',
particles = [ P.u__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1643})
V_2112 = Vertex(name = 'V_2112',
particles = [ P.u__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1645})
# c~ c with neutrino pairs.
V_2113 = Vertex(name = 'V_2113',
particles = [ P.c__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2114 = Vertex(name = 'V_2114',
particles = [ P.c__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_498})
V_2115 = Vertex(name = 'V_2115',
particles = [ P.c__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2428})
V_2116 = Vertex(name = 'V_2116',
particles = [ P.c__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2432})
V_2117 = Vertex(name = 'V_2117',
particles = [ P.c__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2118 = Vertex(name = 'V_2118',
particles = [ P.c__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_498})
V_2119 = Vertex(name = 'V_2119',
particles = [ P.c__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2428})
V_2120 = Vertex(name = 'V_2120',
particles = [ P.c__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2432})
V_2121 = Vertex(name = 'V_2121',
particles = [ P.c__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2122 = Vertex(name = 'V_2122',
particles = [ P.c__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_498})
V_2123 = Vertex(name = 'V_2123',
particles = [ P.c__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2428})
V_2124 = Vertex(name = 'V_2124',
particles = [ P.c__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2432})
# t~ c with neutrino pairs.
V_2125 = Vertex(name = 'V_2125',
particles = [ P.t__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3725})
V_2126 = Vertex(name = 'V_2126',
particles = [ P.t__tilde__, P.c, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3730})
V_2127 = Vertex(name = 'V_2127',
particles = [ P.t__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3725})
V_2128 = Vertex(name = 'V_2128',
particles = [ P.t__tilde__, P.c, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3730})
V_2129 = Vertex(name = 'V_2129',
particles = [ P.t__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3725})
V_2130 = Vertex(name = 'V_2130',
particles = [ P.t__tilde__, P.c, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3730})
# u~ t with neutrino pairs.
V_2131 = Vertex(name = 'V_2131',
particles = [ P.u__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1644})
V_2132 = Vertex(name = 'V_2132',
particles = [ P.u__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1647})
V_2133 = Vertex(name = 'V_2133',
particles = [ P.u__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1644})
V_2134 = Vertex(name = 'V_2134',
particles = [ P.u__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1647})
V_2135 = Vertex(name = 'V_2135',
particles = [ P.u__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1644})
V_2136 = Vertex(name = 'V_2136',
particles = [ P.u__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_1647})
# c~ t with neutrino pairs.
V_2137 = Vertex(name = 'V_2137',
particles = [ P.c__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2429})
V_2138 = Vertex(name = 'V_2138',
particles = [ P.c__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2434})
V_2139 = Vertex(name = 'V_2139',
particles = [ P.c__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2429})
V_2140 = Vertex(name = 'V_2140',
particles = [ P.c__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2434})
V_2141 = Vertex(name = 'V_2141',
particles = [ P.c__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2429})
V_2142 = Vertex(name = 'V_2142',
particles = [ P.c__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2434})
# t~ t with neutrino pairs.
V_2143 = Vertex(name = 'V_2143',
particles = [ P.t__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2144 = Vertex(name = 'V_2144',
particles = [ P.t__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_847})
V_2145 = Vertex(name = 'V_2145',
particles = [ P.t__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3726})
V_2146 = Vertex(name = 'V_2146',
particles = [ P.t__tilde__, P.t, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3732})
V_2147 = Vertex(name = 'V_2147',
particles = [ P.t__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2148 = Vertex(name = 'V_2148',
particles = [ P.t__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_847})
V_2149 = Vertex(name = 'V_2149',
particles = [ P.t__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3726})
V_2150 = Vertex(name = 'V_2150',
particles = [ P.t__tilde__, P.t, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3732})
V_2151 = Vertex(name = 'V_2151',
particles = [ P.t__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_37})
V_2152 = Vertex(name = 'V_2152',
particles = [ P.t__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_27,(0,0):C.GC_847})
V_2153 = Vertex(name = 'V_2153',
particles = [ P.t__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3726})
V_2154 = Vertex(name = 'V_2154',
particles = [ P.t__tilde__, P.t, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3732})
# --- Quark + charged-lepton + neutrino vertices V_2155 .. V_2179 ----------
# NOTE(review): auto-generated UFO output. Each of these carries six
# Lorentz structures (FFFF10, FFFF11, FFFF18, FFFF19, FFFF20, FFFF4) with
# one coupling per structure; the up-type x down-type quark pairing varies
# (u~/c~/t~ with d or s) for each lepton generation.
V_2155 = Vertex(name = 'V_2155',
particles = [ P.u__tilde__, P.d, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1159,(0,4):C.GC_1252,(0,2):C.GC_1251,(0,3):C.GC_1251,(0,0):C.GC_1214,(0,1):C.GC_1250})
V_2156 = Vertex(name = 'V_2156',
particles = [ P.u__tilde__, P.d, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1279,(0,4):C.GC_1303,(0,2):C.GC_1302,(0,3):C.GC_1302,(0,0):C.GC_1298,(0,1):C.GC_1301})
V_2157 = Vertex(name = 'V_2157',
particles = [ P.c__tilde__, P.d, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1856,(0,4):C.GC_1926,(0,2):C.GC_1925,(0,3):C.GC_1925,(0,0):C.GC_1927,(0,1):C.GC_1924})
V_2158 = Vertex(name = 'V_2158',
particles = [ P.c__tilde__, P.d, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1995,(0,4):C.GC_2007,(0,2):C.GC_2006,(0,3):C.GC_2006,(0,0):C.GC_2008,(0,1):C.GC_2005})
V_2159 = Vertex(name = 'V_2159',
particles = [ P.t__tilde__, P.d, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2747,(0,4):C.GC_2846,(0,2):C.GC_2845,(0,3):C.GC_2845,(0,0):C.GC_2805,(0,1):C.GC_2844})
V_2160 = Vertex(name = 'V_2160',
particles = [ P.t__tilde__, P.d, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2896,(0,4):C.GC_2925,(0,2):C.GC_2924,(0,3):C.GC_2924,(0,0):C.GC_2921,(0,1):C.GC_2923})
# Second lepton generation (mu+ vm) with d-quark.
V_2161 = Vertex(name = 'V_2161',
particles = [ P.u__tilde__, P.d, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1159,(0,4):C.GC_1255,(0,2):C.GC_1254,(0,3):C.GC_1254,(0,0):C.GC_1215,(0,1):C.GC_1253})
V_2162 = Vertex(name = 'V_2162',
particles = [ P.u__tilde__, P.d, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1279,(0,4):C.GC_1306,(0,2):C.GC_1305,(0,3):C.GC_1305,(0,0):C.GC_1299,(0,1):C.GC_1304})
V_2163 = Vertex(name = 'V_2163',
particles = [ P.c__tilde__, P.d, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1856,(0,4):C.GC_1930,(0,2):C.GC_1929,(0,3):C.GC_1929,(0,0):C.GC_1931,(0,1):C.GC_1928})
V_2164 = Vertex(name = 'V_2164',
particles = [ P.c__tilde__, P.d, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1995,(0,4):C.GC_2011,(0,2):C.GC_2010,(0,3):C.GC_2010,(0,0):C.GC_2012,(0,1):C.GC_2009})
V_2165 = Vertex(name = 'V_2165',
particles = [ P.t__tilde__, P.d, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2747,(0,4):C.GC_2849,(0,2):C.GC_2848,(0,3):C.GC_2848,(0,0):C.GC_2806,(0,1):C.GC_2847})
V_2166 = Vertex(name = 'V_2166',
particles = [ P.t__tilde__, P.d, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2896,(0,4):C.GC_2928,(0,2):C.GC_2927,(0,3):C.GC_2927,(0,0):C.GC_2922,(0,1):C.GC_2926})
# Third lepton generation (ta+ vt) with d-quark.
V_2167 = Vertex(name = 'V_2167',
particles = [ P.u__tilde__, P.d, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1159,(0,4):C.GC_1272,(0,2):C.GC_1271,(0,3):C.GC_1271,(0,0):C.GC_1230,(0,1):C.GC_1270})
V_2168 = Vertex(name = 'V_2168',
particles = [ P.u__tilde__, P.d, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1279,(0,4):C.GC_1309,(0,2):C.GC_1308,(0,3):C.GC_1308,(0,0):C.GC_1300,(0,1):C.GC_1307})
V_2169 = Vertex(name = 'V_2169',
particles = [ P.c__tilde__, P.d, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1856,(0,4):C.GC_1979,(0,2):C.GC_1978,(0,3):C.GC_1978,(0,0):C.GC_1980,(0,1):C.GC_1977})
V_2170 = Vertex(name = 'V_2170',
particles = [ P.c__tilde__, P.d, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1995,(0,4):C.GC_2024,(0,2):C.GC_2023,(0,3):C.GC_2023,(0,0):C.GC_2025,(0,1):C.GC_2022})
V_2171 = Vertex(name = 'V_2171',
particles = [ P.t__tilde__, P.d, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2747,(0,4):C.GC_2871,(0,2):C.GC_2870,(0,3):C.GC_2870,(0,0):C.GC_2868,(0,1):C.GC_2869})
V_2172 = Vertex(name = 'V_2172',
particles = [ P.t__tilde__, P.d, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2896,(0,4):C.GC_2932,(0,2):C.GC_2931,(0,3):C.GC_2931,(0,0):C.GC_2929,(0,1):C.GC_2930})
# Same pattern repeated with the s-quark in place of d.
V_2173 = Vertex(name = 'V_2173',
particles = [ P.u__tilde__, P.s, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1310,(0,4):C.GC_1416,(0,2):C.GC_1415,(0,3):C.GC_1415,(0,0):C.GC_1377,(0,1):C.GC_1414})
V_2174 = Vertex(name = 'V_2174',
particles = [ P.u__tilde__, P.s, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1448,(0,4):C.GC_1478,(0,2):C.GC_1477,(0,3):C.GC_1477,(0,0):C.GC_1473,(0,1):C.GC_1476})
V_2175 = Vertex(name = 'V_2175',
particles = [ P.c__tilde__, P.s, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2032,(0,4):C.GC_2101,(0,2):C.GC_2100,(0,3):C.GC_2100,(0,0):C.GC_2140,(0,1):C.GC_2099})
V_2176 = Vertex(name = 'V_2176',
particles = [ P.c__tilde__, P.s, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2188,(0,4):C.GC_2203,(0,2):C.GC_2202,(0,3):C.GC_2202,(0,0):C.GC_2207,(0,1):C.GC_2201})
V_2177 = Vertex(name = 'V_2177',
particles = [ P.t__tilde__, P.s, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3097,(0,4):C.GC_3199,(0,2):C.GC_3198,(0,3):C.GC_3198,(0,0):C.GC_3165,(0,1):C.GC_3197})
V_2178 = Vertex(name = 'V_2178',
particles = [ P.t__tilde__, P.s, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3262,(0,4):C.GC_3294,(0,2):C.GC_3293,(0,3):C.GC_3293,(0,0):C.GC_3290,(0,1):C.GC_3292})
V_2179 = Vertex(name = 'V_2179',
particles = [ P.u__tilde__, P.s, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1310,(0,4):C.GC_1419,(0,2):C.GC_1418,(0,3):C.GC_1418,(0,0):C.GC_1378,(0,1):C.GC_1417})
V_2180 = Vertex(name = 'V_2180',
particles = [ P.u__tilde__, P.s, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1448,(0,4):C.GC_1481,(0,2):C.GC_1480,(0,3):C.GC_1480,(0,0):C.GC_1474,(0,1):C.GC_1479})
V_2181 = Vertex(name = 'V_2181',
particles = [ P.c__tilde__, P.s, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2032,(0,4):C.GC_2104,(0,2):C.GC_2103,(0,3):C.GC_2103,(0,0):C.GC_2141,(0,1):C.GC_2102})
V_2182 = Vertex(name = 'V_2182',
particles = [ P.c__tilde__, P.s, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2188,(0,4):C.GC_2206,(0,2):C.GC_2205,(0,3):C.GC_2205,(0,0):C.GC_2208,(0,1):C.GC_2204})
V_2183 = Vertex(name = 'V_2183',
particles = [ P.t__tilde__, P.s, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3097,(0,4):C.GC_3202,(0,2):C.GC_3201,(0,3):C.GC_3201,(0,0):C.GC_3166,(0,1):C.GC_3200})
V_2184 = Vertex(name = 'V_2184',
particles = [ P.t__tilde__, P.s, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3262,(0,4):C.GC_3297,(0,2):C.GC_3296,(0,3):C.GC_3296,(0,0):C.GC_3291,(0,1):C.GC_3295})
V_2185 = Vertex(name = 'V_2185',
particles = [ P.u__tilde__, P.s, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1310,(0,4):C.GC_1441,(0,2):C.GC_1440,(0,3):C.GC_1440,(0,0):C.GC_1391,(0,1):C.GC_1439})
V_2186 = Vertex(name = 'V_2186',
particles = [ P.u__tilde__, P.s, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1448,(0,4):C.GC_1484,(0,2):C.GC_1483,(0,3):C.GC_1483,(0,0):C.GC_1475,(0,1):C.GC_1482})
V_2187 = Vertex(name = 'V_2187',
particles = [ P.c__tilde__, P.s, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2032,(0,4):C.GC_2168,(0,2):C.GC_2167,(0,3):C.GC_2167,(0,0):C.GC_2169,(0,1):C.GC_2166})
V_2188 = Vertex(name = 'V_2188',
particles = [ P.c__tilde__, P.s, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2188,(0,4):C.GC_2220,(0,2):C.GC_2219,(0,3):C.GC_2219,(0,0):C.GC_2221,(0,1):C.GC_2218})
V_2189 = Vertex(name = 'V_2189',
particles = [ P.t__tilde__, P.s, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3097,(0,4):C.GC_3235,(0,2):C.GC_3234,(0,3):C.GC_3234,(0,0):C.GC_3232,(0,1):C.GC_3233})
V_2190 = Vertex(name = 'V_2190',
particles = [ P.t__tilde__, P.s, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3262,(0,4):C.GC_3301,(0,2):C.GC_3300,(0,3):C.GC_3300,(0,0):C.GC_3298,(0,1):C.GC_3299})
V_2191 = Vertex(name = 'V_2191',
particles = [ P.u__tilde__, P.b, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1485,(0,4):C.GC_1596,(0,2):C.GC_1595,(0,3):C.GC_1595,(0,0):C.GC_1546,(0,1):C.GC_1594})
V_2192 = Vertex(name = 'V_2192',
particles = [ P.u__tilde__, P.b, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1813,(0,4):C.GC_1849,(0,2):C.GC_1848,(0,3):C.GC_1848,(0,0):C.GC_1844,(0,1):C.GC_1847})
V_2193 = Vertex(name = 'V_2193',
particles = [ P.c__tilde__, P.b, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2231,(0,4):C.GC_2313,(0,2):C.GC_2312,(0,3):C.GC_2312,(0,0):C.GC_2310,(0,1):C.GC_2311})
V_2194 = Vertex(name = 'V_2194',
particles = [ P.c__tilde__, P.b, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2394,(0,4):C.GC_2554,(0,2):C.GC_2553,(0,3):C.GC_2553,(0,0):C.GC_2551,(0,1):C.GC_2552})
V_2195 = Vertex(name = 'V_2195',
particles = [ P.t__tilde__, P.b, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3529,(0,4):C.GC_3642,(0,2):C.GC_3641,(0,3):C.GC_3641,(0,0):C.GC_3591,(0,1):C.GC_3640})
V_2196 = Vertex(name = 'V_2196',
particles = [ P.t__tilde__, P.b, P.e__plus__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3934,(0,4):C.GC_3970,(0,2):C.GC_3969,(0,3):C.GC_3969,(0,0):C.GC_3966,(0,1):C.GC_3968})
V_2197 = Vertex(name = 'V_2197',
particles = [ P.u__tilde__, P.b, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1485,(0,4):C.GC_1599,(0,2):C.GC_1598,(0,3):C.GC_1598,(0,0):C.GC_1547,(0,1):C.GC_1597})
V_2198 = Vertex(name = 'V_2198',
particles = [ P.u__tilde__, P.b, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1813,(0,4):C.GC_1852,(0,2):C.GC_1851,(0,3):C.GC_1851,(0,0):C.GC_1845,(0,1):C.GC_1850})
V_2199 = Vertex(name = 'V_2199',
particles = [ P.c__tilde__, P.b, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2231,(0,4):C.GC_2317,(0,2):C.GC_2316,(0,3):C.GC_2316,(0,0):C.GC_2314,(0,1):C.GC_2315})
V_2200 = Vertex(name = 'V_2200',
particles = [ P.c__tilde__, P.b, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2394,(0,4):C.GC_2567,(0,2):C.GC_2566,(0,3):C.GC_2566,(0,0):C.GC_2564,(0,1):C.GC_2565})
V_2201 = Vertex(name = 'V_2201',
particles = [ P.t__tilde__, P.b, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3529,(0,4):C.GC_3645,(0,2):C.GC_3644,(0,3):C.GC_3644,(0,0):C.GC_3592,(0,1):C.GC_3643})
V_2202 = Vertex(name = 'V_2202',
particles = [ P.t__tilde__, P.b, P.mu__plus__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3934,(0,4):C.GC_3973,(0,2):C.GC_3972,(0,3):C.GC_3972,(0,0):C.GC_3967,(0,1):C.GC_3971})
V_2203 = Vertex(name = 'V_2203',
particles = [ P.u__tilde__, P.b, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1485,(0,4):C.GC_1616,(0,2):C.GC_1615,(0,3):C.GC_1615,(0,0):C.GC_1566,(0,1):C.GC_1614})
V_2204 = Vertex(name = 'V_2204',
particles = [ P.u__tilde__, P.b, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_1813,(0,4):C.GC_1855,(0,2):C.GC_1854,(0,3):C.GC_1854,(0,0):C.GC_1846,(0,1):C.GC_1853})
V_2205 = Vertex(name = 'V_2205',
particles = [ P.c__tilde__, P.b, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2231,(0,4):C.GC_2369,(0,2):C.GC_2368,(0,3):C.GC_2368,(0,0):C.GC_2366,(0,1):C.GC_2367})
V_2206 = Vertex(name = 'V_2206',
particles = [ P.c__tilde__, P.b, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_2394,(0,4):C.GC_2628,(0,2):C.GC_2627,(0,3):C.GC_2627,(0,0):C.GC_2625,(0,1):C.GC_2626})
V_2207 = Vertex(name = 'V_2207',
particles = [ P.t__tilde__, P.b, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3529,(0,4):C.GC_3667,(0,2):C.GC_3666,(0,3):C.GC_3666,(0,0):C.GC_3664,(0,1):C.GC_3665})
V_2208 = Vertex(name = 'V_2208',
particles = [ P.t__tilde__, P.b, P.ta__plus__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF10, L.FFFF11, L.FFFF18, L.FFFF19, L.FFFF20, L.FFFF4 ],
couplings = {(0,5):C.GC_3934,(0,4):C.GC_3989,(0,2):C.GC_3988,(0,3):C.GC_3988,(0,0):C.GC_3986,(0,1):C.GC_3987})
V_2209 = Vertex(name = 'V_2209',
particles = [ P.d__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2210 = Vertex(name = 'V_2210',
particles = [ P.d__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_557})
V_2211 = Vertex(name = 'V_2211',
particles = [ P.d__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2685})
V_2212 = Vertex(name = 'V_2212',
particles = [ P.d__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2686})
V_2213 = Vertex(name = 'V_2213',
particles = [ P.d__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2214 = Vertex(name = 'V_2214',
particles = [ P.d__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_557})
V_2215 = Vertex(name = 'V_2215',
particles = [ P.d__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2685})
V_2216 = Vertex(name = 'V_2216',
particles = [ P.d__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2686})
V_2217 = Vertex(name = 'V_2217',
particles = [ P.d__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2218 = Vertex(name = 'V_2218',
particles = [ P.d__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_557})
V_2219 = Vertex(name = 'V_2219',
particles = [ P.d__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2685})
V_2220 = Vertex(name = 'V_2220',
particles = [ P.d__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2686})
V_2221 = Vertex(name = 'V_2221',
particles = [ P.s__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2957})
V_2222 = Vertex(name = 'V_2222',
particles = [ P.s__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2960})
V_2223 = Vertex(name = 'V_2223',
particles = [ P.s__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2957})
V_2224 = Vertex(name = 'V_2224',
particles = [ P.s__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2960})
V_2225 = Vertex(name = 'V_2225',
particles = [ P.s__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2957})
V_2226 = Vertex(name = 'V_2226',
particles = [ P.s__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2960})
V_2227 = Vertex(name = 'V_2227',
particles = [ P.b__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2958})
V_2228 = Vertex(name = 'V_2228',
particles = [ P.b__tilde__, P.d, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2962})
V_2229 = Vertex(name = 'V_2229',
particles = [ P.b__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2958})
V_2230 = Vertex(name = 'V_2230',
particles = [ P.b__tilde__, P.d, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2962})
V_2231 = Vertex(name = 'V_2231',
particles = [ P.b__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2958})
V_2232 = Vertex(name = 'V_2232',
particles = [ P.b__tilde__, P.d, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_2962})
V_2233 = Vertex(name = 'V_2233',
particles = [ P.d__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3336})
V_2234 = Vertex(name = 'V_2234',
particles = [ P.d__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3340})
V_2235 = Vertex(name = 'V_2235',
particles = [ P.d__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3336})
V_2236 = Vertex(name = 'V_2236',
particles = [ P.d__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3340})
V_2237 = Vertex(name = 'V_2237',
particles = [ P.d__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3336})
V_2238 = Vertex(name = 'V_2238',
particles = [ P.d__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3340})
V_2239 = Vertex(name = 'V_2239',
particles = [ P.s__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2240 = Vertex(name = 'V_2240',
particles = [ P.s__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_740})
V_2241 = Vertex(name = 'V_2241',
particles = [ P.s__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3337})
V_2242 = Vertex(name = 'V_2242',
particles = [ P.s__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3342})
V_2243 = Vertex(name = 'V_2243',
particles = [ P.s__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2244 = Vertex(name = 'V_2244',
particles = [ P.s__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_740})
V_2245 = Vertex(name = 'V_2245',
particles = [ P.s__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3337})
V_2246 = Vertex(name = 'V_2246',
particles = [ P.s__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3342})
V_2247 = Vertex(name = 'V_2247',
particles = [ P.s__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2248 = Vertex(name = 'V_2248',
particles = [ P.s__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_740})
V_2249 = Vertex(name = 'V_2249',
particles = [ P.s__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3337})
V_2250 = Vertex(name = 'V_2250',
particles = [ P.s__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3342})
V_2251 = Vertex(name = 'V_2251',
particles = [ P.b__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3338})
V_2252 = Vertex(name = 'V_2252',
particles = [ P.b__tilde__, P.s, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3344})
V_2253 = Vertex(name = 'V_2253',
particles = [ P.b__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3338})
V_2254 = Vertex(name = 'V_2254',
particles = [ P.b__tilde__, P.s, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3344})
V_2255 = Vertex(name = 'V_2255',
particles = [ P.b__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3338})
V_2256 = Vertex(name = 'V_2256',
particles = [ P.b__tilde__, P.s, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_3344})
V_2257 = Vertex(name = 'V_2257',
particles = [ P.d__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4096})
V_2258 = Vertex(name = 'V_2258',
particles = [ P.d__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4100})
V_2259 = Vertex(name = 'V_2259',
particles = [ P.d__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4096})
V_2260 = Vertex(name = 'V_2260',
particles = [ P.d__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4100})
V_2261 = Vertex(name = 'V_2261',
particles = [ P.d__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4096})
V_2262 = Vertex(name = 'V_2262',
particles = [ P.d__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4100})
V_2263 = Vertex(name = 'V_2263',
particles = [ P.s__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4097})
V_2264 = Vertex(name = 'V_2264',
particles = [ P.s__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4102})
V_2265 = Vertex(name = 'V_2265',
particles = [ P.s__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4097})
V_2266 = Vertex(name = 'V_2266',
particles = [ P.s__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4102})
V_2267 = Vertex(name = 'V_2267',
particles = [ P.s__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4097})
V_2268 = Vertex(name = 'V_2268',
particles = [ P.s__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4102})
V_2269 = Vertex(name = 'V_2269',
particles = [ P.b__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2270 = Vertex(name = 'V_2270',
particles = [ P.b__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_451})
V_2271 = Vertex(name = 'V_2271',
particles = [ P.b__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4098})
V_2272 = Vertex(name = 'V_2272',
particles = [ P.b__tilde__, P.b, P.ve__tilde__, P.ve ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4104})
V_2273 = Vertex(name = 'V_2273',
particles = [ P.b__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2274 = Vertex(name = 'V_2274',
particles = [ P.b__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_451})
V_2275 = Vertex(name = 'V_2275',
particles = [ P.b__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4098})
V_2276 = Vertex(name = 'V_2276',
particles = [ P.b__tilde__, P.b, P.vm__tilde__, P.vm ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4104})
V_2277 = Vertex(name = 'V_2277',
particles = [ P.b__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_25,(0,0):C.GC_21})
V_2278 = Vertex(name = 'V_2278',
particles = [ P.b__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF12, L.FFFF4 ],
couplings = {(0,1):C.GC_26,(0,0):C.GC_451})
V_2279 = Vertex(name = 'V_2279',
particles = [ P.b__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4098})
V_2280 = Vertex(name = 'V_2280',
particles = [ P.b__tilde__, P.b, P.vt__tilde__, P.vt ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.FFFF4 ],
couplings = {(0,0):C.GC_4104})
V_2281 = Vertex(name = 'V_2281',
particles = [ P.s__tilde__, P.d, P.s__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_1952,(2,0):C.GC_1955,(1,2):C.GC_1952,(3,2):C.GC_1955,(1,1):C.GC_1952,(3,1):C.GC_1955,(0,3):C.GC_1952,(2,3):C.GC_1955})
V_2282 = Vertex(name = 'V_2282',
particles = [ P.b__tilde__, P.d, P.s__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_2792,(2,0):C.GC_2795,(1,2):C.GC_1953,(3,2):C.GC_1956,(1,1):C.GC_2792,(3,1):C.GC_2795,(0,3):C.GC_1953,(2,3):C.GC_1956})
V_2283 = Vertex(name = 'V_2283',
particles = [ P.s__tilde__, P.b, P.s__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_1933,(2,0):C.GC_1936,(1,2):C.GC_1933,(3,2):C.GC_1936,(1,1):C.GC_2337,(3,1):C.GC_2340,(0,3):C.GC_2337,(2,3):C.GC_2340})
V_2284 = Vertex(name = 'V_2284',
particles = [ P.b__tilde__, P.d, P.b__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_2793,(2,0):C.GC_2796,(1,2):C.GC_2793,(3,2):C.GC_2796,(1,1):C.GC_2793,(3,1):C.GC_2796,(0,3):C.GC_2793,(2,3):C.GC_2796})
V_2285 = Vertex(name = 'V_2285',
particles = [ P.b__tilde__, P.s, P.b__tilde__, P.d ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_2809,(2,0):C.GC_2812,(1,2):C.GC_2809,(3,2):C.GC_2812,(1,1):C.GC_3143,(3,1):C.GC_3146,(0,3):C.GC_3143,(2,3):C.GC_3146})
V_2286 = Vertex(name = 'V_2286',
particles = [ P.d__tilde__, P.s, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_1371,(2,0):C.GC_1374,(1,2):C.GC_1371,(3,2):C.GC_1374,(1,1):C.GC_1371,(3,1):C.GC_1374,(0,3):C.GC_1371,(2,3):C.GC_1374})
V_2287 = Vertex(name = 'V_2287',
particles = [ P.b__tilde__, P.s, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_3147,(2,0):C.GC_3150,(1,2):C.GC_1373,(3,2):C.GC_1376,(1,1):C.GC_3147,(3,1):C.GC_3150,(0,3):C.GC_1373,(2,3):C.GC_1376})
V_2288 = Vertex(name = 'V_2288',
particles = [ P.d__tilde__, P.b, P.d__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_1347,(2,0):C.GC_1350,(1,2):C.GC_1347,(3,2):C.GC_1350,(1,1):C.GC_1548,(3,1):C.GC_1551,(0,3):C.GC_1548,(2,3):C.GC_1551})
V_2289 = Vertex(name = 'V_2289',
particles = [ P.b__tilde__, P.s, P.b__tilde__, P.s ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_3149,(2,0):C.GC_3152,(1,2):C.GC_3149,(3,2):C.GC_3152,(1,1):C.GC_3149,(3,1):C.GC_3152,(0,3):C.GC_3149,(2,3):C.GC_3152})
V_2290 = Vertex(name = 'V_2290',
particles = [ P.d__tilde__, P.b, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_1534,(2,0):C.GC_1537,(1,2):C.GC_1534,(3,2):C.GC_1537,(1,1):C.GC_1534,(3,1):C.GC_1537,(0,3):C.GC_1534,(2,3):C.GC_1537})
V_2291 = Vertex(name = 'V_2291',
particles = [ P.s__tilde__, P.b, P.d__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_2318,(2,0):C.GC_2321,(1,2):C.GC_1535,(3,2):C.GC_1538,(1,1):C.GC_2318,(3,1):C.GC_2321,(0,3):C.GC_1535,(2,3):C.GC_1538})
V_2292 = Vertex(name = 'V_2292',
particles = [ P.s__tilde__, P.b, P.s__tilde__, P.b ],
color = [ 'Identity(1,2)*Identity(3,4)', 'Identity(1,4)*Identity(2,3)', 'T(-1,2,1)*T(-1,4,3)', 'T(-1,2,3)*T(-1,4,1)' ],
lorentz = [ L.FFFF12, L.FFFF13, L.FFFF15, L.FFFF16 ],
couplings = {(0,0):C.GC_2319,(2,0):C.GC_2322,(1,2):C.GC_2319,(3,2):C.GC_2322,(1,1):C.GC_2319,(3,1):C.GC_2322,(0,3):C.GC_2319,(2,3):C.GC_2322})
V_2293 = Vertex(name = 'V_2293',
particles = [ P.a, P.a, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_208})
V_2294 = Vertex(name = 'V_2294',
particles = [ P.g, P.g, P.H1 ],
color = [ 'Identity(1,2)' ],
lorentz = [ L.VVS4, L.VVS5, L.VVS6, L.VVS7 ],
couplings = {(0,0):C.GC_209,(0,2):C.GC_222,(0,1):C.GC_218,(0,3):C.GC_213})
V_2295 = Vertex(name = 'V_2295',
particles = [ P.a, P.Z, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_212})
V_2296 = Vertex(name = 'V_2296',
particles = [ P.a, P.Z1, P.H ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_212})
V_2297 = Vertex(name = 'V_2297',
particles = [ P.a, P.Z1, P.H1 ],
color = [ '1' ],
lorentz = [ L.VVS4 ],
couplings = {(0,0):C.GC_226})
V_2298 = Vertex(name = 'V_2298',
particles = [ P.g, P.g, P.g, P.H1 ],
color = [ 'f(1,2,3)' ],
lorentz = [ L.VVVS3, L.VVVS4, L.VVVS5, L.VVVS6, L.VVVS7 ],
couplings = {(0,2):C.GC_214,(0,4):C.GC_223,(0,3):C.GC_219,(0,1):C.GC_216,(0,0):C.GC_210})
V_2299 = Vertex(name = 'V_2299',
particles = [ P.g, P.g, P.g, P.g, P.H1 ],
color = [ 'f(-1,1,2)*f(-1,3,4)', 'f(-1,1,3)*f(-1,2,4)', 'f(-1,1,4)*f(-1,2,3)' ],
lorentz = [ L.VVVVS1, L.VVVVS10, L.VVVVS11, L.VVVVS12, L.VVVVS13, L.VVVVS14, L.VVVVS15, L.VVVVS16, L.VVVVS18, L.VVVVS2, L.VVVVS20, L.VVVVS3, L.VVVVS5, L.VVVVS7, L.VVVVS8 ],
couplings = {(2,5):C.GC_215,(2,8):C.GC_224,(1,4):C.GC_215,(1,10):C.GC_224,(2,6):C.GC_221,(0,11):C.GC_217,(0,12):C.GC_225,(1,7):C.GC_221,(0,3):C.GC_220,(1,2):C.GC_217,(2,1):C.GC_217,(0,9):C.GC_215,(1,13):C.GC_211,(0,0):C.GC_211,(2,14):C.GC_211})
| 47.504708
| 4,075
| 0.471911
| 104,383
| 655,850
| 2.694912
| 0.043312
| 0.068894
| 0.035847
| 0.034625
| 0.728574
| 0.726708
| 0.725918
| 0.721421
| 0.717867
| 0.712772
| 0
| 0.158016
| 0.318268
| 655,850
| 13,805
| 4,076
| 47.508149
| 0.471137
| 0.000235
| 0
| 0.607096
| 0
| 0.010436
| 0.107157
| 0.050484
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000348
| 0
| 0.000348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c4f51e73d5b511f74964999147d7b7e8783a60b9
| 6,633
|
py
|
Python
|
tests/widgets/test_lus_simulator.py
|
NMontanaBrown/scikit-surgeryvtk
|
85921775b72f40cdf4ee606ab83531758b0345bb
|
[
"BSD-3-Clause"
] | 1
|
2020-10-16T13:50:36.000Z
|
2020-10-16T13:50:36.000Z
|
tests/widgets/test_lus_simulator.py
|
NMontanaBrown/scikit-surgeryvtk
|
85921775b72f40cdf4ee606ab83531758b0345bb
|
[
"BSD-3-Clause"
] | null | null | null |
tests/widgets/test_lus_simulator.py
|
NMontanaBrown/scikit-surgeryvtk
|
85921775b72f40cdf4ee606ab83531758b0345bb
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import pytest
import cv2
import numpy as np
import sksurgeryvtk.widgets.vtk_lus_simulator as lus
def test_basic_rendering_generator(setup_vtk_err):
    """Render the LUS scene at the reference pose and at an offset pose,
    saving the colour image and the per-structure masks for each pose."""
    _, app = setup_vtk_err

    model_file = "tests/data/lus/test_data.json"
    background_file = "tests/data/rendering/background-960-x-540-black.png"
    intrinsics_file = "tests/data/liver/calib.left.intrinsics.halved.txt"
    reference_l2c_file = "tests/data/lus/spp_liver2camera.txt"
    reference_p2c_file = "tests/data/lus/spp_probe2camera.txt"

    generator = lus.VTKLUSSimulator(model_file,
                                    background_file,
                                    intrinsics_file,
                                    reference_l2c_file,
                                    reference_p2c_file)

    # First generate image at reference pose exactly (all offsets zero).
    l2c, p2c, angle, position = generator.set_pose(
        [0, 0, 0, 0, 0, 0],  # anatomy rx, ry, rz, tx, ty, tz
        [0, 0, 0, 0, 0, 0],  # probe rx, ry, rz, tx, ty, tz
        0,
        None)

    generator.show()
    generator.setFixedSize(960, 540)

    image = generator.get_image()
    cv2.imwrite('tests/output/lus_refererence_posn_image.png', image)

    masks = generator.get_masks()
    for name, mask_image in masks.items():
        cv2.imwrite(f'tests/output/lus_refererence_posn_mask_{name}.png',
                    mask_image)

    print("test_basic_rendering_generator: ref l2c=" + str(l2c))
    print("test_basic_rendering_generator: ref p2c=" + str(p2c))
    print("test_basic_rendering_generator: ref angle=" + str(angle))
    print("test_basic_rendering_generator: ref position=" + str(position))

    # Now try another pose.
    l2c, p2c, angle, position = generator.set_pose(
        [20, 30, 40, 5, 10, 15],  # anatomy rx, ry, rz, tx, ty, tz
        [2, 3, 4, 5, 6, 7],       # probe rx, ry, rz, tx, ty, tz
        -20,
        [10.97657775878900566, -80.58924865722650566, -27.99212646484369316])

    print("test_basic_rendering_generator: alt l2c=" + str(l2c))
    print("test_basic_rendering_generator: alt p2c=" + str(p2c))
    print("test_basic_rendering_generator: alt angle=" + str(angle))
    print("test_basic_rendering_generator: alt position=" + str(position))

    image = generator.get_image()
    cv2.imwrite('tests/output/lus_alternative_posn_image.png', image)

    masks = generator.get_masks()
    for name, mask_image in masks.items():
        cv2.imwrite(f'tests/output/lus_alternative_posn_mask_{name}.png',
                    mask_image)
def test_matrices_rendering_generator(setup_vtk_err):
    """
    Testing rendering generator returns the same images if matrix
    used or params used.

    For each of two poses: renders via set_pose (parameter form), then
    re-renders via set_pose_with_matrices using the matrices returned by
    set_pose, and asserts image and masks are identical.
    """
    _, app = setup_vtk_err
    model_file = "tests/data/lus/test_data.json"
    background_file = "tests/data/rendering/background-960-x-540-black.png"
    intrinsics_file = "tests/data/liver/calib.left.intrinsics.halved.txt"
    reference_l2c_file = "tests/data/lus/spp_liver2camera.txt"
    # BUG FIX: this previously pointed at spp_liver2camera.txt (copy-paste
    # error); the probe-to-camera reference must be the probe file, as in
    # test_basic_rendering_generator above.
    reference_p2c_file = "tests/data/lus/spp_probe2camera.txt"

    generator = lus.VTKLUSSimulator(model_file,
                                    background_file,
                                    intrinsics_file,
                                    reference_l2c_file,
                                    reference_p2c_file)

    # First generate image at reference pose exactly.
    l2c, p2c, angle, position = generator.set_pose(
        [0, 0, 0, 0, 0, 0],  # anatomy rx, ry, rz, tx, ty, tz
        [0, 0, 0, 0, 0, 0],  # probe rx, ry, rz, tx, ty, tz
        0,
        None)
    generator.show()
    generator.setFixedSize(960, 540)
    image = generator.get_image()
    cv2.imwrite('tests/output/lus_refererence_posn_image.png', image)
    masks = generator.get_masks()
    for mask in masks.keys():
        cv2.imwrite('tests/output/lus_refererence_posn_mask_' + mask + '.png',
                    masks[mask])

    # Check that the resulting masks with set_pose_with_matrices method are the same
    generator.set_pose_with_matrices(p2c, l2c, angle)
    generator.show()
    generator.setFixedSize(960, 540)
    image_w_matrix = generator.get_image()
    # BUG FIX: previously wrote `image` (the parameter-form render) here,
    # so the *_w_matrices.png file never contained the matrix-form render.
    cv2.imwrite('tests/output/lus_refererence_posn_image_w_matrices.png',
                image_w_matrix)
    masks_w_matrix = generator.get_masks()
    for mask in masks_w_matrix.keys():
        cv2.imwrite('tests/output/lus_refererence_posn_mask_' + mask + '_w_matrices.png',
                    masks_w_matrix[mask])
        assert np.allclose(masks_w_matrix[mask], masks[mask])
    assert np.allclose(image, image_w_matrix)

    # Now try another pose.
    # NOTE(review): the output filenames below reuse the reference-pose names
    # and therefore overwrite the files written above — confirm intended.
    l2c, p2c, angle, position = generator.set_pose(
        [20, 30, 40, 5, 10, 15],  # anatomy rx, ry, rz, tx, ty, tz
        [2, 3, 4, 5, 6, 7],       # probe rx, ry, rz, tx, ty, tz
        -20,
        [10.97657775878900566, -80.58924865722650566, -27.99212646484369316])
    generator.show()
    generator.setFixedSize(960, 540)
    image = generator.get_image()
    cv2.imwrite('tests/output/lus_refererence_posn_image.png', image)
    masks = generator.get_masks()
    for mask in masks.keys():
        cv2.imwrite('tests/output/lus_refererence_posn_mask_' + mask + '.png',
                    masks[mask])

    # Check that the resulting masks with set_pose_with_matrices method are the same
    generator.set_pose_with_matrices(p2c, l2c, angle)
    generator.show()
    generator.setFixedSize(960, 540)
    image_w_matrix = generator.get_image()
    # BUG FIX: same as above — save the matrix-form render, not `image`.
    cv2.imwrite('tests/output/lus_refererence_posn_image_w_matrices.png',
                image_w_matrix)
    masks_w_matrix = generator.get_masks()
    for mask in masks_w_matrix.keys():
        cv2.imwrite('tests/output/lus_refererence_posn_mask_' + mask + '_w_matrices.png',
                    masks_w_matrix[mask])
        assert np.allclose(masks_w_matrix[mask], masks[mask])
    assert np.allclose(image, image_w_matrix)
| 43.071429
| 119
| 0.56822
| 764
| 6,633
| 4.700262
| 0.158377
| 0.011139
| 0.013367
| 0.013367
| 0.934002
| 0.91785
| 0.887775
| 0.877193
| 0.806461
| 0.793651
| 0
| 0.060072
| 0.332429
| 6,633
| 153
| 120
| 43.352941
| 0.750903
| 0.09694
| 0
| 0.754386
| 0
| 0
| 0.216924
| 0.194762
| 0
| 0
| 0
| 0
| 0.035088
| 1
| 0.017544
| false
| 0
| 0.035088
| 0
| 0.052632
| 0.070175
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f20383808f68e09bb3cd623e366072d85bb84596
| 23,873
|
py
|
Python
|
app/data_utils/util_data.py
|
jasonwangwatercup/mktdata_gathering
|
410a4c0db33fcfa72e72218da6fa71f894337913
|
[
"MIT"
] | 3
|
2019-02-28T09:13:41.000Z
|
2020-07-13T05:32:00.000Z
|
app/data_utils/util_data.py
|
jasonwangwatercup/mktdata_gathering
|
410a4c0db33fcfa72e72218da6fa71f894337913
|
[
"MIT"
] | null | null | null |
app/data_utils/util_data.py
|
jasonwangwatercup/mktdata_gathering
|
410a4c0db33fcfa72e72218da6fa71f894337913
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Interactive (pdb-gated) sqlite diff-and-insert helpers for market data.
# NOTE(review): this module uses Python 2 print statements.
import pdb
import sqlite3
# Cap on how many diff rows are printed verbatim before falling back to the
# grouped-by-date summary (see sp_insert_diff_print2).
MAX_LINE_TO_PRINT = 30
# Supported exchange identifiers (anonymized). NOTE(review): 'xxxx_futopt'
# is already listed here AND generate_params(add_fo=True) appends it again;
# confirm which source is authoritative.
exchange_list = ('zzzz', 'xxxx', 'yyyy', 'wwww', 'xxxx_futopt')
# Derived name lists; populated by generate_params().
ins_list = None
ins_tmp_list = None
def generate_params(add_fo=False):
    """Normalise module-level exchange names and derive instrument-table names.

    Rewrites ``exchange_list`` (stripped, lower-cased), then derives
    ``ins_list`` ('<EXCH>ins') and ``ins_tmp_list`` ('tmp_<EXCH>ins').

    :param add_fo: when True, ensure 'xxxx_futopt' is present in
        ``exchange_list``.
    """
    # sqls_daily / sqls_ins are declared global for symmetry with the other
    # helpers but are never assigned here.
    global exchange_list, ins_list, ins_tmp_list, sqls_daily, sqls_ins
    exchange_list = [str(i).strip().lower() for i in exchange_list]
    ins_list = [i.upper() + 'ins' for i in exchange_list]
    ins_tmp_list = ['tmp_' + i.upper() + 'ins' for i in exchange_list]
    # BUG FIX: the module-level tuple already contains 'xxxx_futopt', and an
    # unconditional append duplicated it, making len(exchange_list) disagree
    # with the 5-element SQL struct tuples checked in exchange_params().
    if add_fo is True and 'xxxx_futopt' not in exchange_list:
        exchange_list.append('xxxx_futopt')
def exchange_params(exchange_name, structs):
    """Return the SQL-struct entry of ``structs`` matching ``exchange_name``.

    ``structs`` must be parallel to the module-level ``exchange_list``
    (same length, same order).

    :param exchange_name: exchange identifier; stripped/lower-cased before
        lookup.
    :param structs: tuple of SQL dicts, one per exchange.
    :raises ValueError: if the name is not in ``exchange_list`` (after a
        pdb breakpoint for interactive inspection).
    """
    global exchange_list
    assert len(exchange_list) == len(structs)
    exchange_name = str(exchange_name).strip().lower()
    try:
        idx = exchange_list.index(exchange_name)
    except ValueError:
        print("%s not in exchange_list, PLEASE have a look!" % exchange_name)
        pdb.set_trace()
        # BUG FIX: previously fell through to `return structs[idx]` with
        # `idx` unbound, masking the ValueError with a NameError.
        raise
    return structs[idx]
def sp_insert_diff_print2(cur, out_list, dict_, name_sp=None, fun_print=None):
    """Interactively diff ``out_list`` against a table and insert the changes.

    Workflow: load ``out_list`` into a temp table (created/cleared via the
    SQL in ``dict_``), compute the diff against the real table, print the
    diff (grouped by date when larger than MAX_LINE_TO_PRINT), stop at a
    pdb breakpoint so the operator can inspect, then run ``dict_['insert']``
    and print the resulting counts and last records.

    Does nothing when ``out_list`` is empty.

    :param cur: sqlite3 cursor.
    :param out_list: rows to feed executemany with ``dict_['insert_tmp']``.
    :param dict_: SQL statement dict (see sqls_sidp2 for the expected keys).
    :param name_sp: when not None, open savepoint 'sp_sidp2' first; the
        caller is responsible for releasing it.
    :param fun_print: optional callable used to print the final records
        instead of repr().
    """
    global MAX_LINE_TO_PRINT
    # NOTE: prints below are parenthesized single-expression prints, which
    # produce identical output under Python 2 and are valid Python 3.
    if len(out_list) > 0:
        if name_sp is not None:
            cur.execute("SAVEPOINT 'sp_sidp2';")
        cur.execute(dict_['create_tmp'])
        try:
            cur.execute(dict_['delete_tmp'])
        except sqlite3.Error as err:
            print("Failed deletion: %s." % repr(err))
            pdb.set_trace()
        try:
            cur.executemany(dict_['insert_tmp'], out_list)
        except sqlite3.Error as err:
            print("Failed insertion: %s | %s." % (dict_['insert_tmp'], repr(err)))
            pdb.set_trace()
        tmpp = cur.execute(dict_['diff_tmp']).fetchall()
        if 0 >= len(tmpp):
            print("No bothering continuing since no changes at all. RETURN.")
            pdb.set_trace()
            return
        elif MAX_LINE_TO_PRINT < len(tmpp):
            # Too many rows to eyeball individually: show per-date counts.
            cur.execute(dict_['diff_tmp_grouped'])
            tmpp = cur.fetchall()
        print("Possible changes:")
        for ii in tmpp:
            print(repr(ii))
        print("WILL the replacement be REASONABLE??!! .. PLS have a THOUROUGH look.!")
        print("NEVER REPLACE ANYTHING if you do NOT know the possible results.")
        # Deliberate breakpoint: operator confirms before the insert runs.
        pdb.set_trace()
        try:
            cur.execute(dict_['insert'])
        except sqlite3.Error as err:
            print("Insert failed: %s." % (repr(err),))
            pdb.set_trace()
        print("After insertion total counts: %s\n" % cur.execute(dict_['print_count']).fetchone())
        print("Print *last 30* records:")
        tmp_print = cur.execute(dict_['limited_print']).fetchall()
        if 0 < len(tmp_print) and fun_print is None:
            for i in tmp_print:
                print(repr(i))
        elif 0 < len(tmp_print) and fun_print is not None:
            fun_print(tmp_print)
        print("Finish inserting. AND PLS DO NOT FORGET TO RELEASE THE SAVEPOINT if necessary.")
def update_daily(cur, out_list, exchange_name, name_sp=None, fun_print=None):
    """Diff-and-insert daily rows for *exchange_name* using its daily SQL set."""
    global sqls_daily
    sql_set = exchange_params(exchange_name, sqls_daily)
    sp_insert_diff_print2(cur, out_list, sql_set, name_sp, fun_print)
def update_ins(cur, out_list, exchange_name, name_sp=None, fun_print=None):
    """Diff-and-insert instrument rows for *exchange_name* using its SQL set."""
    global sqls_ins
    sql_set = exchange_params(exchange_name, sqls_ins)
    sp_insert_diff_print2(cur, out_list, sql_set, name_sp, fun_print)
# Template documenting the statement keys sp_insert_diff_print2 expects.
sqls_sidp2 = {'create_tmp': None, 'delete_tmp': None, 'insert_tmp': None,
              'diff_tmp': None, 'insert_replace': None, 'diff_tmp_grouped': None,
              'print_count': None, 'limited_print': None, 'insert_ignore': None, 'insert': None}
# Instrument-table SQL for exchange 'zzzz' (table ZZZZins / temp tmp_ZZZZins).
sqls_ins_zzzz = {
'create_tmp':
""" CREATE TEMP TABLE IF NOT EXISTS tmp_ZZZZins (
"instrumentid" TEXT UNIQUE NOT NULL,
"date_listed" INT NOT NULL,
"date_last" INT NOT NULL,
"date_delivery_first" INT NOT NULL,
"date_delivery_last" INT NOT NULL,
"price_listed" FLOAT NOT NULL); """,
'delete_tmp':
'DELETE from "tmp_ZZZZins";',
'insert_tmp':
'INSERT INTO "tmp_ZZZZins" ("instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed") values (?, ?, ?, ?, ?, ?);',
'diff_tmp':
""" select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed" from "tmp_ZZZZins" except select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed" from "ZZZZins"; """,
'insert_replace':
""" INSERT OR REPLACE INTO "ZZZZins" (
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed", "manual_flag", "update_time"
) select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed", 0, datetime()
from ( select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed"
from "tmp_ZZZZins" except select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed" from "ZZZZins") ORDER BY "date_listed"; """,
'diff_tmp_grouped':
""" select "date_listed", count(*) "cnt" from (select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed"
from tmp_ZZZZins except select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed"
from "ZZZZins") group by "date_listed" order by "date_listed"; """,
'print_count':
'select count(*) from "ZZZZins";',
'limited_print':"""
select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed", "manual_flag", "update_time"
from "ZZZZins" order by "update_time" desc limit 30;""",
'insert_ignore':
""" INSERT OR IGNORE INTO "ZZZZins" (
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed", "manual_flag", "update_time")
select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed", 0 , datetime()
from "tmp_ZZZZins"; """,
'insert':"""
INSERT INTO "ZZZZins" (
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed", "manual_flag", "update_time"
) select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed", 0, datetime()
from ( select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed"
from "tmp_ZZZZins" except select
"instrumentid", "date_listed", "date_last", "date_delivery_first", "date_delivery_last", "price_listed" from "ZZZZins") ORDER BY "date_listed";
""",
}
##
# Placeholder: instrument SQL for 'xxxx' not implemented yet.
sqls_ins_xxxx = {}
##
# Instrument-table SQL for exchange 'yyyy' (table YYYYins / temp tmp_YYYYins).
# The 'S0001'/'S9901'/'S9909' ids are excluded from the diff.
sqls_ins_yyyy = {
'create_tmp':
""" CREATE TEMP TABLE IF NOT EXISTS "tmp_YYYYins" (
"instrumentid" TEXT UNIQUE NOT NULL,
"lot" INT NOT NULL,
"min_price_diff" FLOAT NOT NULL,
"date_listed" INT NOT NULL,
"date_last" INT NOT NULL,
"date_delivery_last" INT NOT NULL); """,
'delete_tmp':
'DELETE from "tmp_YYYYins";',
'insert_tmp':
'INSERT INTO "tmp_YYYYins"("instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last") values (?, ?, ?, ?, ?, ?);',
'diff_tmp':
""" select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "tmp_YYYYins"
where "instrumentid" not in ('S0001', 'S9901', 'S9909')
except select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "YYYYins"; """,
'insert_replace':
""" INSERT OR REPLACE INTO "YYYYins"(
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last", "manual_flag", "update_time"
) select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last", 0, datetime()
from ( select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "tmp_YYYYins" except select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "YYYYins")
where "instrumentid" not in ('S0001', 'S9901', 'S9909')
ORDER BY "date_listed"; """,
'diff_tmp_grouped':
""" select "date_listed", count(*) cnt from (select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "tmp_YYYYins"
where "instrumentid" not in ('S0001', 'S9901', 'S9909')
except select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "YYYYins") group by "date_listed" order by "date_listed"; """,
'print_count':
'select count(*) from "YYYYins";',
'limited_print':"""
select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last", "manual_flag", "update_time"
from "YYYYins" order by "update_time" desc limit 30;""",
'insert_ignore':
""" INSERT OR IGNORE INTO YYYYins(
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last", "manual_flag", "update_time"
) select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last", 0, datetime()
from "tmp_YYYYins"; """,
'insert':"""
INSERT INTO "YYYYins"(
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last", "manual_flag", "update_time"
) select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last", 0, datetime()
from ( select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "tmp_YYYYins" except select
"instrumentid", "lot", "min_price_diff", "date_listed", "date_last", "date_delivery_last"
from "YYYYins")
where "instrumentid" not in ('S0001', 'S9901', 'S9909')
ORDER BY "date_listed";
""",
}
##
# Placeholders: instrument SQL for 'wwww' and 'xxxx' fut/opt not implemented.
sqls_ins_wwww = {}
sqls_ins_xxxx_fo = {}
# Parallel to exchange_list; indexed by exchange_params().
sqls_ins =(sqls_ins_zzzz, sqls_ins_xxxx, sqls_ins_yyyy, sqls_ins_wwww, sqls_ins_xxxx_fo)
# Daily-bar SQL for exchange 'zzzz' (table daily / temp tmp_daily).
sqls_daily_zzzz = {
'create_tmp':
"""
CREATE TEMP TABLE IF NOT EXISTS "tmp_daily" (
"tradingday" INT NOT NULL,
"instrumentid" TEXT NOT NULL,
"open" DOUBLE ,
"high" DOUBLE ,
"low" DOUBLE ,
"close" DOUBLE NOT NULL,
"volume" INT NOT NULL,
"openint" INT NOT NULL,
"settlementprice" DOUBLE NOT NULL,
"amount" DOUBLE NOT NULL,
"PreSettlementPrice" DOUBLE NOT NULL,
"PreClosePrice" DOUBLE NOT NULL,
"deltaPrice1" DOUBLE NOT NULL,
"deltaPrice2" DOUBLE NOT NULL,
UNIQUE ("tradingday", "instrumentid")
);
""",
'delete_tmp': 'DELETE FROM "tmp_daily";',
'insert_tmp':
"""INSERT INTO "tmp_daily"
("instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint")
values (?,?,?,?,?,?,?,?,?,?,?,?,?,?); """,
'diff_tmp':
""" select * from (select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "tmp_daily" except select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "daily") order by "tradingday"; """,
'insert_replace':
""" INSERT OR REPLACE INTO "daily"(
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint") select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from ( select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "tmp_daily" except select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "daily") order by "tradingday"; """,
'diff_tmp_grouped':
""" select "tradingday", count(*) "cnt" from ( select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "tmp_daily" except select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "daily") group by "tradingday" order by "tradingday"; """,
'print_count': "select count(*) from daily;",
'limited_print': """select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from daily order by id desc limit 30;""",
'insert_ignore':
""" INSERT OR IGNORE INTO "daily" (
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
) select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "tmp_daily" order by "tradingday"; """,
'insert':"""
INSERT INTO "daily"(
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint") select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from (select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "tmp_daily" except select
"instrumentid","tradingday","precloseprice","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","amount","openint"
from "daily") order by "tradingday";
""",
}
##
# Daily-bar SQL for exchange 'xxxx' (table daily / temp tmp_daily).
sqls_daily_xxxx = {
'create_tmp':
"""
CREATE TEMP TABLE IF NOT EXISTS "tmp_daily" (
"tradingday" INT NOT NULL,
"instrumentid" TEXT NOT NULL,
"open" DOUBLE NOT NULL,
"high" DOUBLE NOT NULL,
"low" DOUBLE NOT NULL,
"close" DOUBLE NOT NULL,
"volume" INT NOT NULL,
"openint" INT NOT NULL,
"settlementprice" DOUBLE NOT NULL,
"amount" DOUBLE NOT NULL,
"PreSettlementPrice" DOUBLE NOT NULL,
-- PreClosePrice DOUBLE NOT NULL,
"deltaPrice1" DOUBLE NOT NULL,
"deltaPrice2" DOUBLE NOT NULL,
"deltaOpenint" INT NOT NULL,
"settlementForDelivery" DOUBLE NOT NULL,
UNIQUE ("tradingday", "instrumentid"),
CHECK (("open" between "low" and "high") and ("close" between "low" and "high")));
""",
'delete_tmp': 'DELETE FROM "tmp_daily";',
'insert_tmp':
"""INSERT INTO "tmp_daily"
("tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery") values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?); """,
'diff_tmp':
""" select * from (select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "tmp_daily" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "daily") order by "tradingday";""",
'insert_replace':
""" INSERT OR REPLACE INTO "daily" (
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery")
select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from ( select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "tmp_daily" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "daily") order by "tradingday"; """,
'diff_tmp_grouped':
""" select "tradingday", count(*) "cnt" from ( select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "tmp_daily" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "daily") group by "tradingday" order by "tradingday"; """,
'print_count': 'select count(*) from "daily";',
'limited_print': """
select
"id","tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "daily" where "volume" > 0 order by "id" desc limit 30;
""",
'insert_ignore':
""" INSERT OR IGNORE INTO "daily" (
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
) select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "tmp_daily" order by "tradingday"; """,
'insert':"""
INSERT INTO "daily" (
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery")
select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from ( select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "tmp_daily" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","settlementForDelivery"
from "daily") order by "tradingday";
""",
}
# BUG FIX applied above: 'insert_ignore' was missing the ')' that closes the
# column list before 'select', which made the statement a SQL syntax error
# (compare 'insert_replace' here and sqls_ins_zzzz['insert_ignore']).
##
# Placeholder: daily SQL for 'yyyy' not implemented yet.
sqls_daily_yyyy = {}
##
# Placeholder: daily SQL for 'wwww' not implemented yet.
sqls_daily_wwww = {}
##
# Daily-bar SQL for 'xxxx' futures/options (table daily_futopt).
sqls_daily_xxxx_fo = {
'create_tmp':
"""
CREATE TEMP TABLE IF NOT EXISTS "tmp_daily_futopt" (
"tradingday" INT NOT NULL,
"instrumentid" TEXT NOT NULL,
"open" DOUBLE NOT NULL,
"high" DOUBLE NOT NULL,
"low" DOUBLE NOT NULL,
"close" DOUBLE NOT NULL,
"volume" INT NOT NULL,
"openint" INT NOT NULL,
"settlementprice" DOUBLE NOT NULL,
"amount" DOUBLE NOT NULL,
"PreSettlementPrice" DOUBLE NOT NULL,
"deltaPrice1" DOUBLE NOT NULL,
"deltaPrice2" DOUBLE NOT NULL,
"deltaOpenint" INT NOT NULL,
"delta" DOUBLE NOT NULL,
"impl_vol" DOUBLE NOT NULL,
"vol_exe" INTEGER NOT NULL,
UNIQUE ("tradingday", "instrumentid"),
CHECK (("open" between "low" and "high") and ("close" between "low" and "high")));
""",
'delete_tmp': 'DELETE FROM "tmp_daily_futopt";',
'insert_tmp':
"""INSERT INTO "tmp_daily_futopt"
("tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe") values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?); """,
'diff_tmp':
""" select * from (select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "tmp_daily_futopt" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "daily_futopt") order by "tradingday";""",
'insert_replace':
""" INSERT OR REPLACE INTO "daily_futopt" (
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe")
select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from ( select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "tmp_daily_futopt" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "daily_futopt") order by "tradingday"; """,
'diff_tmp_grouped':
""" select "tradingday", count(*) "cnt" from ( select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "tmp_daily_futopt" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "daily_futopt") group by "tradingday" order by "tradingday"; """,
'print_count': 'select count(*) from "daily_futopt";',
'limited_print': """
select
"id","tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "daily_futopt" where "volume" > 0 order by "id" desc limit 30;
""",
'insert_ignore':
""" INSERT OR IGNORE INTO "daily_futopt" (
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
) select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "tmp_daily_futopt" order by "tradingday"; """,
'insert':"""
INSERT INTO "daily_futopt" (
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe")
select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from ( select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "tmp_daily_futopt" except select
"tradingday","instrumentid","presettlementprice","open","high","low","close","settlementprice","deltaPrice1","deltaPrice2","volume","openint","deltaOpenint","amount","delta","impl_vol","vol_exe"
from "daily_futopt") order by "tradingday";
""",
}
# BUG FIX applied above: 'insert_ignore' was missing the ')' closing the
# column list before 'select' (same defect as in sqls_daily_xxxx).
sqls_daily = (sqls_daily_zzzz, sqls_daily_xxxx, sqls_daily_yyyy, sqls_daily_wwww, sqls_daily_xxxx_fo)
| 48.920082
| 245
| 0.703891
| 2,752
| 23,873
| 5.900073
| 0.067951
| 0.024142
| 0.076861
| 0.08573
| 0.888649
| 0.877625
| 0.858225
| 0.842397
| 0.830572
| 0.825152
| 0
| 0.008724
| 0.106941
| 23,873
| 487
| 246
| 49.020534
| 0.752861
| 0.00088
| 0
| 0.416327
| 1
| 0.097959
| 0.650804
| 0.299598
| 0
| 0
| 0
| 0
| 0.004082
| 0
| null | null | 0
| 0.008163
| null | null | 0.138776
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
486c6736e43246d360dbfa3421cf92c6a8d90770
| 8,057
|
py
|
Python
|
s3splitmerge/tests/run.py
|
MacHu-GWU/s3splitmerge-project
|
873892158f4a2d0ee20f291e5d3b2a80f0bae1ba
|
[
"MIT"
] | null | null | null |
s3splitmerge/tests/run.py
|
MacHu-GWU/s3splitmerge-project
|
873892158f4a2d0ee20f291e5d3b2a80f0bae1ba
|
[
"MIT"
] | null | null | null |
s3splitmerge/tests/run.py
|
MacHu-GWU/s3splitmerge-project
|
873892158f4a2d0ee20f291e5d3b2a80f0bae1ba
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import typing
import awswrangler as wr
from .data import (
create_s3_csv_file,
create_s3_json_file,
create_many_parquet_file,
create_many_json_file,
)
from ..merge import (
merge_parquet_by_prefix,
merge_json_by_prefix,
)
from ..helpers import (
is_s3_object_exists,
)
def run_test_split_csv(
    boto_ses,
    n_k_rows: int,
    header: bool,
    source_bucket: str,
    source_key: str,
    target_bucket: str,
    target_key: str,
    target_size_or_rows: int,
    split_csv_func: typing.Callable,
    force_redo: bool,
):
    """
    A parameterized split_csv_... function unit test executor.

    Creates (if needed) one big CSV at s3://source_bucket/source_key, runs
    ``split_csv_func`` to split it into files named by
    ``target_key.format(i=...)``, then verifies the output: ids are
    contiguous across consecutive target files and the total row count
    equals ``n_k_rows * 1000``.
    """
    s3_client = boto_ses.client("s3")
    # Create single source csv file if not exists
    if (force_redo) or (not is_s3_object_exists(s3_client, source_bucket, source_key)):
        create_s3_csv_file(
            boto_ses=boto_ses,
            n_k_rows=n_k_rows,
            header=header,
            bucket=source_bucket,
            key=source_key,
        )
    # If the first target file doesn't exist, execute split csv
    first_target_key = target_key.format(i=1)
    if (force_redo) or (not is_s3_object_exists(s3_client, target_bucket, first_target_key)):
        split_csv_func(
            s3_client,
            source_bucket,
            source_key,
            target_bucket,
            target_key,
            target_size_or_rows,
            header,
        )
    # Verify small target csv files.
    # NOTE(review): list_objects returns at most 1000 keys, in lexicographic
    # order; the continuity assert below assumes that order matches numeric
    # file order -- confirm for runs producing more than 9 target files.
    common_target_key_prefix = target_key.replace("{i}.csv", "")
    response = s3_client.list_objects(Bucket=target_bucket, Prefix=common_target_key_prefix)
    n_rows_total = 0
    previous_last_id = None
    if header:
        read_csv_additional_kwargs = {}
    else:
        read_csv_additional_kwargs = {"header": None}
    # enumerate(..., start=1) replaces the original manual ``nth_file += 1``.
    for nth_file, obj_meta in enumerate(response["Contents"], start=1):
        key = obj_meta["Key"]
        df = wr.s3.read_csv(
            path=f"s3://{target_bucket}/{key}",
            boto3_session=boto_ses,
            **read_csv_additional_kwargs
        )
        n_rows = df.shape[0]
        if header:
            first_id = df["id"].head(1).tolist()[0]
            last_id = df["id"].tail(1).tolist()[0]
        else:
            # Headerless files: the id column is the first column.
            first_id = df[df.columns[0]].head(1).tolist()[0]
            last_id = df[df.columns[0]].tail(1).tolist()[0]
        n_rows_total += n_rows
        if nth_file != 1:
            # ids must continue exactly where the previous file stopped.
            assert previous_last_id + 1 == first_id
        previous_last_id = last_id
    assert n_rows_total == n_k_rows * 1000
def run_test_split_json(
    boto_ses,
    n_k_rows: int,
    source_bucket: str,
    source_key: str,
    target_bucket: str,
    target_key: str,
    target_size_or_rows: int,
    split_json_func: typing.Callable,
    force_redo: bool,
):
    """
    A parameterized split_json_... function unit test executor.

    Creates one large source JSON-lines file of ``n_k_rows * 1000`` rows
    (unless it already exists), runs ``split_json_func`` to split it into
    chunks named via ``target_key.format(i=...)``, then verifies that the
    chunks carry contiguous ``id`` values and the expected total row count.
    """
    s3_client = boto_ses.client("s3")
    # Create single source JSON file if not exists
    if (force_redo) or (not is_s3_object_exists(s3_client, source_bucket, source_key)):
        create_s3_json_file(
            boto_ses=boto_ses,
            n_k_rows=n_k_rows,
            bucket=source_bucket,
            key=source_key,
        )
    # If the first target file doesn't exist, execute split json
    first_target_key = target_key.format(i=1)
    if (force_redo) or (not is_s3_object_exists(s3_client, target_bucket, first_target_key)):
        split_json_func(
            s3_client,
            source_bucket,
            source_key,
            target_bucket,
            target_key,
            target_size_or_rows,
        )
    # Verify small target json files
    common_target_key_prefix = target_key.replace("{i}.json", "")
    response = s3_client.list_objects(Bucket=target_bucket, Prefix=common_target_key_prefix)
    n_rows_total = 0
    previous_last_id = None
    for nth_file, obj_meta in enumerate(response["Contents"], start=1):
        key = obj_meta["Key"]
        df = wr.s3.read_json(
            path=f"s3://{target_bucket}/{key}",
            orient="records",
            lines=True,
            # Bug fix: read with the caller's session. Previously this call
            # fell back to the default boto3 session, inconsistent with
            # every sibling function in this module.
            boto3_session=boto_ses,
        )
        n_rows = df.shape[0]
        first_id = df["id"].head(1).tolist()[0]
        last_id = df["id"].tail(1).tolist()[0]
        n_rows_total += n_rows
        if nth_file != 1:
            # Ids must continue exactly where the previous file stopped.
            assert previous_last_id + 1 == first_id
        previous_last_id = last_id
    # All rows from the source file must be accounted for.
    assert n_rows_total == n_k_rows * 1000
def run_test_merge_parquet(
    boto_ses,
    n_files: int,
    n_rows_per_file: int,
    source_bucket: str,
    source_key: str,
    target_bucket: str,
    target_key: str,
    target_size: int,
    force_redo: bool,
):
    """
    A parameterized merge-parquet unit test executor.

    Creates ``n_files`` small parquet files, merges everything under the
    common source prefix into files of roughly ``target_size``, then
    verifies that merged files carry contiguous ``id`` values and the
    expected total row count.

    NOTE(review): unlike the split-test executors, ``force_redo`` is not
    consulted here -- the source data is always recreated (overwrite=True).
    Confirm whether that is intentional.
    """
    s3_client = boto_ses.client("s3")

    # Always (re)create the many small parquet source files.
    create_many_parquet_file(
        boto_ses,
        n_files=n_files,
        n_rows_per_file=n_rows_per_file,
        bucket=source_bucket,
        key=source_key,
        overwrite=True,
    )

    # Merge all files sharing the common source prefix.
    common_source_key_prefix = source_key.replace("{i}.parquet", "")
    merge_parquet_by_prefix(
        boto3_session=boto_ses,
        source_bucket=source_bucket,
        source_key_prefix=common_source_key_prefix,
        target_bucket=target_bucket,
        target_key=target_key,
        target_size=target_size,
    )

    # Verify the merged parquet files: contiguous ids, correct total rows.
    common_target_key_prefix = target_key.replace("{i}.parquet", "")
    response = s3_client.list_objects(Bucket=target_bucket, Prefix=common_target_key_prefix)
    n_rows_total = 0
    previous_last_id = None
    for nth_file, obj_meta in enumerate(response["Contents"], start=1):
        key = obj_meta["Key"]
        df = wr.s3.read_parquet(path=f"s3://{target_bucket}/{key}", boto3_session=boto_ses)
        first_id = df["id"].iloc[0]
        last_id = df["id"].iloc[-1]
        n_rows_total += df.shape[0]
        if nth_file != 1:
            # Ids must continue exactly where the previous file stopped.
            assert previous_last_id + 1 == first_id
        previous_last_id = last_id
    assert n_rows_total == n_files * n_rows_per_file
def run_test_merge_json(
    boto_ses,
    n_files: int,
    n_rows_per_file: int,
    source_bucket: str,
    source_key: str,
    target_bucket: str,
    target_key: str,
    target_size: int,
    force_redo: bool,
):
    """
    A parameterized merge-json unit test executor.

    Creates ``n_files`` small JSON-lines files, merges everything under the
    common source prefix into files of roughly ``target_size``, then
    verifies that merged files carry contiguous ``id`` values and the
    expected total row count.

    NOTE(review): ``force_redo`` is not consulted here -- the source data
    is always recreated (overwrite=True). Confirm whether that is
    intentional.
    """
    s3_client = boto_ses.client("s3")

    # Always (re)create the many small JSON source files.
    # (The original comment said "parquet"; this function works on JSON.)
    create_many_json_file(
        boto_ses,
        n_files=n_files,
        n_rows_per_file=n_rows_per_file,
        bucket=source_bucket,
        key=source_key,
        overwrite=True,
    )

    # Merge all files sharing the common source prefix.
    common_source_key_prefix = source_key.replace("{i}.json", "")
    merge_json_by_prefix(
        s3_client=s3_client,
        source_bucket=source_bucket,
        source_key_prefix=common_source_key_prefix,
        target_bucket=target_bucket,
        target_key=target_key,
        target_size=target_size,
    )

    # Verify the merged JSON files: contiguous ids, correct total rows.
    common_target_key_prefix = target_key.replace("{i}.json", "")
    response = s3_client.list_objects(Bucket=target_bucket, Prefix=common_target_key_prefix)
    n_rows_total = 0
    previous_last_id = None
    for nth_file, obj_meta in enumerate(response["Contents"], start=1):
        key = obj_meta["Key"]
        df = wr.s3.read_json(
            path=f"s3://{target_bucket}/{key}",
            orient="records",
            lines=True,
            boto3_session=boto_ses,
        )
        first_id = df["id"].iloc[0]
        last_id = df["id"].iloc[-1]
        n_rows_total += df.shape[0]
        if nth_file != 1:
            # Ids must continue exactly where the previous file stopped.
            assert previous_last_id + 1 == first_id
        previous_last_id = last_id
    assert n_rows_total == n_files * n_rows_per_file
| 31.84585
| 93
| 0.601092
| 1,073
| 8,057
| 4.129543
| 0.101584
| 0.056872
| 0.027082
| 0.037915
| 0.86459
| 0.854435
| 0.84789
| 0.843376
| 0.843376
| 0.791695
| 0
| 0.016744
| 0.303215
| 8,057
| 252
| 94
| 31.972222
| 0.772533
| 0.067147
| 0
| 0.744186
| 0
| 0
| 0.032785
| 0.013917
| 0
| 0
| 0
| 0
| 0.037209
| 1
| 0.018605
| false
| 0
| 0.023256
| 0
| 0.04186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
486fe06032e8dd5fd55768a1304d967d96385a3c
| 3,099
|
py
|
Python
|
workflows/vipercharts/visualization_views.py
|
xflows/clowdflows
|
697b36ebc976d1ba4ab726bda2fc4593422af080
|
[
"MIT"
] | 38
|
2015-11-21T08:16:14.000Z
|
2021-06-22T16:14:12.000Z
|
workflows/vipercharts/visualization_views.py
|
chimeng089/clowdflows
|
e19bf57906e893d8f0be93329168b76eae758384
|
[
"MIT"
] | 21
|
2015-12-29T16:34:48.000Z
|
2022-03-11T23:14:48.000Z
|
workflows/vipercharts/visualization_views.py
|
chimeng089/clowdflows
|
e19bf57906e893d8f0be93329168b76eae758384
|
[
"MIT"
] | 26
|
2016-01-11T17:51:07.000Z
|
2022-02-24T11:49:40.000Z
|
import sys
from django.shortcuts import render
from django.http import Http404, HttpResponse
from workflows import module_importer
from django.shortcuts import render
def vipercharts_display_summation(request, input_dict, output_dict, widget):
    """Render the integer-summation visualization with a correctness check."""
    is_correct = sum(input_dict['intList']) == input_dict['sum']
    if is_correct:
        check = 'The calculation appears correct.'
    else:
        check = 'The calculation appears incorrect!'
    context = {
        'widget': widget,
        'input_dict': input_dict,
        'output_dict': output_dict,
        'check': check,
    }
    return render(request, 'visualizations/vipercharts_display_integers.html', context)
# Scatter charts
def vipercharts_pr_space_view(request, input_dict, output_dict, widget):
    """Render the PR-space scatter chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/pr_space.html', context)
def vipercharts_roc_space_view(request, input_dict, output_dict, widget):
    """Render the ROC-space scatter chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/roc_space.html', context)
# Curve charts
def vipercharts_roc_curve_view(request, input_dict, output_dict, widget):
    """Render the ROC curve chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/roc_curve.html', context)
def vipercharts_roc_hull_view(request, input_dict, output_dict, widget):
    """Render the ROC convex-hull chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/roc_hull.html', context)
def vipercharts_pr_curve_view(request, input_dict, output_dict, widget):
    """Render the precision-recall curve chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/pr_curve.html', context)
def vipercharts_lift_curve_view(request, input_dict, output_dict, widget):
    """Render the lift curve chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/lift_curve.html', context)
def vipercharts_cost_curve_view(request, input_dict, output_dict, widget):
    """Render the cost curve chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/cost_curve.html', context)
def vipercharts_kendall_curve_view(request, input_dict, output_dict, widget):
    """Render the Kendall curve chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/kendall_curve.html', context)
def vipercharts_rate_curve_view(request, input_dict, output_dict, widget):
    """Render the rate-driven curve chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/rate_curve.html', context)
# Column charts
def vipercharts_column_chart_view(request, input_dict, output_dict, widget):
    """Render the column chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/column_chart.html', context)
def vipercharts_eval_bar_chart_view(request, input_dict, output_dict, widget):
    """Render the evaluation bar chart for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/eval_bar_chart.html', context)
# Related results table
def vipercharts_related_table_view(request, input_dict, output_dict, widget):
    """Render the related-results table for the given widget."""
    context = {'widget': widget, 'input_dict': input_dict, 'output_dict': output_dict}
    return render(request, 'visualizations/related_results_table.html', context)
| 57.388889
| 162
| 0.805744
| 420
| 3,099
| 5.621429
| 0.128571
| 0.15629
| 0.231258
| 0.209233
| 0.785684
| 0.759424
| 0.74587
| 0.74587
| 0.74587
| 0.74587
| 0
| 0.001041
| 0.070023
| 3,099
| 54
| 163
| 57.388889
| 0.818182
| 0.020652
| 0
| 0.057143
| 0
| 0
| 0.281188
| 0.138614
| 0
| 0
| 0
| 0
| 0
| 1
| 0.371429
| false
| 0
| 0.142857
| 0.342857
| 0.885714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
4888f503dba32549a0d734b814dedde61eb7feca
| 24,595
|
py
|
Python
|
tests/test_match.py
|
DalavanCloud/tmppy
|
cdde676ba9d5011b7d2a46a9852e5986b90edbbc
|
[
"Apache-2.0"
] | 1
|
2018-09-01T18:14:26.000Z
|
2018-09-01T18:14:26.000Z
|
tests/test_match.py
|
DalavanCloud/tmppy
|
cdde676ba9d5011b7d2a46a9852e5986b90edbbc
|
[
"Apache-2.0"
] | null | null | null |
tests/test_match.py
|
DalavanCloud/tmppy
|
cdde676ba9d5011b7d2a46a9852e5986b90edbbc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from py2tmp.testing import *
# Happy path: match() on a pointer-to-function type binds T/U from the
# patterns; for Type('int') the more specialized second branch applies
# and returns the bound U (== Type('int')).
@assert_compilation_succeeds()
def test_match_success():
    from tmppy import Type, match
    def f(x: Type):
        return match(x)(lambda T, U: {
            Type.pointer(Type.function(T, [U])):
                Type('double'),
            Type.pointer(Type.function(Type('int'), [U])):
                U,
            Type.pointer(Type.function(Type('float'), [U])):
                U,
        })
    assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == Type('int')
# Same scenario as test_match_success, but the match() result is first
# bound to a local variable and returned on a separate statement.
@assert_compilation_succeeds()
def test_match_in_assignment_success():
    from tmppy import Type, match
    def f(x: Type):
        result = match(x)(lambda T, U: {
            Type.pointer(Type.function(T, [U])):
                Type('double'),
            Type.pointer(Type.function(Type('int'), [T])):
                T,
            Type.pointer(Type.function(Type('float'), [T])):
                T,
        })
        return result
    assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == Type('int')
@assert_compilation_succeeds()
def test_match_calling_function_success():
from tmppy import Type, match
def id(x: Type):
return x
def f(x: Type):
result = match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
Type('double'),
Type.pointer(Type.function(Type('int'), [T])):
id(T),
Type.pointer(Type.function(Type('float'), [T])):
T,
})
return result
assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == Type('int')
@assert_compilation_succeeds()
def test_match_multiple_success():
from tmppy import Type, match
def f(y: Type):
return match(Type.pointer(Type('int')), y)(lambda T, U: {
(T, U):
False,
(Type.pointer(T), Type.pointer(U)):
True,
})
assert f(Type.pointer(Type.pointer(Type('double'))))
@assert_compilation_succeeds()
def test_match_with_capture_success():
from tmppy import Type, match
def f(x: Type, y: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
Type('double'),
Type.pointer(Type.function(Type('int'), [T])):
y,
Type.pointer(Type.function(Type('float'), [T])):
T,
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')])), Type('bool')) == Type('bool')
@assert_compilation_succeeds()
def test_nested_match_success():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
T:
Type('double'),
Type.pointer(Type.function(T, [U])):
match(T, U)(lambda V: {
(Type('int'), V):
V,
(Type('float'), V):
Type('bool'),
}),
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == Type('int')
@assert_compilation_succeeds()
def test_nested_match_with_capture_outer_only():
from tmppy import Type, match
def f(x: Type, y: Type):
return match(x)(lambda T, U: {
T:
y,
Type.pointer(Type.function(T, [U])):
match(T, U)(lambda V: {
(Type('int'), V):
V,
(Type('float'), V):
Type('bool'),
}),
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')])), Type('bool')) == Type('int')
@assert_compilation_succeeds()
def test_nested_match_with_capture():
from tmppy import Type, match
def f(x: Type, y: Type):
return match(x)(lambda T, U: {
T:
y,
Type.pointer(Type.function(T, [U])):
match(T, U)(lambda V: {
(Type('int'), V):
y,
(Type('float'), V):
Type('bool'),
}),
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')])), Type('bool')) == Type('bool')
@assert_compilation_succeeds()
def test_match_with_equality_comparison_success():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
Type('double') == Type('int'),
Type.pointer(Type.function(Type('int'), [T])):
T == Type('int'),
Type.pointer(Type.function(Type('float'), [T])):
T == Type('int'),
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')])))
@assert_compilation_succeeds()
def test_match_with_function_expr_call():
from tmppy import Type, match
def g(x: Type):
return x
def h(x: Type):
return g
def f(x: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
h(T)(Type('double')),
Type.pointer(Type.function(Type('int'), [T])):
T,
Type.pointer(Type.function(Type('float'), [T])):
T,
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == Type('int')
@assert_compilation_succeeds()
def test_match_with_list_expr_call():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
[Type('double')],
Type.pointer(Type.function(Type('int'), [T])):
[T],
Type.pointer(Type.function(Type('float'), [T])):
[T],
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == [Type('int')]
@assert_compilation_succeeds()
def test_match_with_set_expr_call():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
{Type('double')},
Type.pointer(Type.function(Type('int'), [T])):
{T},
Type.pointer(Type.function(Type('float'), [T])):
{T},
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == {Type('int')}
@assert_compilation_succeeds()
def test_match_with_int_expr_call():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
1,
Type.pointer(Type.function(Type('int'), [T])):
2,
Type.pointer(Type.function(Type('float'), [T])):
3,
})
assert f(Type.pointer(Type.function(Type('int'), [Type('int')]))) == 2
@assert_compilation_succeeds()
def test_match_main_definition_uses_param_success():
from tmppy import Type, match, empty_list
def f(x: Type):
return match(x)(lambda T: {
T:
T,
Type.pointer(Type.function(Type('int'), empty_list(Type))):
Type('bool'),
})
assert f(Type('int')) == Type('int')
@assert_compilation_succeeds()
def test_reference_type_expr_as_match_expr_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.reference(T):
T,
T:
Type('double'),
})
assert f(Type.reference(Type('int'))) == Type('int')
@assert_compilation_succeeds()
def test_reference_type_expr_as_match_expr_not_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.reference(T):
T,
T:
Type('double'),
})
assert f(Type.rvalue_reference(Type('int'))) == Type('double')
@assert_compilation_succeeds()
def test_rvalue_reference_type_expr_as_match_expr_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.rvalue_reference(T):
T,
T:
Type('double'),
})
assert f(Type.rvalue_reference(Type('int'))) == Type('int')
@assert_compilation_succeeds()
def test_rvalue_reference_type_expr_as_match_expr_not_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.rvalue_reference(T):
T,
T:
Type('double'),
})
assert f(Type.pointer(Type('int'))) == Type('double')
@assert_compilation_succeeds()
def test_const_type_expr_as_match_expr_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.const(T):
T,
T:
Type('double'),
})
assert f(Type.const(Type('int'))) == Type('int')
@assert_compilation_succeeds()
def test_const_type_expr_as_match_expr_not_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.const(T):
T,
T:
Type('double'),
})
assert f(Type.pointer(Type('int'))) == Type('double')
@assert_compilation_succeeds()
def test_array_type_expr_as_match_expr_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.array(T):
T,
T:
Type('double'),
})
assert f(Type.array(Type('int'))) == Type('int')
@assert_compilation_succeeds()
def test_array_type_expr_as_match_expr_not_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.array(T):
T,
T:
Type('double'),
})
assert f(Type.pointer(Type('int'))) == Type('double')
@assert_compilation_succeeds()
def test_template_instantiation_type_expr_as_match_expr_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.template_instantiation('std::tuple', [T, Type('float')]):
T,
T:
Type('double'),
})
assert f(Type.template_instantiation('std::tuple', [Type('int'), Type('float')])) == Type('int')
@assert_compilation_succeeds()
def test_template_instantiation_type_expr_as_match_expr_not_matched_success():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type.template_instantiation('std::tuple', [T, Type('float')]):
T,
T:
Type('double'),
})
assert f(Type.template_instantiation('std::tuple', [Type('int'), Type('void')])) == Type('double')
@assert_compilation_succeeds()
def test_match_expr_with_trivial_specialization_and_no_free_variables():
from tmppy import Type, match
def f(t: Type):
return match(t)(lambda T: {
Type('int'):
Type('int'),
T:
Type('double'),
})
assert f(Type('void')) == Type('double')
# NOTE(review): in the @assert_conversion_fails tests below, the inline
# '# error: ...' / '# note: ...' comments appear to be expected-diagnostic
# patterns consumed by the test framework (they are regex-escaped, e.g.
# match\(\)) -- do not edit those comments casually.
@assert_conversion_fails
def test_match_keyword_argument_in_match_exprs():
    from tmppy import Type, match
    def f(x: Type):
        return match(x,
                     wrong_arg=True)( # error: Keyword arguments are not allowed in match
            lambda T, U: {
                Type.pointer(Type.function(T, [U])):
                    Type('double'),
                Type.pointer(Type.function(Type('int'), [T])):
                    T,
            })
@assert_conversion_fails
def test_match_vararg_in_match_exprs():
from tmppy import Type, match
def f(x: Type):
return match(x)(
lambda *T: { # error: Malformed match\(\): vararg lambda arguments are not supported
T: T,
})
@assert_conversion_fails
def test_match_keyword_argument_in_match_mapping():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
Type('double'),
Type.pointer(Type.function(Type('int'), [T])):
T,
},
wrong_arg=True) # error: Keyword arguments are not allowed in match
@assert_conversion_fails
def test_match_multiple_arguments_in_mapping():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: { # error: Malformed match\(\)
Type.pointer(Type.function(T, [U])):
Type('double'),
Type.pointer(Type.function(Type('int'), [T])):
T,
},
{})
@assert_conversion_fails
def test_match_non_dict_argument_in_mapping():
from tmppy import Type, match
def f(x: Type):
return match(x)( # error: Malformed match\(\)
Type('int'))
@assert_conversion_fails
def test_match_no_match_exprs_error():
from tmppy import Type, match
def f(x: Type):
return match()(lambda T: { # error: Found match\(\) with no arguments; it must have at least 1 argument.
T:
T,
})
@assert_conversion_fails
def test_match_no_mappings_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda:
{}) # error: An empty mapping dict was passed to match\(\), but at least 1 mapping is required.
@assert_conversion_fails
def test_match_bool_expr_error():
from tmppy import Type, match
def f(x: bool):
return match(x)(lambda T: { # error: All arguments passed to match must have type Type, but an argument with type bool was specified.
T:
T,
})
@assert_conversion_fails
def test_match_function_expr_error():
from tmppy import Type, match
from typing import Callable
def f(x: Callable[[Type], Type]):
return match(x)(lambda T: { # error: All arguments passed to match must have type Type, but an argument with type \(Type\) -> Type was specified.
T:
T,
})
@assert_conversion_fails
def test_match_key_expr_not_type_pattern():
from tmppy import Type, match
def f(x: Type):
return match(x)( # note: The corresponding match\(\) was here
lambda: {
True: # error: Type patterns must have type Type but this pattern has type bool
15,
})
@assert_conversion_fails
def test_match_value_expr_not_lambda():
from tmppy import Type, match
def f(x: Type):
return match(x)({ # error: Malformed match\(\)
Type('int'):
Type('int'),
})
@assert_conversion_fails
def test_match_value_expr_lambda_not_returning_dict():
from tmppy import Type, match
def f(x: Type):
return match(x)( # error: Malformed match\(\)
lambda: 15)
@assert_conversion_fails
def test_match_pattern_with_no_arguments_error():
from tmppy import Type, match
def f(x: Type):
return match(x)( # note: The corresponding match\(\) was here
lambda: {
(): # error: 0 type patterns were provided, while 1 were expected
Type('int'),
})
@assert_conversion_fails
def test_match_pattern_argument_type_mismatch_error():
from tmppy import Type, match
def f(x: Type):
return match(
True)( # error: All arguments passed to match must have type Type, but an argument with type bool was specified.
lambda T: {
T:
T,
})
@assert_conversion_fails
def test_match_pattern_with_type_expr():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T: {
x:
T, # error: T was used in the result of this match branch but not in any of its patterns
})
@assert_conversion_fails
def test_match_mappings_with_different_types_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
Type.pointer(Type.function(T, [U])):
Type('double'), # note: A previous branch returning a Type was here.
Type.pointer(Type.function(Type('int'), [T])):
True, # error: All branches in a match\(\) must return the same type, but this branch returns a bool while a previous branch in this match expression returns a Type
})
@assert_conversion_fails
def test_match_multiple_mappings_that_specialize_nothing():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda T, U: {
T: # note: A previous specialization that specializes nothing was here
Type('double'),
U: # error: Found multiple specializations that specialize nothing
Type('int'),
})
@assert_conversion_fails
def test_match_lambda_var_not_in_pattern_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda Baz: # error: The lambda argument Baz was not used in any pattern, it should be removed.
{
Type('Bar'):
Type('double'),
})
@assert_conversion_fails
def test_match_expr_containing_comparison_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
1 < 2: # error: Comparisons are not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_attribute_access_error():
from tmppy import Type, match
class MyType:
def __init__(self, x: bool):
self.x = x
def f(m: MyType):
return match(Type('int'))(lambda T: {
m.x: # error: Attribute access is not allowed in match patterns
T,
})
@assert_conversion_fails
def test_match_expr_containing_not_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
not True: # error: The "not" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_and_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
True and False: # error: The "and" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_or_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
True or False: # error: The "or" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_unary_minus_error():
from tmppy import Type, match
def f(x: Type):
n = 5
return match(x)(lambda: {
-n: # error: The "-" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_binary_plus_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
3 + 5: # error: The "\+" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_binary_minus_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
3 - 5: # error: The "-" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_multiplication_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
3 * 5: # error: The "\*" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_division_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
3 // 5: # error: The "//" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_modulus_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
3 % 5: # error: The "%" operator is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_list_comprehension_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
[n for n in [1, 2]]: # error: List comprehensions are not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_set_comprehension_error():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
{n for n in {1, 2}}: # error: Set comprehensions are not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_type_template_member():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
Type.template_member(): # error: Type.template_member\(\) is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_sum():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
sum([]): # error: sum\(\) is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_any():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
any([]): # error: any\(\) is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_all():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
all([]): # error: all\(\) is not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_equality_comparison():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
3 == 3: # error: Comparisons are not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_inequality_comparison():
from tmppy import Type, match
def f(x: Type):
return match(x)(lambda: {
3 != 3: # error: Comparisons are not allowed in match patterns
x,
})
@assert_conversion_fails
def test_match_expr_containing_function_call():
from tmppy import Type, match
def g(n: int):
return Type('int')
def f(x: Type):
return match(x)(lambda: {
g(3): # error: Function calls are not allowed in match patterns
x,
})
@assert_compilation_succeeds()
def test_match_expr_requiring_to_pick_arbitrary_arg():
from tmppy import Type, match
def f(b: bool):
return match(Type('int'))(lambda: {
Type('int'):
42,
})
assert f(True) == 42
@assert_compilation_succeeds()
def test_match_expr_requiring_to_pick_arbitrary_arg_skipping_function_type():
from tmppy import Type, match
from typing import Callable
def f(g: Callable[[int], int], b: bool):
return match(Type('int'))(lambda: {
Type('int'):
42,
})
def h(n: int):
return n
assert f(h, True) == 42
@assert_compilation_succeeds()
def test_match_expr_requiring_to_pick_arbitrary_function_type_arg():
from tmppy import Type, match
from typing import Callable
def f(g: Callable[[int], int]):
return match(Type('int'))(lambda: {
Type('int'):
42,
})
def h(n: int):
return n
assert f(h) == 42
| 32.576159
| 181
| 0.575402
| 3,089
| 24,595
| 4.407252
| 0.074458
| 0.043705
| 0.072719
| 0.092111
| 0.838769
| 0.828559
| 0.807771
| 0.781842
| 0.728074
| 0.718378
| 0
| 0.003204
| 0.301972
| 24,595
| 754
| 182
| 32.619363
| 0.789783
| 0.129335
| 0
| 0.75816
| 0
| 0
| 0.028796
| 0
| 0
| 0
| 0
| 0
| 0.139466
| 1
| 0.206231
| false
| 0
| 0.103858
| 0.102374
| 0.418398
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
6f897f3a8f22497993a85c4b6399804300f85162
| 41,963
|
py
|
Python
|
Online_Coalition_Game/views.py
|
JoeriWissink/OnlineCoalitionGame
|
a61126319dd3d28b96279ae1b4af6a1cc0ba1d93
|
[
"MIT"
] | 1
|
2021-03-29T17:35:58.000Z
|
2021-03-29T17:35:58.000Z
|
Online_Coalition_Game/views.py
|
JoeriWissink/OnlineCoalitionGame
|
a61126319dd3d28b96279ae1b4af6a1cc0ba1d93
|
[
"MIT"
] | null | null | null |
Online_Coalition_Game/views.py
|
JoeriWissink/OnlineCoalitionGame
|
a61126319dd3d28b96279ae1b4af6a1cc0ba1d93
|
[
"MIT"
] | null | null | null |
from otree.api import Currency as c, currency_range
from ._builtin import Page, WaitPage
from .models import Constants
import random
import string
class Waitforgroup(WaitPage):
    """Round-1 wait page that groups participants by arrival time."""

    template_name = 'Online_Coalition_Game/TimerWaitPage.html'
    group_by_arrival_time = True
    # 5 minutes, in seconds (presumably the wait-page countdown -- confirm).
    startwp_timer = 5 * 60
    use_task = False

    def is_displayed(self):
        # Only in round 1, and only for participants still in the game
        # (not ended, not kicked, not leftover).
        p_vars = self.participant.vars
        return (
            p_vars['end_game'] == False
            and self.round_number == 1
            and p_vars['kicked'] == False
            and p_vars['leftover'] == False
        )

    def vars_for_template(self):
        return self.player.vars_for_template()
class Groupingconfirmation(Page):
    """Grouping confirmation page; a timeout marks the participant as kicked."""

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        # Only in round 1, and only for participants still in the game.
        p_vars = self.participant.vars
        return (
            p_vars['end_game'] == False
            and self.round_number == 1
            and p_vars['kicked'] == False
            and p_vars['leftover'] == False
        )

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        # Timeouts kick the participant; everyone who passes this page is
        # considered grouped, and leftover status is re-evaluated.
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.participant.vars['grouped'] = True
        self.player.leftover_check()
class Sliderinstructions(Page):
    """Slider-task instructions page, shown only in the 'earned' treatment.

    Fix: the original class defined ``vars_for_template`` twice with
    identical bodies (the second silently shadowed the first); the
    duplicate has been removed. Behavior is unchanged.
    """

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        # Round 1 only, 'earned' treatment only, and only for participants
        # still in the game (not ended, not kicked, not leftover).
        p_vars = self.participant.vars
        return (
            p_vars['end_game'] == False
            and self.round_number == 1
            and self.session.config['earned'] == True
            and p_vars['kicked'] == False
            and p_vars['leftover'] == False
        )

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        # A timeout kicks the participant; leftover status is re-evaluated.
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class Slider(Page):
    """First slider-task page (sliders 1-21), 'earned' treatment only."""

    form_model = 'player'

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        # Round 1 only, 'earned' treatment only, active participants only.
        p_vars = self.participant.vars
        return (
            p_vars['end_game'] == False
            and self.round_number == 1
            and self.session.config['earned'] == True
            and p_vars['kicked'] == False
            and p_vars['leftover'] == False
        )

    def get_timeout_seconds(self):
        return self.session.config['slider_time']

    def get_form_fields(self):
        # Fields slider1 .. slider21.
        return ['slider{}'.format(number) for number in range(1, 22)]
class EndRound1(Page):
    """Interstitial page after the first slider block."""

    def vars_for_template(self):
        return self.player.vars_for_template()

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and self.session.config['earned'] == True
                and pvars['kicked'] == False
                and pvars['leftover'] == False)
class Slider2(Page):
    """Second block of the slider task (fields slider22..slider42)."""

    form_model = 'player'

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and self.session.config['earned'] == True
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def get_timeout_seconds(self):
        return self.session.config['slider_time']

    def get_form_fields(self):
        return ['slider{}'.format(n) for n in range(22, 43)]
class EndRound2(Page):
    """Interstitial page after the second slider block."""

    def vars_for_template(self):
        return self.player.vars_for_template()

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and self.session.config['earned'] == True
                and pvars['kicked'] == False
                and pvars['leftover'] == False)
class Slider3(Page):
    """Third block of the slider task (fields slider43..slider63).

    On leaving, scores the entire task: one point for every one of the 63
    sliders positioned exactly at 50.
    """

    form_model = 'player'

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and self.session.config['earned'] == True
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def get_timeout_seconds(self):
        return self.session.config['slider_time']

    def get_form_fields(self):
        return ['slider{}'.format(i) for i in range(43, 64)]

    def before_next_page(self):
        self.player.score = 0
        self.player.leftover_check()
        # Fix/idiom: replaces a hand-written 63-element attribute list with
        # a getattr loop over slider1..slider63 (same values, same order).
        sliders = [getattr(self.player, 'slider{}'.format(i)) for i in range(1, 64)]
        for slider in sliders:
            if slider == 50:
                self.player.score += 1
        # Expose the score to later apps/pages via participant.vars.
        self.participant.vars['score'] = self.player.score
class Waitforparticipants(WaitPage):
    """Waits until all three group members finish the slider task, then
    assigns positions by slider score: highest score gets position A, then
    B, then C, each with the corresponding resource endowment from the
    session config.

    Fix/idiom: the original triplicated the assignment as three 9-branch
    if/elif chains; this loop over the ranked scores is behavior-identical
    (``sorted`` is stable, so ties keep id order exactly as before).
    """

    title_text = "Waiting for other participants"
    body_text = "Waiting for the other two participants to complete the slider task. Please wait."

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and self.session.config['earned'] == True
                and pvars['grouped'] == True
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def after_all_players_arrive(self):
        # Rank (id_in_group, score) pairs by score, best first.
        scores = [(p.id_in_group, p.score) for p in self.group.get_players()]
        ranked = sorted(scores, key=lambda tup: tup[1], reverse=True)
        for position, (player_id, _score) in zip(('A', 'B', 'C'), ranked):
            player = self.group.get_player_by_id(player_id)
            player.position = position
            player.resources = self.session.config['resources_player_{}'.format(position)]
class PositionAssignment(Page):
    """Announces how positions were assigned (earned treatment, round 1)."""

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and pvars['kicked'] == False
                and pvars['leftover'] == False
                and self.session.config['earned'] == True
                and self.round_number == 1)

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()

    def vars_for_template(self):
        return self.player.vars_for_template()
class AssignedPosition(Page):
    """Tells the participant which position (A/B/C) they received."""

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def vars_for_template(self):
        return self.player.vars_for_template()

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class InstructionsCoalitions(Page):
    """Round-1 instructions for the coalition-bargaining game."""

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class ComprehensionCheck(Page):
    """First comprehension question: the size of the allocatable budget."""

    form_model = 'player'
    form_fields = ['comprehension_money']

    def is_displayed(self):
        pvars = self.participant.vars
        return (self.session.config['comprehension_check'] == True
                and self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def vars_for_template(self):
        # Renamed local (was `vars`, shadowing the builtin).
        context = self.player.vars_for_template()
        context.update({
            'money_label': "How large is the budget that can be allocated?"})
        return context

    def comprehension_money_choices(self):
        return [
            [0, 'Always ${} million'.format(self.session.config['total_payoff'])],
            [1, 'This depends on the size of the coalition'],
        ]

    def comprehension_money_error_message(self, value):
        # Record the failure, then show the correction.
        if value == 1:
            self.player.comprehension_money_fail = 1
            return "Your answer is incorrect. The budget is ${} million, regardless of the size of the coalition.".format(
                self.session.config['total_payoff'])

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class ComprehensionCheck2(Page):
    """Second comprehension question: the excluded party earns nothing."""

    form_model = 'player'
    form_fields = ['comprehension_exclusion']

    def is_displayed(self):
        pvars = self.participant.vars
        return (self.session.config['comprehension_check'] == True
                and self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def comprehension_exclusion_error_message(self, value):
        if value == 0:
            self.player.comprehension_exclusion_fail = 1
            return "Your answer is incorrect. The party that is not in the coalition will never receive any money."

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class ComprehensionCheck3(Page):
    """Third comprehension question: which coalitions can be formed.

    The answer set depends on whether the grand coalition (ABC) is enabled
    in the session config.
    """

    form_model = 'player'
    form_fields = ['comprehension_coalitions']

    def is_displayed(self):
        pvars = self.participant.vars
        return (self.session.config['comprehension_check'] == True
                and self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def comprehension_coalitions_choices(self):
        choices = [
            [0, 'AB and AC '],
            [1, 'AB and BC'],
            [2, 'AC and BC'],
            [3, 'AB, AC and BC'],
        ]
        if self.session.config['grand_coalition']:
            choices.append([4, 'AB, AC, BC and ABC'])
        return choices

    def comprehension_coalitions_error_message(self, value):
        grand = self.session.config['grand_coalition']
        if grand == False and value != 3:
            # The fail code simply mirrors the chosen wrong option.
            if value in (0, 1, 2):
                self.player.comprehension_coalitions_fail = value
            return "Your answer is incorrect. The coalitions that can be formed are AB, AC and BC"
        if grand == True and value != 4:
            if value in (0, 1, 2, 3):
                self.player.comprehension_coalitions_fail = value
            return "Your answer is incorrect. The coalitions that can be formed are AB, AC, BC and ABC"

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class ManipulationCheck2control(Page):
    """Round-1 manipulation-check question (control condition)."""

    form_model = 'player'
    form_fields = ['manipulation_check2control']

    def is_displayed(self):
        pvars = self.participant.vars
        return (self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)
class ManipulationCheck2controlBudget(Page):
    """Round-1 manipulation-check question about the budget (control)."""

    form_model = 'player'
    form_fields = ['manipulation_check2controlbudget']

    def is_displayed(self):
        pvars = self.participant.vars
        return (self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)
class BargainingStarts(Page):
    """Round-1 page announcing the start of the bargaining phase."""

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.round_number == 1
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class NewRound(Page):
    # Shown at the start of every round after the first. Positions and
    # resources are assigned only in round 1, so they are re-copied onto
    # the current-round player objects here.
    def vars_for_template(self):
        return self.player.vars_for_template()
    def is_displayed(self):
        if self.participant.vars['end_game'] == False and self.round_number > 1 and self.participant.vars['kicked'] == False and self.participant.vars['leftover'] == False:
            return True
        else:
            return False
    def before_next_page(self):
        # Propagate round-1 position/resources/completion code to every
        # group member for the current round.
        for player in self.group.get_players():
            prev_player = player.in_round(1)
            player.position = prev_player.position
            player.resources = prev_player.resources
            player.completion_code = prev_player.completion_code
        self.player.leftover_check()
        # NOTE(review): unlike the other pages, the timeout flag is set
        # AFTER leftover_check() — confirm this ordering is intentional.
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
    def get_timeout_seconds(self):
        return self.session.config['timeout_time']
class PhaseI(Page):
    """Offer phase: every player proposes a coalition plus an allocation
    of the budget across positions A, B and C."""

    form_model = 'player'
    form_fields = ['proposed_coalition', 'allocate_to_player_A', 'allocate_to_player_B', 'allocate_to_player_C']

    def vars_for_template(self):
        return self.player.vars_for_template()

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def before_next_page(self):
        for member in self.group.get_players():
            coalition = member.proposed_coalition
            # The party excluded from the proposed coalition gets nothing.
            if coalition == 'BC':
                member.allocate_to_player_A = 0
            if coalition == 'AC':
                member.allocate_to_player_B = 0
            if coalition == 'AB':
                member.allocate_to_player_C = 0
            # Mirror the (possibly zeroed) offer onto the group fields,
            # e.g. proposed_coalition_player_A / allocation_A_to_B, for
            # the comparison logic on later pages.
            pos = member.position
            if pos in ('A', 'B', 'C'):
                setattr(self.group, 'proposed_coalition_player_' + pos, coalition)
                setattr(self.group, 'allocation_{}_to_A'.format(pos), member.allocate_to_player_A)
                setattr(self.group, 'allocation_{}_to_B'.format(pos), member.allocate_to_player_B)
                setattr(self.group, 'allocation_{}_to_C'.format(pos), member.allocate_to_player_C)
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']
class WaitForOffers(WaitPage):
    """Synchronizes the group after the offer phase."""

    title_text = "Waiting for offers"
    body_text = "Waiting until all participants have made an offer. This could take a while."

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and pvars['kicked'] == False
                and pvars['leftover'] == False)
class OffersMade(Page):
    """Interstitial page shown once all offers are in."""

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class PhaseII(Page):
    # Selection phase: each player picks one of the offers whose coalition
    # includes their own position, or (if enabled) explicitly selects none.
    form_model = 'player'
    form_fields = ['selected_coalition']
    def is_displayed(self):
        if self.participant.vars['end_game'] == False and self.participant.vars['kicked'] == False and self.participant.vars['leftover'] == False:
            return True
        else:
            return False
    def vars_for_template(self):
        vars = self.player.vars_for_template()
        offers = dict()
        for p in self.group.get_players():
            pc = p.proposed_coalition
            # Only show offers whose coalition letters include this
            # player's position (e.g. position 'A' sees 'AB' and 'AC').
            if pc and self.player.position in p.proposed_coalition:
                summary = (
                    pc, p.offer_summary(), p.allocate_to_player_A, p.allocate_to_player_B, p.allocate_to_player_C,
                    p.id_in_group,)
                # Key excludes the proposer id, so byte-identical offers
                # from different proposers collapse into a single row.
                summary_wo_id = summary[:-1]
                offers[summary_wo_id] = summary
        if self.session.config['select_none']:
            # Sentinel "select nothing" row; 99/'None' are placeholders in
            # the allocation/id slots.
            offers['tuple'] = ("Select this option if you do not wish to select one of the above offers. You will not be able to form a coalition this round.", "No", 99, 99, 99, 'None')
        vars.update({"offers": sorted(offers.values())})
        return vars
    def get_timeout_seconds(self):
        return self.session.config['timeout_time']
    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class WaitForSelection(WaitPage):
    # Synchronizes the group after the selection phase, resolves whether a
    # coalition forms (a coalition forms only when every member of it
    # selected the same offer, i.e. allocations match exactly), and sets
    # the round payoffs.
    title_text = "Waiting for selection"
    body_text = "Waiting until all participants have chosen an offer. This may take a while."
    def is_displayed(self):
        if self.participant.vars['end_game'] == False and self.participant.vars['grouped'] == True and self.participant.vars['kicked'] == False and self.participant.vars['leftover'] == False:
            return True
        else:
            return False
    def after_all_players_arrive(self):
        players = self.group.get_players()
        for p in players:
            # selected_coalition appears to hold the proposer's id_in_group,
            # or the string 'None' when no offer was chosen — it is passed
            # to get_player_by_id below; confirm against the model.
            cs = p.selected_coalition
            if p.selected_coalition != 'None':
                # Copy the chosen proposer's offer onto this player.
                p.selected_coalition_name = self.group.get_player_by_id(cs).proposed_coalition
                p.selected_coalition_allocation_A = self.group.get_player_by_id(cs).allocate_to_player_A
                p.selected_coalition_allocation_B = self.group.get_player_by_id(cs).allocate_to_player_B
                p.selected_coalition_allocation_C = self.group.get_player_by_id(cs).allocate_to_player_C
            elif p.selected_coalition == 'None':
                p.selected_coalition_name = 'None'
                p.selected_coalition_allocation_A = None
                p.selected_coalition_allocation_B = None
                p.selected_coalition_allocation_C = None
            # Mirror each player's selection onto the group fields.
            if p.position == 'A':
                self.group.selected_coalition_name_player_A = p.selected_coalition_name
                self.group.selected_coalition_allocation_A_player_A = p.selected_coalition_allocation_A
                self.group.selected_coalition_allocation_B_player_A = p.selected_coalition_allocation_B
                self.group.selected_coalition_allocation_C_player_A = p.selected_coalition_allocation_C
            elif p.position == 'B':
                self.group.selected_coalition_name_player_B = p.selected_coalition_name
                self.group.selected_coalition_allocation_A_player_B = p.selected_coalition_allocation_A
                self.group.selected_coalition_allocation_B_player_B = p.selected_coalition_allocation_B
                self.group.selected_coalition_allocation_C_player_B = p.selected_coalition_allocation_C
            elif p.position == 'C':
                self.group.selected_coalition_name_player_C = p.selected_coalition_name
                self.group.selected_coalition_allocation_A_player_C = p.selected_coalition_allocation_A
                self.group.selected_coalition_allocation_B_player_C = p.selected_coalition_allocation_B
                self.group.selected_coalition_allocation_C_player_C = p.selected_coalition_allocation_C
        # Tally which positions selected each coalition name.
        list_AB = []
        list_AC = []
        list_BC = []
        list_ABC = []
        list_none = []
        for p in players:
            if p.selected_coalition_name == "AB":
                list_AB.append(p.position)
            if p.selected_coalition_name == "AC":
                list_AC.append(p.position)
            if p.selected_coalition_name == "BC":
                list_BC.append(p.position)
            if p.selected_coalition_name == "ABC":
                list_ABC.append(p.position)
            if p.selected_coalition_name == None:
                # NOTE(review): selected_coalition_name is assigned the
                # STRING 'None' above, never the None object, so this
                # branch looks unreachable and list_none stays empty.
                # list_none is not used afterwards — confirm intent.
                list_none.append(p.position)
        # A two-player coalition forms only when both members selected
        # offers with identical allocations for both coalition members.
        if len(
                list_AB) == 2 and self.group.selected_coalition_allocation_A_player_A == self.group.selected_coalition_allocation_A_player_B:
            if self.group.selected_coalition_allocation_B_player_A == self.group.selected_coalition_allocation_B_player_B:
                print("AB is formed")
                self.group.coalition_formed = True
                self.group.formed_coalition_name = "AB"
                self.group.payoff_A = self.group.selected_coalition_allocation_A_player_A
                self.group.payoff_B = self.group.selected_coalition_allocation_B_player_B
                self.group.payoff_C = 0
        elif len(
                list_AC) == 2 and self.group.selected_coalition_allocation_A_player_A == self.group.selected_coalition_allocation_A_player_C:
            if self.group.selected_coalition_allocation_C_player_A == self.group.selected_coalition_allocation_C_player_C:
                print("AC is formed")
                self.group.coalition_formed = True
                self.group.formed_coalition_name = "AC"
                self.group.payoff_A = self.group.selected_coalition_allocation_A_player_A
                self.group.payoff_B = 0
                self.group.payoff_C = self.group.selected_coalition_allocation_C_player_C
        elif len(
                list_BC) == 2 and self.group.selected_coalition_allocation_B_player_B == self.group.selected_coalition_allocation_B_player_C:
            print("First part for B works")
            if self.group.selected_coalition_allocation_C_player_B == self.group.selected_coalition_allocation_C_player_C:
                print("BC is formed")
                self.group.coalition_formed = True
                self.group.formed_coalition_name = "BC"
                self.group.payoff_A = 0
                self.group.payoff_B = self.group.selected_coalition_allocation_B_player_B
                self.group.payoff_C = self.group.selected_coalition_allocation_C_player_C
        elif len(
                list_ABC) == 3 and self.group.selected_coalition_allocation_A_player_A == self.group.selected_coalition_allocation_A_player_B == self.group.selected_coalition_allocation_A_player_C:
            # Grand coalition needs all three players to agree on all
            # three allocations.
            if self.group.selected_coalition_allocation_B_player_A == self.group.selected_coalition_allocation_B_player_B == self.group.selected_coalition_allocation_B_player_C:
                if self.group.selected_coalition_allocation_C_player_A == self.group.selected_coalition_allocation_C_player_B == self.group.selected_coalition_allocation_C_player_C:
                    self.group.coalition_formed = True
                    self.group.formed_coalition_name = "ABC"
                    self.group.payoff_A = self.group.selected_coalition_allocation_A_player_A
                    self.group.payoff_B = self.group.selected_coalition_allocation_B_player_B
                    self.group.payoff_C = self.group.selected_coalition_allocation_C_player_C
        else:
            # No agreement: nobody earns anything this round.
            self.group.coalition_formed = False
            self.group.payoff_A = 0
            self.group.payoff_B = 0
            self.group.payoff_C = 0
        # Convert allocated money into oTree payoffs per the config rate.
        for p in players:
            if p.position == "A":
                p.money = self.group.payoff_A
                p.payoff = self.group.payoff_A * self.session.config['payoff_conversion']
            if p.position == "B":
                p.money = self.group.payoff_B
                p.payoff = self.group.payoff_B * self.session.config['payoff_conversion']
            if p.position == "C":
                p.money = self.group.payoff_C
                p.payoff = self.group.payoff_C * self.session.config['payoff_conversion']
class OffersSelected(Page):
    """Interstitial page shown once all selections are in."""

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def get_timeout_seconds(self):
        return self.session.config['timeout_time']

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class PhaseIII_Success(Page):
    # Results page shown when a coalition formed. Builds, for each of the
    # three offers, a 10-tuple: (coalition name, allocation to A/B/C, then
    # six 0/1 flags marking which positions proposed and selected an offer
    # with these exact values).
    def is_displayed(self):
        if self.participant.vars['end_game'] == False and self.group.coalition_formed == 1 and self.participant.vars['kicked'] == False and self.participant.vars['leftover'] == False:
            return True
        else:
            return False
    def vars_for_template(self):
        offers_dict = dict()
        vars = self.player.vars_for_template()
        # Proposed offers, per position.
        prop_name_A = self.group.proposed_coalition_player_A
        prop_name_B = self.group.proposed_coalition_player_B
        prop_name_C = self.group.proposed_coalition_player_C
        prop_A_to_A = self.group.allocation_A_to_A
        prop_A_to_B = self.group.allocation_A_to_B
        prop_A_to_C = self.group.allocation_A_to_C
        prop_B_to_A = self.group.allocation_B_to_A
        prop_B_to_B = self.group.allocation_B_to_B
        prop_B_to_C = self.group.allocation_B_to_C
        prop_C_to_A = self.group.allocation_C_to_A
        prop_C_to_B = self.group.allocation_C_to_B
        prop_C_to_C = self.group.allocation_C_to_C
        # Selected offers, per position.
        sel_name_A = self.group.selected_coalition_name_player_A
        sel_name_B = self.group.selected_coalition_name_player_B
        sel_name_C = self.group.selected_coalition_name_player_C
        sel_A_to_A = self.group.selected_coalition_allocation_A_player_A
        sel_A_to_B = self.group.selected_coalition_allocation_B_player_A
        sel_A_to_C = self.group.selected_coalition_allocation_C_player_A
        sel_B_to_A = self.group.selected_coalition_allocation_A_player_B
        sel_B_to_B = self.group.selected_coalition_allocation_B_player_B
        sel_B_to_C = self.group.selected_coalition_allocation_C_player_B
        sel_C_to_A = self.group.selected_coalition_allocation_A_player_C
        sel_C_to_B = self.group.selected_coalition_allocation_B_player_C
        sel_C_to_C = self.group.selected_coalition_allocation_C_player_C
        # Flag slots 4-9 of each offer row, initialised to 0 (not
        # proposed/selected by that position).
        proposed_by_A = 0
        proposed_by_B = 0
        proposed_by_C = 0
        selected_by_A = 0
        selected_by_B = 0
        selected_by_C = 0
        offer_A = [self.group.proposed_coalition_player_A, self.group.allocation_A_to_A, self.group.allocation_A_to_B,
                   self.group.allocation_A_to_C, proposed_by_A, proposed_by_B, proposed_by_C, selected_by_A,
                   selected_by_B, selected_by_C]
        offer_B = [self.group.proposed_coalition_player_B, self.group.allocation_B_to_A, self.group.allocation_B_to_B,
                   self.group.allocation_B_to_C, proposed_by_A, proposed_by_B, proposed_by_C, selected_by_A,
                   selected_by_B, selected_by_C]
        offer_C = [self.group.proposed_coalition_player_C, self.group.allocation_C_to_A, self.group.allocation_C_to_B,
                   self.group.allocation_C_to_C, proposed_by_A, proposed_by_B, proposed_by_C, selected_by_A,
                   selected_by_B, selected_by_C]
        offers = (offer_A, offer_B, offer_C)
        for offer in offers:
            # Mark which positions proposed an offer with these values...
            if offer[0] == prop_name_A and offer[1] == prop_A_to_A and offer[2] == prop_A_to_B and offer[
                3] == prop_A_to_C:
                offer[4] = 1
            if offer[0] == prop_name_B and offer[1] == prop_B_to_A and offer[2] == prop_B_to_B and offer[
                3] == prop_B_to_C:
                offer[5] = 1
            if offer[0] == prop_name_C and offer[1] == prop_C_to_A and offer[2] == prop_C_to_B and offer[
                3] == prop_C_to_C:
                offer[6] = 1
            # ...and which positions selected an offer with these values.
            if offer[0] == sel_name_A and offer[1] == sel_A_to_A and offer[2] == sel_A_to_B and offer[3] == sel_A_to_C:
                offer[7] = 1
            if offer[0] == sel_name_B and offer[1] == sel_B_to_A and offer[2] == sel_B_to_B and offer[3] == sel_B_to_C:
                offer[8] = 1
            if offer[0] == sel_name_C and offer[1] == sel_C_to_A and offer[2] == sel_C_to_B and offer[3] == sel_C_to_C:
                offer[9] = 1
            # Keying the dict by the tuple itself deduplicates identical
            # offer rows before sorting.
            offer_dict = (offer[0], offer[1], offer[2], offer[3], offer[4], offer[5], offer[6],
                          offer[7], offer[8], offer[9],)
            offers_dict[offer_dict] = offer_dict
        vars.update({"offers_dictionary": sorted(offers_dict.values())})
        return vars
    def before_next_page(self):
        # Re-applies the payoff conversion (also done on WaitForSelection);
        # presumably idempotent because payoff is overwritten — confirm.
        for p in self.group.get_players():
            if p.position == 'A':
                p.payoff = self.group.payoff_A * self.session.config['payoff_conversion']
            if p.position == 'B':
                p.payoff = self.group.payoff_B * self.session.config['payoff_conversion']
            if p.position == 'C':
                p.payoff = self.group.payoff_C * self.session.config['payoff_conversion']
class Payoff(Page):
    """Shows the round payoff when a coalition was formed."""

    def is_displayed(self):
        pvars = self.participant.vars
        return (pvars['end_game'] == False
                and self.group.coalition_formed == True
                and pvars['kicked'] == False
                and pvars['leftover'] == False)

    def vars_for_template(self):
        return self.player.vars_for_template()
class PhaseIII_Failure(Page):
    # Results page shown when NO coalition formed. Mirrors
    # PhaseIII_Success.vars_for_template: builds a 10-tuple per offer
    # (coalition name, allocation to A/B/C, six 0/1 proposed/selected
    # flags per position).
    def is_displayed(self):
        if self.participant.vars['end_game'] == False and self.group.coalition_formed == False and self.participant.vars['kicked'] == False and self.participant.vars['leftover'] == False:
            return True
        else:
            return False
    def vars_for_template(self):
        offers_dict = dict()
        vars = self.player.vars_for_template()
        # Proposed offers, per position.
        prop_name_A = self.group.proposed_coalition_player_A
        prop_name_B = self.group.proposed_coalition_player_B
        prop_name_C = self.group.proposed_coalition_player_C
        prop_A_to_A = self.group.allocation_A_to_A
        prop_A_to_B = self.group.allocation_A_to_B
        prop_A_to_C = self.group.allocation_A_to_C
        prop_B_to_A = self.group.allocation_B_to_A
        prop_B_to_B = self.group.allocation_B_to_B
        prop_B_to_C = self.group.allocation_B_to_C
        prop_C_to_A = self.group.allocation_C_to_A
        prop_C_to_B = self.group.allocation_C_to_B
        prop_C_to_C = self.group.allocation_C_to_C
        # Selected offers, per position.
        sel_name_A = self.group.selected_coalition_name_player_A
        sel_name_B = self.group.selected_coalition_name_player_B
        sel_name_C = self.group.selected_coalition_name_player_C
        sel_A_to_A = self.group.selected_coalition_allocation_A_player_A
        sel_A_to_B = self.group.selected_coalition_allocation_B_player_A
        sel_A_to_C = self.group.selected_coalition_allocation_C_player_A
        sel_B_to_A = self.group.selected_coalition_allocation_A_player_B
        sel_B_to_B = self.group.selected_coalition_allocation_B_player_B
        sel_B_to_C = self.group.selected_coalition_allocation_C_player_B
        sel_C_to_A = self.group.selected_coalition_allocation_A_player_C
        sel_C_to_B = self.group.selected_coalition_allocation_B_player_C
        sel_C_to_C = self.group.selected_coalition_allocation_C_player_C
        # Flag slots 4-9 of each offer row, initialised to 0.
        proposed_by_A = 0
        proposed_by_B = 0
        proposed_by_C = 0
        selected_by_A = 0
        selected_by_B = 0
        selected_by_C = 0
        offer_A = [self.group.proposed_coalition_player_A, self.group.allocation_A_to_A, self.group.allocation_A_to_B,
                   self.group.allocation_A_to_C, proposed_by_A, proposed_by_B, proposed_by_C, selected_by_A,
                   selected_by_B, selected_by_C]
        offer_B = [self.group.proposed_coalition_player_B, self.group.allocation_B_to_A, self.group.allocation_B_to_B,
                   self.group.allocation_B_to_C, proposed_by_A, proposed_by_B, proposed_by_C, selected_by_A,
                   selected_by_B, selected_by_C]
        offer_C = [self.group.proposed_coalition_player_C, self.group.allocation_C_to_A, self.group.allocation_C_to_B,
                   self.group.allocation_C_to_C, proposed_by_A, proposed_by_B, proposed_by_C, selected_by_A,
                   selected_by_B, selected_by_C]
        offers = (offer_A, offer_B, offer_C)
        for offer in offers:
            # Mark which positions proposed / selected an offer with these
            # exact values.
            if offer[0] == prop_name_A and offer[1] == prop_A_to_A and offer[2] == prop_A_to_B and offer[
                3] == prop_A_to_C:
                offer[4] = 1
            if offer[0] == prop_name_B and offer[1] == prop_B_to_A and offer[2] == prop_B_to_B and offer[
                3] == prop_B_to_C:
                offer[5] = 1
            if offer[0] == prop_name_C and offer[1] == prop_C_to_A and offer[2] == prop_C_to_B and offer[
                3] == prop_C_to_C:
                offer[6] = 1
            if offer[0] == sel_name_A and offer[1] == sel_A_to_A and offer[2] == sel_A_to_B and offer[3] == sel_A_to_C:
                offer[7] = 1
            if offer[0] == sel_name_B and offer[1] == sel_B_to_A and offer[2] == sel_B_to_B and offer[3] == sel_B_to_C:
                offer[8] = 1
            if offer[0] == sel_name_C and offer[1] == sel_C_to_A and offer[2] == sel_C_to_B and offer[3] == sel_C_to_C:
                offer[9] = 1
            # Tuple key deduplicates identical offer rows before sorting.
            offer_dict = (offer[0], offer[1], offer[2], offer[3], offer[4], offer[5], offer[6],
                          offer[7], offer[8], offer[9],)
            offers_dict[offer_dict] = offer_dict
        vars.update({"offers_dictionary": sorted(offers_dict.values())})
        return vars
    def get_timeout_seconds(self):
        return self.session.config['timeout_time']
    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['kicked'] = True
        self.player.leftover_check()
class Leftover(Page):
    """Round-1 page for participants who could not be grouped."""

    def is_displayed(self):
        pvars = self.participant.vars
        base = (pvars['kicked'] == False
                and pvars['end_game'] == False
                and self.round_number == 1)
        if base and pvars['grouped'] == False:
            return True
        if base and pvars['leftover'] == True:
            return True
        return False

    def vars_for_template(self):
        return self.player.vars_for_template()

    def before_next_page(self):
        # Permanently mark as leftover so all later pages skip this
        # participant.
        self.participant.vars['leftover'] = True
class ID(Page):
    """End-of-game page: shown when a coalition formed, on the final
    round, or to leftover participants; flips end_game on leaving."""

    def is_displayed(self):
        pvars = self.participant.vars
        active = pvars['end_game'] == False and pvars['kicked'] == False
        if active and self.group.coalition_formed == True:
            return True
        if active and self.subsession.round_number == Constants.num_rounds:
            return True
        if active and pvars['leftover'] == True:
            return True
        return False

    def before_next_page(self):
        self.participant.vars['end_game'] = True
class Kicked(Page):
    """Terminal page for participants removed after a timeout."""

    def is_displayed(self):
        return self.participant.vars['kicked'] == True

    def vars_for_template(self):
        return self.player.vars_for_template()
# Ordered page flow for the app; each page's is_displayed() decides whether
# it is actually shown to a given participant/round.
page_sequence = [
    Waitforgroup,
    Groupingconfirmation,
    Sliderinstructions,
    Slider,
    EndRound1,
    Slider2,
    EndRound2,
    Slider3,
    Waitforparticipants,
    PositionAssignment,
    AssignedPosition,
    InstructionsCoalitions,
    ComprehensionCheck,
    ComprehensionCheck2,
    ComprehensionCheck3,
    BargainingStarts,
    NewRound,
    PhaseI,
    WaitForOffers,
    OffersMade,
    PhaseII,
    WaitForSelection,
    OffersSelected,
    PhaseIII_Success,
    Payoff,
    PhaseIII_Failure,
    Leftover,
    ID,
    Kicked
]
| 41.180569
| 1,396
| 0.633082
| 5,389
| 41,963
| 4.641677
| 0.056411
| 0.063325
| 0.086592
| 0.058927
| 0.835212
| 0.809747
| 0.783281
| 0.758735
| 0.734509
| 0.724474
| 0
| 0.012273
| 0.271882
| 41,963
| 1,018
| 1,397
| 41.221022
| 0.806408
| 0
| 0
| 0.64981
| 0
| 0.001264
| 0.073367
| 0.003541
| 0
| 0
| 0
| 0
| 0
| 1
| 0.128951
| false
| 0
| 0.006321
| 0.050569
| 0.355247
| 0.005057
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6fdf262a370686656f6856c3b37fa7fbdf266f7f
| 183
|
py
|
Python
|
functions.py
|
nickolaslai/splash_spring_2018
|
e969a8b15015cef2ca67c852fb2b1b0696efb29c
|
[
"MIT"
] | null | null | null |
functions.py
|
nickolaslai/splash_spring_2018
|
e969a8b15015cef2ca67c852fb2b1b0696efb29c
|
[
"MIT"
] | null | null | null |
functions.py
|
nickolaslai/splash_spring_2018
|
e969a8b15015cef2ca67c852fb2b1b0696efb29c
|
[
"MIT"
] | null | null | null |
def factorial(i):
    """Return i! (the product 1 * 2 * ... * i).

    By convention factorial(0) == 1.

    :param i: non-negative integer
    :raises ValueError: if i is negative (factorial is undefined there)
    """
    if i < 0:
        raise ValueError("factorial is undefined for negative numbers")
    result = 1
    # Multiply up from 2; range is empty for i < 2, leaving result == 1.
    for k in range(2, i + 1):
        result *= k
    return result
def fibonacci(i):
    """Return the i-th Fibonacci number, 0-indexed.

    fibonacci(0) == 0, fibonacci(1) == 1, fibonacci(2) == 1, ...

    :param i: non-negative index into the Fibonacci sequence
    :raises ValueError: if i is negative
    """
    if i < 0:
        raise ValueError("fibonacci is undefined for negative indices")
    a, b = 0, 1
    # Iterative pairwise advance avoids the exponential naive recursion.
    for _ in range(i):
        a, b = b, a + b
    return a
def fizzbuzz(i):
    """Return the FizzBuzz string for integer i.

    "FizzBuzz" for multiples of both 3 and 5, "Fizz" for multiples of 3,
    "Buzz" for multiples of 5, otherwise the number itself as a string
    (a string is returned in every case, matching the original stub's
    string return type).
    """
    if i % 15 == 0:
        return "FizzBuzz"
    if i % 3 == 0:
        return "Fizz"
    if i % 5 == 0:
        return "Buzz"
    return str(i)
| 15.25
| 32
| 0.688525
| 29
| 183
| 4.344828
| 0.482759
| 0.214286
| 0.309524
| 0.404762
| 0.603175
| 0.444444
| 0.444444
| 0.444444
| 0
| 0
| 0
| 0.013986
| 0.218579
| 183
| 11
| 33
| 16.636364
| 0.867133
| 0.344262
| 0
| 0.333333
| 0
| 0
| 0.162393
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
6feb595690ee2f4a0b145c99cda7a5112151a407
| 74
|
py
|
Python
|
multilingual_t5/r_indic_corp_bn/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
multilingual_t5/r_indic_corp_bn/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
multilingual_t5/r_indic_corp_bn/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
"""r_indic_corp_bn dataset."""
from .r_indic_corp_bn import RIndicCorpBn
| 18.5
| 41
| 0.797297
| 12
| 74
| 4.416667
| 0.666667
| 0.226415
| 0.377358
| 0.45283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094595
| 74
| 3
| 42
| 24.666667
| 0.791045
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b5146151d42521a53e8432e57f5b26df3ad8ebf2
| 57,249
|
py
|
Python
|
sdk/python/pulumi_kubernetes/autoscaling/v2beta1/_inputs.py
|
hazsetata/pulumi-kubernetes
|
e3aa3027fa3bb268c496af174b59a9913ae8094e
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_kubernetes/autoscaling/v2beta1/_inputs.py
|
hazsetata/pulumi-kubernetes
|
e3aa3027fa3bb268c496af174b59a9913ae8094e
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_kubernetes/autoscaling/v2beta1/_inputs.py
|
hazsetata/pulumi-kubernetes
|
e3aa3027fa3bb268c496af174b59a9913ae8094e
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from ... import _utilities, _tables
from ... import meta as _meta
# Public input-type classes exported by this generated module.
__all__ = [
    'CrossVersionObjectReferenceArgs',
    'ExternalMetricSourceArgs',
    'ExternalMetricStatusArgs',
    'HorizontalPodAutoscalerArgs',
    'HorizontalPodAutoscalerConditionArgs',
    'HorizontalPodAutoscalerSpecArgs',
    'HorizontalPodAutoscalerStatusArgs',
    'MetricSpecArgs',
    'MetricStatusArgs',
    'ObjectMetricSourceArgs',
    'ObjectMetricStatusArgs',
    'PodsMetricSourceArgs',
    'PodsMetricStatusArgs',
    'ResourceMetricSourceArgs',
    'ResourceMetricStatusArgs',
]
@pulumi.input_type
class CrossVersionObjectReferenceArgs:
    def __init__(__self__, *,
                 kind: pulumi.Input[str],
                 name: pulumi.Input[str],
                 api_version: Optional[pulumi.Input[str]] = None):
        """
        CrossVersionObjectReference contains enough information to let you identify the referred resource.
        :param pulumi.Input[str] kind: Kind of the referent; More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds"
        :param pulumi.Input[str] name: Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names
        :param pulumi.Input[str] api_version: API version of the referent
        """
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "name", name)
        # Optional field: stored only when the caller supplied a value.
        if api_version is not None:
            pulumi.set(__self__, "api_version", api_version)

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Input[str]:
        """
        Kind of the referent; More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds"
        """
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: pulumi.Input[str]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """
        API version of the referent
        """
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)
@pulumi.input_type
class ExternalMetricSourceArgs:
    def __init__(__self__, *,
                 metric_name: pulumi.Input[str],
                 metric_selector: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']] = None,
                 target_average_value: Optional[pulumi.Input[str]] = None,
                 target_value: Optional[pulumi.Input[str]] = None):
        """
        ExternalMetricSource indicates how to scale on a metric not associated with any Kubernetes object (for example length of queue in cloud messaging service, or QPS from loadbalancer running outside of cluster). Exactly one "target" type should be set.
        :param pulumi.Input[str] metric_name: metricName is the name of the metric in question.
        :param pulumi.Input['_meta.v1.LabelSelectorArgs'] metric_selector: metricSelector is used to identify a specific time series within a given metric.
        :param pulumi.Input[str] target_average_value: targetAverageValue is the target per-pod value of global metric (as a quantity). Mutually exclusive with TargetValue.
        :param pulumi.Input[str] target_value: targetValue is the target value of the metric (as a quantity). Mutually exclusive with TargetAverageValue.
        """
        pulumi.set(__self__, "metric_name", metric_name)
        # Optional fields are stored only when the caller supplied a value.
        if metric_selector is not None:
            pulumi.set(__self__, "metric_selector", metric_selector)
        if target_average_value is not None:
            pulumi.set(__self__, "target_average_value", target_average_value)
        if target_value is not None:
            pulumi.set(__self__, "target_value", target_value)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> pulumi.Input[str]:
        """
        metricName is the name of the metric in question.
        """
        return pulumi.get(self, "metric_name")

    @metric_name.setter
    def metric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "metric_name", value)

    @property
    @pulumi.getter(name="metricSelector")
    def metric_selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]:
        """
        metricSelector is used to identify a specific time series within a given metric.
        """
        return pulumi.get(self, "metric_selector")

    @metric_selector.setter
    def metric_selector(self, value: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]):
        pulumi.set(self, "metric_selector", value)

    @property
    @pulumi.getter(name="targetAverageValue")
    def target_average_value(self) -> Optional[pulumi.Input[str]]:
        """
        targetAverageValue is the target per-pod value of global metric (as a quantity). Mutually exclusive with TargetValue.
        """
        return pulumi.get(self, "target_average_value")

    @target_average_value.setter
    def target_average_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_average_value", value)

    @property
    @pulumi.getter(name="targetValue")
    def target_value(self) -> Optional[pulumi.Input[str]]:
        """
        targetValue is the target value of the metric (as a quantity). Mutually exclusive with TargetAverageValue.
        """
        return pulumi.get(self, "target_value")

    @target_value.setter
    def target_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_value", value)
@pulumi.input_type
class ExternalMetricStatusArgs:
    def __init__(__self__, *,
                 current_value: pulumi.Input[str],
                 metric_name: pulumi.Input[str],
                 current_average_value: Optional[pulumi.Input[str]] = None,
                 metric_selector: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']] = None):
        """
        ExternalMetricStatus indicates the current value of a global metric not associated with any Kubernetes object.
        :param pulumi.Input[str] current_value: currentValue is the current value of the metric (as a quantity)
        :param pulumi.Input[str] metric_name: metricName is the name of a metric used for autoscaling in metric system.
        :param pulumi.Input[str] current_average_value: currentAverageValue is the current value of metric averaged over autoscaled pods.
        :param pulumi.Input['_meta.v1.LabelSelectorArgs'] metric_selector: metricSelector is used to identify a specific time series within a given metric.
        """
        pulumi.set(__self__, "current_value", current_value)
        pulumi.set(__self__, "metric_name", metric_name)
        # Optional fields are stored only when the caller supplied a value.
        if current_average_value is not None:
            pulumi.set(__self__, "current_average_value", current_average_value)
        if metric_selector is not None:
            pulumi.set(__self__, "metric_selector", metric_selector)

    @property
    @pulumi.getter(name="currentValue")
    def current_value(self) -> pulumi.Input[str]:
        """
        currentValue is the current value of the metric (as a quantity)
        """
        return pulumi.get(self, "current_value")

    @current_value.setter
    def current_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "current_value", value)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> pulumi.Input[str]:
        """
        metricName is the name of a metric used for autoscaling in metric system.
        """
        return pulumi.get(self, "metric_name")

    @metric_name.setter
    def metric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "metric_name", value)

    @property
    @pulumi.getter(name="currentAverageValue")
    def current_average_value(self) -> Optional[pulumi.Input[str]]:
        """
        currentAverageValue is the current value of metric averaged over autoscaled pods.
        """
        return pulumi.get(self, "current_average_value")

    @current_average_value.setter
    def current_average_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "current_average_value", value)

    @property
    @pulumi.getter(name="metricSelector")
    def metric_selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]:
        """
        metricSelector is used to identify a specific time series within a given metric.
        """
        return pulumi.get(self, "metric_selector")

    @metric_selector.setter
    def metric_selector(self, value: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]):
        pulumi.set(self, "metric_selector", value)
@pulumi.input_type
class HorizontalPodAutoscalerArgs:
    def __init__(__self__, *,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 spec: Optional[pulumi.Input['HorizontalPodAutoscalerSpecArgs']] = None,
                 status: Optional[pulumi.Input['HorizontalPodAutoscalerStatusArgs']] = None):
        """
        HorizontalPodAutoscaler is the configuration for a horizontal pod autoscaler, which automatically manages the replica count of any resource implementing the scale subresource based on the metrics specified.
        :param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        :param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        :param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: metadata is the standard object metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        :param pulumi.Input['HorizontalPodAutoscalerSpecArgs'] spec: spec is the specification for the behaviour of the autoscaler. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status.
        :param pulumi.Input['HorizontalPodAutoscalerStatusArgs'] status: status is the current information about the autoscaler.
        """
        # NOTE: the generated code pins the stored api_version/kind to the
        # fixed constants below whenever any value is supplied, regardless
        # of what the caller passed.
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'autoscaling/v2beta1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'HorizontalPodAutoscaler')
        if metadata is not None:
            pulumi.set(__self__, "metadata", metadata)
        if spec is not None:
            pulumi.set(__self__, "spec", spec)
        if status is not None:
            pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        """
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        """
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """
        metadata is the standard object metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        """
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", value)

    @property
    @pulumi.getter
    def spec(self) -> Optional[pulumi.Input['HorizontalPodAutoscalerSpecArgs']]:
        """
        spec is the specification for the behaviour of the autoscaler. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status.
        """
        return pulumi.get(self, "spec")

    @spec.setter
    def spec(self, value: Optional[pulumi.Input['HorizontalPodAutoscalerSpecArgs']]):
        pulumi.set(self, "spec", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input['HorizontalPodAutoscalerStatusArgs']]:
        """
        status is the current information about the autoscaler.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input['HorizontalPodAutoscalerStatusArgs']]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class HorizontalPodAutoscalerConditionArgs:
    def __init__(__self__, *,
                 status: pulumi.Input[str],
                 type: pulumi.Input[str],
                 last_transition_time: Optional[pulumi.Input[str]] = None,
                 message: Optional[pulumi.Input[str]] = None,
                 reason: Optional[pulumi.Input[str]] = None):
        """
        HorizontalPodAutoscalerCondition describes the state of a HorizontalPodAutoscaler at a certain point.
        :param pulumi.Input[str] status: status is the status of the condition (True, False, Unknown)
        :param pulumi.Input[str] type: type describes the current condition
        :param pulumi.Input[str] last_transition_time: lastTransitionTime is the last time the condition transitioned from one status to another
        :param pulumi.Input[str] message: message is a human-readable explanation containing details about the transition
        :param pulumi.Input[str] reason: reason is the reason for the condition's last transition.
        """
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "type", type)
        # Optional fields are stored only when the caller supplied a value.
        if last_transition_time is not None:
            pulumi.set(__self__, "last_transition_time", last_transition_time)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if reason is not None:
            pulumi.set(__self__, "reason", reason)

    @property
    @pulumi.getter
    def status(self) -> pulumi.Input[str]:
        """
        status is the status of the condition (True, False, Unknown)
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: pulumi.Input[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        type describes the current condition
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="lastTransitionTime")
    def last_transition_time(self) -> Optional[pulumi.Input[str]]:
        """
        lastTransitionTime is the last time the condition transitioned from one status to another
        """
        return pulumi.get(self, "last_transition_time")

    @last_transition_time.setter
    def last_transition_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_transition_time", value)

    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        """
        message is a human-readable explanation containing details about the transition
        """
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        """
        reason is the reason for the condition's last transition.
        """
        return pulumi.get(self, "reason")

    @reason.setter
    def reason(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", value)
@pulumi.input_type
class HorizontalPodAutoscalerSpecArgs:
    def __init__(__self__, *,
                 max_replicas: pulumi.Input[float],
                 scale_target_ref: pulumi.Input['CrossVersionObjectReferenceArgs'],
                 metrics: Optional[pulumi.Input[List[pulumi.Input['MetricSpecArgs']]]] = None,
                 min_replicas: Optional[pulumi.Input[float]] = None):
        """
        HorizontalPodAutoscalerSpec describes the desired functionality of the HorizontalPodAutoscaler.
        :param pulumi.Input[float] max_replicas: maxReplicas is the upper limit for the number of replicas to which the autoscaler can scale up. It cannot be less that minReplicas.
        :param pulumi.Input['CrossVersionObjectReferenceArgs'] scale_target_ref: scaleTargetRef points to the target resource to scale, and is used to the pods for which metrics should be collected, as well as to actually change the replica count.
        :param pulumi.Input[List[pulumi.Input['MetricSpecArgs']]] metrics: metrics contains the specifications for which to use to calculate the desired replica count (the maximum replica count across all metrics will be used). The desired replica count is calculated multiplying the ratio between the target value and the current value by the current number of pods. Ergo, metrics used must decrease as the pod count is increased, and vice-versa. See the individual metric source types for more information about how each type of metric must respond.
        :param pulumi.Input[float] min_replicas: minReplicas is the lower limit for the number of replicas to which the autoscaler can scale down. It defaults to 1 pod. minReplicas is allowed to be 0 if the alpha feature gate HPAScaleToZero is enabled and at least one Object or External metric is configured. Scaling is active as long as at least one metric value is available.
        """
        pulumi.set(__self__, "max_replicas", max_replicas)
        pulumi.set(__self__, "scale_target_ref", scale_target_ref)
        # Optional fields are stored only when the caller supplied a value.
        if metrics is not None:
            pulumi.set(__self__, "metrics", metrics)
        if min_replicas is not None:
            pulumi.set(__self__, "min_replicas", min_replicas)

    @property
    @pulumi.getter(name="maxReplicas")
    def max_replicas(self) -> pulumi.Input[float]:
        """
        maxReplicas is the upper limit for the number of replicas to which the autoscaler can scale up. It cannot be less that minReplicas.
        """
        return pulumi.get(self, "max_replicas")

    @max_replicas.setter
    def max_replicas(self, value: pulumi.Input[float]):
        pulumi.set(self, "max_replicas", value)

    @property
    @pulumi.getter(name="scaleTargetRef")
    def scale_target_ref(self) -> pulumi.Input['CrossVersionObjectReferenceArgs']:
        """
        scaleTargetRef points to the target resource to scale, and is used to the pods for which metrics should be collected, as well as to actually change the replica count.
        """
        return pulumi.get(self, "scale_target_ref")

    @scale_target_ref.setter
    def scale_target_ref(self, value: pulumi.Input['CrossVersionObjectReferenceArgs']):
        pulumi.set(self, "scale_target_ref", value)

    @property
    @pulumi.getter
    def metrics(self) -> Optional[pulumi.Input[List[pulumi.Input['MetricSpecArgs']]]]:
        """
        metrics contains the specifications for which to use to calculate the desired replica count (the maximum replica count across all metrics will be used). The desired replica count is calculated multiplying the ratio between the target value and the current value by the current number of pods. Ergo, metrics used must decrease as the pod count is increased, and vice-versa. See the individual metric source types for more information about how each type of metric must respond.
        """
        return pulumi.get(self, "metrics")

    @metrics.setter
    def metrics(self, value: Optional[pulumi.Input[List[pulumi.Input['MetricSpecArgs']]]]):
        pulumi.set(self, "metrics", value)

    @property
    @pulumi.getter(name="minReplicas")
    def min_replicas(self) -> Optional[pulumi.Input[float]]:
        """
        minReplicas is the lower limit for the number of replicas to which the autoscaler can scale down. It defaults to 1 pod. minReplicas is allowed to be 0 if the alpha feature gate HPAScaleToZero is enabled and at least one Object or External metric is configured. Scaling is active as long as at least one metric value is available.
        """
        return pulumi.get(self, "min_replicas")

    @min_replicas.setter
    def min_replicas(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "min_replicas", value)
@pulumi.input_type
class HorizontalPodAutoscalerStatusArgs:
    def __init__(__self__, *,
                 conditions: pulumi.Input[List[pulumi.Input['HorizontalPodAutoscalerConditionArgs']]],
                 current_replicas: pulumi.Input[float],
                 desired_replicas: pulumi.Input[float],
                 current_metrics: Optional[pulumi.Input[List[pulumi.Input['MetricStatusArgs']]]] = None,
                 last_scale_time: Optional[pulumi.Input[str]] = None,
                 observed_generation: Optional[pulumi.Input[float]] = None):
        """
        HorizontalPodAutoscalerStatus describes the current status of a horizontal pod autoscaler.
        :param pulumi.Input[List[pulumi.Input['HorizontalPodAutoscalerConditionArgs']]] conditions: conditions is the set of conditions required for this autoscaler to scale its target, and indicates whether or not those conditions are met.
        :param pulumi.Input[float] current_replicas: currentReplicas is current number of replicas of pods managed by this autoscaler, as last seen by the autoscaler.
        :param pulumi.Input[float] desired_replicas: desiredReplicas is the desired number of replicas of pods managed by this autoscaler, as last calculated by the autoscaler.
        :param pulumi.Input[List[pulumi.Input['MetricStatusArgs']]] current_metrics: currentMetrics is the last read state of the metrics used by this autoscaler.
        :param pulumi.Input[str] last_scale_time: lastScaleTime is the last time the HorizontalPodAutoscaler scaled the number of pods, used by the autoscaler to control how often the number of pods is changed.
        :param pulumi.Input[float] observed_generation: observedGeneration is the most recent generation observed by this autoscaler.
        """
        pulumi.set(__self__, "conditions", conditions)
        pulumi.set(__self__, "current_replicas", current_replicas)
        pulumi.set(__self__, "desired_replicas", desired_replicas)
        # Optional fields are stored only when the caller supplied a value.
        if current_metrics is not None:
            pulumi.set(__self__, "current_metrics", current_metrics)
        if last_scale_time is not None:
            pulumi.set(__self__, "last_scale_time", last_scale_time)
        if observed_generation is not None:
            pulumi.set(__self__, "observed_generation", observed_generation)

    @property
    @pulumi.getter
    def conditions(self) -> pulumi.Input[List[pulumi.Input['HorizontalPodAutoscalerConditionArgs']]]:
        """
        conditions is the set of conditions required for this autoscaler to scale its target, and indicates whether or not those conditions are met.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: pulumi.Input[List[pulumi.Input['HorizontalPodAutoscalerConditionArgs']]]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter(name="currentReplicas")
    def current_replicas(self) -> pulumi.Input[float]:
        """
        currentReplicas is current number of replicas of pods managed by this autoscaler, as last seen by the autoscaler.
        """
        return pulumi.get(self, "current_replicas")

    @current_replicas.setter
    def current_replicas(self, value: pulumi.Input[float]):
        pulumi.set(self, "current_replicas", value)

    @property
    @pulumi.getter(name="desiredReplicas")
    def desired_replicas(self) -> pulumi.Input[float]:
        """
        desiredReplicas is the desired number of replicas of pods managed by this autoscaler, as last calculated by the autoscaler.
        """
        return pulumi.get(self, "desired_replicas")

    @desired_replicas.setter
    def desired_replicas(self, value: pulumi.Input[float]):
        pulumi.set(self, "desired_replicas", value)

    @property
    @pulumi.getter(name="currentMetrics")
    def current_metrics(self) -> Optional[pulumi.Input[List[pulumi.Input['MetricStatusArgs']]]]:
        """
        currentMetrics is the last read state of the metrics used by this autoscaler.
        """
        return pulumi.get(self, "current_metrics")

    @current_metrics.setter
    def current_metrics(self, value: Optional[pulumi.Input[List[pulumi.Input['MetricStatusArgs']]]]):
        pulumi.set(self, "current_metrics", value)

    @property
    @pulumi.getter(name="lastScaleTime")
    def last_scale_time(self) -> Optional[pulumi.Input[str]]:
        """
        lastScaleTime is the last time the HorizontalPodAutoscaler scaled the number of pods, used by the autoscaler to control how often the number of pods is changed.
        """
        return pulumi.get(self, "last_scale_time")

    @last_scale_time.setter
    def last_scale_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_scale_time", value)

    @property
    @pulumi.getter(name="observedGeneration")
    def observed_generation(self) -> Optional[pulumi.Input[float]]:
        """
        observedGeneration is the most recent generation observed by this autoscaler.
        """
        return pulumi.get(self, "observed_generation")

    @observed_generation.setter
    def observed_generation(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "observed_generation", value)
@pulumi.input_type
class MetricSpecArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 external: Optional[pulumi.Input['ExternalMetricSourceArgs']] = None,
                 object: Optional[pulumi.Input['ObjectMetricSourceArgs']] = None,
                 pods: Optional[pulumi.Input['PodsMetricSourceArgs']] = None,
                 resource: Optional[pulumi.Input['ResourceMetricSourceArgs']] = None):
        """
        MetricSpec specifies how to scale based on a single metric (only `type` and one other matching field should be set at once).
        :param pulumi.Input[str] type: type is the type of metric source. It should be one of "Object", "Pods" or "Resource", each mapping to a matching field in the object.
        :param pulumi.Input['ExternalMetricSourceArgs'] external: external refers to a global metric that is not associated with any Kubernetes object. It allows autoscaling based on information coming from components running outside of cluster (for example length of queue in cloud messaging service, or QPS from loadbalancer running outside of cluster).
        :param pulumi.Input['ObjectMetricSourceArgs'] object: object refers to a metric describing a single kubernetes object (for example, hits-per-second on an Ingress object).
        :param pulumi.Input['PodsMetricSourceArgs'] pods: pods refers to a metric describing each pod in the current scale target (for example, transactions-processed-per-second). The values will be averaged together before being compared to the target value.
        :param pulumi.Input['ResourceMetricSourceArgs'] resource: resource refers to a resource metric (such as those specified in requests and limits) known to Kubernetes describing each pod in the current scale target (e.g. CPU or memory). Such metrics are built in to Kubernetes, and have special scaling options on top of those available to normal per-pod metrics using the "pods" source.
        """
        pulumi.set(__self__, "type", type)
        # Only the source matching `type` is expected to be set; each
        # optional field is stored only when the caller supplied a value.
        if external is not None:
            pulumi.set(__self__, "external", external)
        if object is not None:
            pulumi.set(__self__, "object", object)
        if pods is not None:
            pulumi.set(__self__, "pods", pods)
        if resource is not None:
            pulumi.set(__self__, "resource", resource)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        type is the type of metric source. It should be one of "Object", "Pods" or "Resource", each mapping to a matching field in the object.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def external(self) -> Optional[pulumi.Input['ExternalMetricSourceArgs']]:
        """
        external refers to a global metric that is not associated with any Kubernetes object. It allows autoscaling based on information coming from components running outside of cluster (for example length of queue in cloud messaging service, or QPS from loadbalancer running outside of cluster).
        """
        return pulumi.get(self, "external")

    @external.setter
    def external(self, value: Optional[pulumi.Input['ExternalMetricSourceArgs']]):
        pulumi.set(self, "external", value)

    @property
    @pulumi.getter
    def object(self) -> Optional[pulumi.Input['ObjectMetricSourceArgs']]:
        """
        object refers to a metric describing a single kubernetes object (for example, hits-per-second on an Ingress object).
        """
        return pulumi.get(self, "object")

    @object.setter
    def object(self, value: Optional[pulumi.Input['ObjectMetricSourceArgs']]):
        pulumi.set(self, "object", value)

    @property
    @pulumi.getter
    def pods(self) -> Optional[pulumi.Input['PodsMetricSourceArgs']]:
        """
        pods refers to a metric describing each pod in the current scale target (for example, transactions-processed-per-second). The values will be averaged together before being compared to the target value.
        """
        return pulumi.get(self, "pods")

    @pods.setter
    def pods(self, value: Optional[pulumi.Input['PodsMetricSourceArgs']]):
        pulumi.set(self, "pods", value)

    @property
    @pulumi.getter
    def resource(self) -> Optional[pulumi.Input['ResourceMetricSourceArgs']]:
        """
        resource refers to a resource metric (such as those specified in requests and limits) known to Kubernetes describing each pod in the current scale target (e.g. CPU or memory). Such metrics are built in to Kubernetes, and have special scaling options on top of those available to normal per-pod metrics using the "pods" source.
        """
        return pulumi.get(self, "resource")

    @resource.setter
    def resource(self, value: Optional[pulumi.Input['ResourceMetricSourceArgs']]):
        pulumi.set(self, "resource", value)
@pulumi.input_type
class MetricStatusArgs:
def __init__(__self__, *,
type: pulumi.Input[str],
external: Optional[pulumi.Input['ExternalMetricStatusArgs']] = None,
object: Optional[pulumi.Input['ObjectMetricStatusArgs']] = None,
pods: Optional[pulumi.Input['PodsMetricStatusArgs']] = None,
resource: Optional[pulumi.Input['ResourceMetricStatusArgs']] = None):
"""
MetricStatus describes the last-read state of a single metric.
:param pulumi.Input[str] type: type is the type of metric source. It will be one of "Object", "Pods" or "Resource", each corresponds to a matching field in the object.
:param pulumi.Input['ExternalMetricStatusArgs'] external: external refers to a global metric that is not associated with any Kubernetes object. It allows autoscaling based on information coming from components running outside of cluster (for example length of queue in cloud messaging service, or QPS from loadbalancer running outside of cluster).
:param pulumi.Input['ObjectMetricStatusArgs'] object: object refers to a metric describing a single kubernetes object (for example, hits-per-second on an Ingress object).
:param pulumi.Input['PodsMetricStatusArgs'] pods: pods refers to a metric describing each pod in the current scale target (for example, transactions-processed-per-second). The values will be averaged together before being compared to the target value.
:param pulumi.Input['ResourceMetricStatusArgs'] resource: resource refers to a resource metric (such as those specified in requests and limits) known to Kubernetes describing each pod in the current scale target (e.g. CPU or memory). Such metrics are built in to Kubernetes, and have special scaling options on top of those available to normal per-pod metrics using the "pods" source.
"""
pulumi.set(__self__, "type", type)
if external is not None:
pulumi.set(__self__, "external", external)
if object is not None:
pulumi.set(__self__, "object", object)
if pods is not None:
pulumi.set(__self__, "pods", pods)
if resource is not None:
pulumi.set(__self__, "resource", resource)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
    """type of the metric source; one of "Object", "Pods" or "Resource",
    each corresponding to a matching field in the object."""
    stored = pulumi.get(self, "type")
    return stored

@type.setter
def type(self, value: pulumi.Input[str]):
    pulumi.set(self, "type", value)
@property
@pulumi.getter
def external(self) -> Optional[pulumi.Input['ExternalMetricStatusArgs']]:
    """A global metric not associated with any Kubernetes object; allows
    autoscaling on information from components running outside the cluster
    (e.g. queue length in a cloud messaging service, or QPS from a load
    balancer outside the cluster)."""
    stored = pulumi.get(self, "external")
    return stored

@external.setter
def external(self, value: Optional[pulumi.Input['ExternalMetricStatusArgs']]):
    pulumi.set(self, "external", value)
@property
@pulumi.getter
def object(self) -> Optional[pulumi.Input['ObjectMetricStatusArgs']]:
    """A metric describing a single kubernetes object (for example,
    hits-per-second on an Ingress object)."""
    stored = pulumi.get(self, "object")
    return stored

@object.setter
def object(self, value: Optional[pulumi.Input['ObjectMetricStatusArgs']]):
    pulumi.set(self, "object", value)
@property
@pulumi.getter
def pods(self) -> Optional[pulumi.Input['PodsMetricStatusArgs']]:
    """A metric describing each pod in the current scale target (for
    example, transactions-processed-per-second); the values are averaged
    together before being compared to the target value."""
    stored = pulumi.get(self, "pods")
    return stored

@pods.setter
def pods(self, value: Optional[pulumi.Input['PodsMetricStatusArgs']]):
    pulumi.set(self, "pods", value)
@property
@pulumi.getter
def resource(self) -> Optional[pulumi.Input['ResourceMetricStatusArgs']]:
    """A resource metric known to Kubernetes (such as those specified in
    requests and limits, e.g. CPU or memory) describing each pod in the
    current scale target; built in to Kubernetes, with special scaling
    options on top of those available to normal per-pod metrics using the
    "pods" source."""
    stored = pulumi.get(self, "resource")
    return stored

@resource.setter
def resource(self, value: Optional[pulumi.Input['ResourceMetricStatusArgs']]):
    pulumi.set(self, "resource", value)
@pulumi.input_type
class ObjectMetricSourceArgs:
    """ObjectMetricSource indicates how to scale on a metric describing a
    kubernetes object (for example, hits-per-second on an Ingress object)."""

    def __init__(__self__, *,
                 metric_name: pulumi.Input[str],
                 target: pulumi.Input['CrossVersionObjectReferenceArgs'],
                 target_value: pulumi.Input[str],
                 average_value: Optional[pulumi.Input[str]] = None,
                 selector: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']] = None):
        """
        :param metric_name: name of the metric in question.
        :param target: the described Kubernetes object.
        :param target_value: target value of the metric (as a quantity).
        :param average_value: target value of the average of the metric
            across all relevant pods (as a quantity).
        :param selector: string-encoded form of a standard kubernetes label
            selector for the given metric; when set it is passed to the
            metrics server for more specific scoping, otherwise only the
            metricName is used to gather metrics.
        """
        pulumi.set(__self__, "metric_name", metric_name)
        pulumi.set(__self__, "target", target)
        pulumi.set(__self__, "target_value", target_value)
        # Optional fields are recorded only when explicitly provided.
        for key, val in (("average_value", average_value), ("selector", selector)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> pulumi.Input[str]:
        """metricName is the name of the metric in question."""
        return pulumi.get(self, "metric_name")

    @metric_name.setter
    def metric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "metric_name", value)

    @property
    @pulumi.getter
    def target(self) -> pulumi.Input['CrossVersionObjectReferenceArgs']:
        """target is the described Kubernetes object."""
        return pulumi.get(self, "target")

    @target.setter
    def target(self, value: pulumi.Input['CrossVersionObjectReferenceArgs']):
        pulumi.set(self, "target", value)

    @property
    @pulumi.getter(name="targetValue")
    def target_value(self) -> pulumi.Input[str]:
        """targetValue is the target value of the metric (as a quantity)."""
        return pulumi.get(self, "target_value")

    @target_value.setter
    def target_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "target_value", value)

    @property
    @pulumi.getter(name="averageValue")
    def average_value(self) -> Optional[pulumi.Input[str]]:
        """averageValue is the target value of the average of the metric
        across all relevant pods (as a quantity)."""
        return pulumi.get(self, "average_value")

    @average_value.setter
    def average_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "average_value", value)

    @property
    @pulumi.getter
    def selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]:
        """String-encoded standard kubernetes label selector for the given
        metric; when set it is passed to the metrics server for more
        specific scoping, otherwise only the metricName is used."""
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, value: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]):
        pulumi.set(self, "selector", value)
@pulumi.input_type
class ObjectMetricStatusArgs:
    """ObjectMetricStatus indicates the current value of a metric describing
    a kubernetes object (for example, hits-per-second on an Ingress
    object)."""

    def __init__(__self__, *,
                 current_value: pulumi.Input[str],
                 metric_name: pulumi.Input[str],
                 target: pulumi.Input['CrossVersionObjectReferenceArgs'],
                 average_value: Optional[pulumi.Input[str]] = None,
                 selector: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']] = None):
        """
        :param current_value: current value of the metric (as a quantity).
        :param metric_name: name of the metric in question.
        :param target: the described Kubernetes object.
        :param average_value: current value of the average of the metric
            across all relevant pods (as a quantity).
        :param selector: string-encoded form of a standard kubernetes label
            selector for the given metric; when set in the
            ObjectMetricSource it is passed to the metrics server for more
            specific scoping, otherwise only the metricName is used.
        """
        pulumi.set(__self__, "current_value", current_value)
        pulumi.set(__self__, "metric_name", metric_name)
        pulumi.set(__self__, "target", target)
        # Optional fields are recorded only when explicitly provided.
        for key, val in (("average_value", average_value), ("selector", selector)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="currentValue")
    def current_value(self) -> pulumi.Input[str]:
        """currentValue is the current value of the metric (as a quantity)."""
        return pulumi.get(self, "current_value")

    @current_value.setter
    def current_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "current_value", value)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> pulumi.Input[str]:
        """metricName is the name of the metric in question."""
        return pulumi.get(self, "metric_name")

    @metric_name.setter
    def metric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "metric_name", value)

    @property
    @pulumi.getter
    def target(self) -> pulumi.Input['CrossVersionObjectReferenceArgs']:
        """target is the described Kubernetes object."""
        return pulumi.get(self, "target")

    @target.setter
    def target(self, value: pulumi.Input['CrossVersionObjectReferenceArgs']):
        pulumi.set(self, "target", value)

    @property
    @pulumi.getter(name="averageValue")
    def average_value(self) -> Optional[pulumi.Input[str]]:
        """averageValue is the current value of the average of the metric
        across all relevant pods (as a quantity)."""
        return pulumi.get(self, "average_value")

    @average_value.setter
    def average_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "average_value", value)

    @property
    @pulumi.getter
    def selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]:
        """String-encoded standard kubernetes label selector for the given
        metric; when set in the ObjectMetricSource it is passed to the
        metrics server for more specific scoping, otherwise only the
        metricName is used."""
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, value: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]):
        pulumi.set(self, "selector", value)
@pulumi.input_type
class PodsMetricSourceArgs:
    """PodsMetricSource indicates how to scale on a metric describing each
    pod in the current scale target (for example,
    transactions-processed-per-second); the values are averaged together
    before being compared to the target value."""

    def __init__(__self__, *,
                 metric_name: pulumi.Input[str],
                 target_average_value: pulumi.Input[str],
                 selector: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']] = None):
        """
        :param metric_name: name of the metric in question.
        :param target_average_value: target value of the average of the
            metric across all relevant pods (as a quantity).
        :param selector: string-encoded form of a standard kubernetes label
            selector for the given metric; when set it is passed to the
            metrics server for more specific scoping, otherwise only the
            metricName is used to gather metrics.
        """
        pulumi.set(__self__, "metric_name", metric_name)
        pulumi.set(__self__, "target_average_value", target_average_value)
        # selector is optional; only record it when supplied.
        if selector is not None:
            pulumi.set(__self__, "selector", selector)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> pulumi.Input[str]:
        """metricName is the name of the metric in question."""
        return pulumi.get(self, "metric_name")

    @metric_name.setter
    def metric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "metric_name", value)

    @property
    @pulumi.getter(name="targetAverageValue")
    def target_average_value(self) -> pulumi.Input[str]:
        """targetAverageValue is the target value of the average of the
        metric across all relevant pods (as a quantity)."""
        return pulumi.get(self, "target_average_value")

    @target_average_value.setter
    def target_average_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "target_average_value", value)

    @property
    @pulumi.getter
    def selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]:
        """String-encoded standard kubernetes label selector for the given
        metric; when set it is passed to the metrics server for more
        specific scoping, otherwise only the metricName is used."""
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, value: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]):
        pulumi.set(self, "selector", value)
@pulumi.input_type
class PodsMetricStatusArgs:
    """PodsMetricStatus indicates the current value of a metric describing
    each pod in the current scale target (for example,
    transactions-processed-per-second)."""

    def __init__(__self__, *,
                 current_average_value: pulumi.Input[str],
                 metric_name: pulumi.Input[str],
                 selector: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']] = None):
        """
        :param current_average_value: current value of the average of the
            metric across all relevant pods (as a quantity).
        :param metric_name: name of the metric in question.
        :param selector: string-encoded form of a standard kubernetes label
            selector for the given metric; when set in the PodsMetricSource
            it is passed to the metrics server for more specific scoping,
            otherwise only the metricName is used to gather metrics.
        """
        pulumi.set(__self__, "current_average_value", current_average_value)
        pulumi.set(__self__, "metric_name", metric_name)
        # selector is optional; only record it when supplied.
        if selector is not None:
            pulumi.set(__self__, "selector", selector)

    @property
    @pulumi.getter(name="currentAverageValue")
    def current_average_value(self) -> pulumi.Input[str]:
        """currentAverageValue is the current value of the average of the
        metric across all relevant pods (as a quantity)."""
        return pulumi.get(self, "current_average_value")

    @current_average_value.setter
    def current_average_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "current_average_value", value)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> pulumi.Input[str]:
        """metricName is the name of the metric in question."""
        return pulumi.get(self, "metric_name")

    @metric_name.setter
    def metric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "metric_name", value)

    @property
    @pulumi.getter
    def selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]:
        """String-encoded standard kubernetes label selector for the given
        metric; when set in the PodsMetricSource it is passed to the
        metrics server for more specific scoping, otherwise only the
        metricName is used."""
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, value: Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]):
        pulumi.set(self, "selector", value)
@pulumi.input_type
class ResourceMetricSourceArgs:
    """ResourceMetricSource indicates how to scale on a resource metric
    known to Kubernetes, as specified in requests and limits, describing
    each pod in the current scale target (e.g. CPU or memory). The values
    are averaged together before being compared to the target. Only one
    "target" type should be set."""

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 target_average_utilization: Optional[pulumi.Input[float]] = None,
                 target_average_value: Optional[pulumi.Input[str]] = None):
        """
        :param name: name of the resource in question.
        :param target_average_utilization: target value of the average of
            the resource metric across all relevant pods, represented as a
            percentage of the requested value of the resource for the pods.
        :param target_average_value: target value of the average of the
            resource metric across all relevant pods, as a raw value
            (instead of as a percentage of the request), similar to the
            "pods" metric source type.
        """
        pulumi.set(__self__, "name", name)
        # Optional target fields are recorded only when explicitly provided.
        for key, val in (("target_average_utilization", target_average_utilization),
                         ("target_average_value", target_average_value)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """name is the name of the resource in question."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="targetAverageUtilization")
    def target_average_utilization(self) -> Optional[pulumi.Input[float]]:
        """Target value of the average of the resource metric across all
        relevant pods, represented as a percentage of the requested value
        of the resource for the pods."""
        return pulumi.get(self, "target_average_utilization")

    @target_average_utilization.setter
    def target_average_utilization(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "target_average_utilization", value)

    @property
    @pulumi.getter(name="targetAverageValue")
    def target_average_value(self) -> Optional[pulumi.Input[str]]:
        """Target value of the average of the resource metric across all
        relevant pods, as a raw value (instead of as a percentage of the
        request), similar to the "pods" metric source type."""
        return pulumi.get(self, "target_average_value")

    @target_average_value.setter
    def target_average_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_average_value", value)
@pulumi.input_type
class ResourceMetricStatusArgs:
    """ResourceMetricStatus indicates the current value of a resource metric
    known to Kubernetes, as specified in requests and limits, describing
    each pod in the current scale target (e.g. CPU or memory)."""

    def __init__(__self__, *,
                 current_average_value: pulumi.Input[str],
                 name: pulumi.Input[str],
                 current_average_utilization: Optional[pulumi.Input[float]] = None):
        """
        :param current_average_value: current value of the average of the
            resource metric across all relevant pods, as a raw value
            (instead of as a percentage of the request), similar to the
            "pods" metric source type; always set regardless of the
            corresponding metric specification.
        :param name: name of the resource in question.
        :param current_average_utilization: current value of the average of
            the resource metric across all relevant pods, represented as a
            percentage of the requested value of the resource for the pods;
            only present if `targetAverageValue` was set in the
            corresponding metric specification.
        """
        pulumi.set(__self__, "current_average_value", current_average_value)
        pulumi.set(__self__, "name", name)
        # Utilization is optional; only record it when supplied.
        if current_average_utilization is not None:
            pulumi.set(__self__, "current_average_utilization", current_average_utilization)

    @property
    @pulumi.getter(name="currentAverageValue")
    def current_average_value(self) -> pulumi.Input[str]:
        """Current value of the average of the resource metric across all
        relevant pods, as a raw value (instead of as a percentage of the
        request), similar to the "pods" metric source type. Always set,
        regardless of the corresponding metric specification."""
        return pulumi.get(self, "current_average_value")

    @current_average_value.setter
    def current_average_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "current_average_value", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """name is the name of the resource in question."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="currentAverageUtilization")
    def current_average_utilization(self) -> Optional[pulumi.Input[float]]:
        """Current value of the average of the resource metric across all
        relevant pods, represented as a percentage of the requested value
        of the resource for the pods; only present if `targetAverageValue`
        was set in the corresponding metric specification."""
        return pulumi.get(self, "current_average_utilization")

    @current_average_utilization.setter
    def current_average_utilization(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "current_average_utilization", value)
| 50.618037
| 554
| 0.693182
| 7,085
| 57,249
| 5.482428
| 0.057163
| 0.07901
| 0.047576
| 0.030816
| 0.876348
| 0.829442
| 0.790644
| 0.753855
| 0.736915
| 0.707927
| 0
| 0.001
| 0.21403
| 57,249
| 1,130
| 555
| 50.662832
| 0.862254
| 0.42137
| 0
| 0.639344
| 1
| 0
| 0.152548
| 0.073127
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210134
| false
| 0
| 0.008942
| 0
| 0.33532
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
82ea9ac141fdd07fe63b2365852e1905c35fa807
| 16,829
|
py
|
Python
|
sdk/python/pulumi_auth0/branding.py
|
kevinschoonover/pulumi-auth0
|
18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_auth0/branding.py
|
kevinschoonover/pulumi-auth0
|
18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_auth0/branding.py
|
kevinschoonover/pulumi-auth0
|
18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['BrandingArgs', 'Branding']
@pulumi.input_type
class BrandingArgs:
    """The set of arguments for constructing a Branding resource."""

    def __init__(__self__, *,
                 colors: Optional[pulumi.Input['BrandingColorsArgs']] = None,
                 favicon_url: Optional[pulumi.Input[str]] = None,
                 font: Optional[pulumi.Input['BrandingFontArgs']] = None,
                 logo_url: Optional[pulumi.Input[str]] = None,
                 universal_login: Optional[pulumi.Input['BrandingUniversalLoginArgs']] = None):
        """
        :param colors: List(Resource). Configuration settings for colors
            for branding. See Colors.
        :param favicon_url: String. URL for the favicon.
        :param font: List(Resource). Configuration settings to customize
            the font. See Font.
        :param logo_url: String. URL of logo for branding.
        :param universal_login: List(Resource). Configuration settings for
            Universal Login. See Universal Login. This capability can only
            be used if the tenant has
            [Custom Domains](https://auth0.com/docs/custom-domains) enabled.
        """
        # Every argument is optional; record only those actually supplied.
        for key, val in (("colors", colors),
                         ("favicon_url", favicon_url),
                         ("font", font),
                         ("logo_url", logo_url),
                         ("universal_login", universal_login)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def colors(self) -> Optional[pulumi.Input['BrandingColorsArgs']]:
        """List(Resource). Configuration settings for colors for branding. See Colors."""
        return pulumi.get(self, "colors")

    @colors.setter
    def colors(self, value: Optional[pulumi.Input['BrandingColorsArgs']]):
        pulumi.set(self, "colors", value)

    @property
    @pulumi.getter(name="faviconUrl")
    def favicon_url(self) -> Optional[pulumi.Input[str]]:
        """String. URL for the favicon."""
        return pulumi.get(self, "favicon_url")

    @favicon_url.setter
    def favicon_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "favicon_url", value)

    @property
    @pulumi.getter
    def font(self) -> Optional[pulumi.Input['BrandingFontArgs']]:
        """List(Resource). Configuration settings to customize the font. See Font."""
        return pulumi.get(self, "font")

    @font.setter
    def font(self, value: Optional[pulumi.Input['BrandingFontArgs']]):
        pulumi.set(self, "font", value)

    @property
    @pulumi.getter(name="logoUrl")
    def logo_url(self) -> Optional[pulumi.Input[str]]:
        """String. URL of logo for branding."""
        return pulumi.get(self, "logo_url")

    @logo_url.setter
    def logo_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "logo_url", value)

    @property
    @pulumi.getter(name="universalLogin")
    def universal_login(self) -> Optional[pulumi.Input['BrandingUniversalLoginArgs']]:
        """List(Resource). Configuration settings for Universal Login; only
        usable if the tenant has
        [Custom Domains](https://auth0.com/docs/custom-domains) enabled."""
        return pulumi.get(self, "universal_login")

    @universal_login.setter
    def universal_login(self, value: Optional[pulumi.Input['BrandingUniversalLoginArgs']]):
        pulumi.set(self, "universal_login", value)
@pulumi.input_type
class _BrandingState:
    """Input properties used for looking up and filtering Branding resources."""

    def __init__(__self__, *,
                 colors: Optional[pulumi.Input['BrandingColorsArgs']] = None,
                 favicon_url: Optional[pulumi.Input[str]] = None,
                 font: Optional[pulumi.Input['BrandingFontArgs']] = None,
                 logo_url: Optional[pulumi.Input[str]] = None,
                 universal_login: Optional[pulumi.Input['BrandingUniversalLoginArgs']] = None):
        """
        :param colors: List(Resource). Configuration settings for colors
            for branding. See Colors.
        :param favicon_url: String. URL for the favicon.
        :param font: List(Resource). Configuration settings to customize
            the font. See Font.
        :param logo_url: String. URL of logo for branding.
        :param universal_login: List(Resource). Configuration settings for
            Universal Login. See Universal Login. This capability can only
            be used if the tenant has
            [Custom Domains](https://auth0.com/docs/custom-domains) enabled.
        """
        # Every state field is optional; record only those actually supplied.
        for key, val in (("colors", colors),
                         ("favicon_url", favicon_url),
                         ("font", font),
                         ("logo_url", logo_url),
                         ("universal_login", universal_login)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def colors(self) -> Optional[pulumi.Input['BrandingColorsArgs']]:
        """List(Resource). Configuration settings for colors for branding. See Colors."""
        return pulumi.get(self, "colors")

    @colors.setter
    def colors(self, value: Optional[pulumi.Input['BrandingColorsArgs']]):
        pulumi.set(self, "colors", value)

    @property
    @pulumi.getter(name="faviconUrl")
    def favicon_url(self) -> Optional[pulumi.Input[str]]:
        """String. URL for the favicon."""
        return pulumi.get(self, "favicon_url")

    @favicon_url.setter
    def favicon_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "favicon_url", value)

    @property
    @pulumi.getter
    def font(self) -> Optional[pulumi.Input['BrandingFontArgs']]:
        """List(Resource). Configuration settings to customize the font. See Font."""
        return pulumi.get(self, "font")

    @font.setter
    def font(self, value: Optional[pulumi.Input['BrandingFontArgs']]):
        pulumi.set(self, "font", value)

    @property
    @pulumi.getter(name="logoUrl")
    def logo_url(self) -> Optional[pulumi.Input[str]]:
        """String. URL of logo for branding."""
        return pulumi.get(self, "logo_url")

    @logo_url.setter
    def logo_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "logo_url", value)

    @property
    @pulumi.getter(name="universalLogin")
    def universal_login(self) -> Optional[pulumi.Input['BrandingUniversalLoginArgs']]:
        """List(Resource). Configuration settings for Universal Login; only
        usable if the tenant has
        [Custom Domains](https://auth0.com/docs/custom-domains) enabled."""
        return pulumi.get(self, "universal_login")

    @universal_login.setter
    def universal_login(self, value: Optional[pulumi.Input['BrandingUniversalLoginArgs']]):
        pulumi.set(self, "universal_login", value)
class Branding(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             colors: Optional[pulumi.Input[pulumi.InputType['BrandingColorsArgs']]] = None,
             favicon_url: Optional[pulumi.Input[str]] = None,
             font: Optional[pulumi.Input[pulumi.InputType['BrandingFontArgs']]] = None,
             logo_url: Optional[pulumi.Input[str]] = None,
             universal_login: Optional[pulumi.Input[pulumi.InputType['BrandingUniversalLoginArgs']]] = None,
             __props__=None):
    """
    With Auth0 you can set a logo and colors to maintain a consistent
    service brand. This resource allows you to manage the branding of your
    Auth0 tenant.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_auth0 as auth0

    my_brand = auth0.Branding("myBrand",
        colors=auth0.BrandingColorsArgs(
            page_background="#000000",
            primary="#0059d6",
        ),
        logo_url="https://mycompany.org/logo.png",
        universal_login=auth0.BrandingUniversalLoginArgs(
            body="<!DOCTYPE html><html><head>{%- auth0:head -%}</head><body>{%- auth0:widget -%}</body></html>",
        ))
    ```

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[pulumi.InputType['BrandingColorsArgs']] colors: List(Resource). Configuration settings for colors for branding. See Colors.
    :param pulumi.Input[str] favicon_url: String. URL for the favicon.
    :param pulumi.Input[pulumi.InputType['BrandingFontArgs']] font: List(Resource). Configuration settings to customize the font. See Font.
    :param pulumi.Input[str] logo_url: String. URL of logo for branding.
    :param pulumi.Input[pulumi.InputType['BrandingUniversalLoginArgs']] universal_login: List(Resource). Configuration settings for Universal Login. See Universal Login. This capability can only be used if the tenant has [Custom Domains](https://auth0.com/docs/custom-domains) enabled.
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: Optional[BrandingArgs] = None,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    With Auth0 you can set a logo and colors to maintain a consistent
    service brand. This resource allows you to manage the branding of your
    Auth0 tenant.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_auth0 as auth0

    my_brand = auth0.Branding("myBrand",
        colors=auth0.BrandingColorsArgs(
            page_background="#000000",
            primary="#0059d6",
        ),
        logo_url="https://mycompany.org/logo.png",
        universal_login=auth0.BrandingUniversalLoginArgs(
            body="<!DOCTYPE html><html><head>{%- auth0:head -%}</head><body>{%- auth0:widget -%}</body></html>",
        ))
    ```

    :param str resource_name: The name of the resource.
    :param BrandingArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch construction to ``_internal_init`` from either overload form."""
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(BrandingArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # Keyword-style invocation: forward the raw arguments.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object invocation: expand the parsed args into keywords.
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   colors: Optional[pulumi.Input[pulumi.InputType['BrandingColorsArgs']]] = None,
                   favicon_url: Optional[pulumi.Input[str]] = None,
                   font: Optional[pulumi.Input[pulumi.InputType['BrandingFontArgs']]] = None,
                   logo_url: Optional[pulumi.Input[str]] = None,
                   universal_login: Optional[pulumi.Input[pulumi.InputType['BrandingUniversalLoginArgs']]] = None,
                   __props__=None):
    """Shared constructor body for both ``__init__`` overloads."""
    if opts is None:
        opts = pulumi.ResourceOptions()
    elif not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new resource: __props__ may only be supplied together
        # with opts.id when looking up an existing resource.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = BrandingArgs.__new__(BrandingArgs)
        for prop, value in (("colors", colors),
                            ("favicon_url", favicon_url),
                            ("font", font),
                            ("logo_url", logo_url),
                            ("universal_login", universal_login)):
            __props__.__dict__[prop] = value
    super(Branding, __self__).__init__(
        'auth0:index/branding:Branding',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        colors: Optional[pulumi.Input[pulumi.InputType['BrandingColorsArgs']]] = None,
        favicon_url: Optional[pulumi.Input[str]] = None,
        font: Optional[pulumi.Input[pulumi.InputType['BrandingFontArgs']]] = None,
        logo_url: Optional[pulumi.Input[str]] = None,
        universal_login: Optional[pulumi.Input[pulumi.InputType['BrandingUniversalLoginArgs']]] = None) -> 'Branding':
    """
    Look up an existing Branding resource by name and provider ID.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[pulumi.InputType['BrandingColorsArgs']] colors: List(Resource). Configuration settings for colors for branding. See Colors.
    :param pulumi.Input[str] favicon_url: String. URL for the favicon.
    :param pulumi.Input[pulumi.InputType['BrandingFontArgs']] font: List(Resource). Configuration settings to customize the font. See Font.
    :param pulumi.Input[str] logo_url: String. URL of logo for branding.
    :param pulumi.Input[pulumi.InputType['BrandingUniversalLoginArgs']] universal_login: List(Resource). Configuration settings for Universal Login. See Universal Login. This capability can only be used if the tenant has [Custom Domains](https://auth0.com/docs/custom-domains) enabled.
    """
    # Fold the provider ID into the resource options so the engine performs
    # a read instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    state = _BrandingState.__new__(_BrandingState)
    for prop_name, prop_value in (
            ("colors", colors),
            ("favicon_url", favicon_url),
            ("font", font),
            ("logo_url", logo_url),
            ("universal_login", universal_login)):
        state.__dict__[prop_name] = prop_value
    return Branding(resource_name, opts=opts, __props__=state)
@property
@pulumi.getter
def colors(self) -> pulumi.Output[Optional['outputs.BrandingColors']]:
    """
    List(Resource). Configuration settings for colors for branding. See Colors.
    """
    # Resolved output of the `colors` input property.
    return pulumi.get(self, "colors")
@property
@pulumi.getter(name="faviconUrl")
def favicon_url(self) -> pulumi.Output[str]:
    """
    String. URL for the favicon.
    """
    # Exposed to the engine under the camelCase name `faviconUrl`.
    return pulumi.get(self, "favicon_url")
@property
@pulumi.getter
def font(self) -> pulumi.Output[Optional['outputs.BrandingFont']]:
    """
    List(Resource). Configuration settings to customize the font. See Font.
    """
    # Resolved output of the `font` input property.
    return pulumi.get(self, "font")
@property
@pulumi.getter(name="logoUrl")
def logo_url(self) -> pulumi.Output[str]:
    """
    String. URL of logo for branding.
    """
    # Exposed to the engine under the camelCase name `logoUrl`.
    return pulumi.get(self, "logo_url")
@property
@pulumi.getter(name="universalLogin")
def universal_login(self) -> pulumi.Output[Optional['outputs.BrandingUniversalLogin']]:
    """
    List(Resource). Configuration settings for Universal Login. See Universal Login. This capability can only be used if the tenant has [Custom Domains](https://auth0.com/docs/custom-domains) enabled.
    """
    # Exposed to the engine under the camelCase name `universalLogin`.
    return pulumi.get(self, "universal_login")
| 45.239247
| 289
| 0.650128
| 1,873
| 16,829
| 5.644421
| 0.098772
| 0.071793
| 0.080874
| 0.065551
| 0.848089
| 0.829834
| 0.82378
| 0.814037
| 0.809213
| 0.794552
| 0
| 0.003825
| 0.238873
| 16,829
| 371
| 290
| 45.361186
| 0.821532
| 0.35932
| 0
| 0.769608
| 1
| 0
| 0.13334
| 0.031941
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156863
| false
| 0.004902
| 0.034314
| 0
| 0.284314
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d25f90c8e3e466c460ce99c74f32ccdf105da34b
| 2,704
|
py
|
Python
|
result_analysis/plot_tpcc.py
|
readablesystems/cicada-exp-sigmod2017
|
d50cb5e7f835f5fcacccd31df22a233defdf2607
|
[
"Apache-2.0"
] | 16
|
2017-11-13T02:52:05.000Z
|
2021-04-27T07:47:49.000Z
|
result_analysis/plot_tpcc.py
|
readablesystems/cicada-exp-sigmod2017
|
d50cb5e7f835f5fcacccd31df22a233defdf2607
|
[
"Apache-2.0"
] | null | null | null |
result_analysis/plot_tpcc.py
|
readablesystems/cicada-exp-sigmod2017
|
d50cb5e7f835f5fcacccd31df22a233defdf2607
|
[
"Apache-2.0"
] | 7
|
2018-09-06T14:34:24.000Z
|
2021-02-28T01:17:02.000Z
|
from common import *


def _plot(df, x_col, x_label, filename, max_thread_count):
    """Best-effort call to plot_tput_graph: log failures and keep going."""
    try:
        plot_tput_graph(df, x_col, x_label, filename,
                        max_thread_count=max_thread_count)
    except Exception:
        # A single failed plot must not abort the whole sweep, but note:
        # the original bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit — narrowed here to Exception.
        import traceback
        traceback.print_exc()


# Sweep the three macrobench configurations: TPCC, TPCC-FULL, and
# TPCC-FULL with simple index updates enabled.
for bench, simple_index_update in [('TPCC', None), ('TPCC-FULL', None), ('TPCC-FULL', 1)]:
    df = load_data('exp_data')
    max_thread_count = detect_max_thread_count(df)
    cond = df['tag'] == 'macrobench'
    cond &= df['bench'] == bench
    if simple_index_update:
        cond &= df['simple_index_update'] == 1
    else:
        cond &= df['simple_index_update'].isnull()
    df = df[cond]

    def _name(base):
        """Append the simple-index-update marker and sanitize dots for filenames."""
        if simple_index_update:
            base += '_siu'
        return base.replace('.', '_')

    # Warehouse count locked to thread count (fixed ratio).
    cond = df['warehouse_count'] == df['thread_count']
    _plot(df[cond], 'thread_count', 'Thread count',
          _name('output_macrobench_%s_fixed_ratio' % bench.lower()),
          max_thread_count)

    # Throughput vs. thread count at a few fixed warehouse counts.
    for warehouse_count in [1, 2, 4, 8, 16, max_thread_count]:
        cond = df['warehouse_count'] == warehouse_count
        _plot(df[cond], 'thread_count', 'Thread count',
              _name('output_macrobench_%s_warehouse_%d' % (bench.lower(), warehouse_count)),
              max_thread_count)

    # Throughput vs. warehouse count at a few fixed thread counts.
    for thread_count in [1, 2, 4, 8, 16, max_thread_count]:
        cond = df['thread_count'] == thread_count
        _plot(df[cond], 'warehouse_count', 'Warehouse count',
              _name('output_macrobench_%s_thread_%d' % (bench.lower(), thread_count)),
              max_thread_count)
| 46.62069
| 149
| 0.698595
| 372
| 2,704
| 4.704301
| 0.150538
| 0.264
| 0.192
| 0.077143
| 0.805714
| 0.742857
| 0.701714
| 0.701714
| 0.701714
| 0.701714
| 0
| 0.011033
| 0.161982
| 2,704
| 57
| 150
| 47.438596
| 0.761253
| 0.336169
| 0
| 0.487805
| 0
| 0
| 0.185434
| 0.053221
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.097561
| 0
| 0.097561
| 0.073171
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
96299b8944d4e8af255b5b9260cf79928ad22e78
| 363
|
py
|
Python
|
divisors.py
|
mintzv6/Python_pgm
|
1e2d65b59d036707ba21addc1998780511380439
|
[
"MIT"
] | null | null | null |
divisors.py
|
mintzv6/Python_pgm
|
1e2d65b59d036707ba21addc1998780511380439
|
[
"MIT"
] | null | null | null |
divisors.py
|
mintzv6/Python_pgm
|
1e2d65b59d036707ba21addc1998780511380439
|
[
"MIT"
] | null | null | null |
def print_divisors(n):
    """Print and return the list of positive divisors of *n*, ascending.

    :param n: a positive integer.
    :returns: list of all i in [1, n] with n % i == 0.
    """
    print("The divisors of", n, "are:")
    # Comprehension replaces the manual append loop; the accumulator is no
    # longer named `list`, which shadowed the builtin.
    divisors = [i for i in range(1, n + 1) if n % i == 0]
    print(divisors)
    return divisors


if __name__ == "__main__":
    # Guard the interactive prompt so importing this module has no side effects.
    num = int(input("Enter the Number:"))
    print_divisors(num)
| 18.15
| 35
| 0.639118
| 68
| 363
| 3.352941
| 0.308824
| 0.22807
| 0.140351
| 0.149123
| 0.868421
| 0.868421
| 0.868421
| 0.868421
| 0.868421
| 0.508772
| 0
| 0.019355
| 0.146006
| 363
| 20
| 36
| 18.15
| 0.716129
| 0.584022
| 0
| 0
| 0
| 0
| 0.186529
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.111111
| 0.444444
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
9656ebbb51cc5626b9348c4b981f11cdeed7a45f
| 8,664
|
py
|
Python
|
tests/test_report.py
|
guptarajat/qds-sdk-py
|
3da8b15aa9cf60024298f2a6ac356353f1c7c9df
|
[
"Apache-2.0"
] | null | null | null |
tests/test_report.py
|
guptarajat/qds-sdk-py
|
3da8b15aa9cf60024298f2a6ac356353f1c7c9df
|
[
"Apache-2.0"
] | null | null | null |
tests/test_report.py
|
guptarajat/qds-sdk-py
|
3da8b15aa9cf60024298f2a6ac356353f1c7c9df
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import sys
import os
if sys.version_info > (2, 7, 0):
import unittest
else:
import unittest2 as unittest
from mock import Mock
import tempfile
sys.path.append(os.path.join(os.path.dirname(__file__), '../bin'))
import qds
from qds_sdk.connection import Connection
from test_base import print_command
from test_base import QdsCliTestCase
class TestReportList(QdsCliTestCase):
    """Tests for the `qds.py report list` subcommand."""

    def test_minimal(self):
        # Invoke the CLI exactly as `qds.py report list` would.
        sys.argv = ['qds.py', 'report', 'list']
        print_command()
        api_mock = Mock(return_value={})
        Connection._api_call = api_mock
        qds.main()
        # Listing reports must translate to a parameterless GET on /reports.
        api_mock.assert_called_with("GET", "reports", params=None)
class TestReportCanonicalHiveCommands(QdsCliTestCase):
    """Tests for `qds.py report canonical_hive_commands`.

    Every passing invocation must result in a single GET on the
    reports/canonical_hive_commands endpoint with the expected params dict,
    so the common run-and-assert sequence is factored into a helper.
    """

    ENDPOINT = "reports/canonical_hive_commands"

    def _run_and_expect(self, extra_args, expected_params):
        """Run the CLI with *extra_args* appended and assert the API call."""
        sys.argv = ['qds.py', 'report', 'canonical_hive_commands'] + extra_args
        print_command()
        Connection._api_call = Mock(return_value={})
        qds.main()
        Connection._api_call.assert_called_with(
            "GET", self.ENDPOINT, params=expected_params)

    def test_minimal(self):
        self._run_and_expect([], {})

    def test_start_date(self):
        self._run_and_expect(['--start-date', '2014-01-01'],
                             {'start_date': '2014-01-01'})

    def test_end_date(self):
        self._run_and_expect(['--end-date', '2014-01-01'],
                             {'end_date': '2014-01-01'})

    def test_offset(self):
        # Numeric options are converted to int by the CLI before the API call.
        self._run_and_expect(['--offset', '10'], {'offset': 10})

    def test_limit(self):
        self._run_and_expect(['--limit', '20'], {'limit': 20})

    def test_sort_frequency(self):
        self._run_and_expect(['--sort', 'frequency'],
                             {'sort_column': 'frequency'})

    def test_sort_cpu(self):
        self._run_and_expect(['--sort', 'cpu'], {'sort_column': 'cpu'})

    def test_sort_fs_bytes_read(self):
        self._run_and_expect(['--sort', 'fs_bytes_read'],
                             {'sort_column': 'fs_bytes_read'})

    def test_sort_fs_bytes_written(self):
        self._run_and_expect(['--sort', 'fs_bytes_written'],
                             {'sort_column': 'fs_bytes_written'})

    def test_sort_invalid(self):
        # An unknown sort column must make the argument parser exit.
        sys.argv = ['qds.py', 'report', 'canonical_hive_commands',
                    '--sort', 'invalid']
        print_command()
        with self.assertRaises(SystemExit):
            qds.main()

    def test_show_ast(self):
        self._run_and_expect(['--show-ast'], {'show_ast': True})
class TestReportAllCommands(QdsCliTestCase):
    """Tests for `qds.py report all_commands`.

    Every passing invocation must result in a single GET on the
    reports/all_commands endpoint with the expected params dict, so the
    common run-and-assert sequence is factored into a helper.
    """

    ENDPOINT = "reports/all_commands"

    def _run_and_expect(self, extra_args, expected_params):
        """Run the CLI with *extra_args* appended and assert the API call."""
        sys.argv = ['qds.py', 'report', 'all_commands'] + extra_args
        print_command()
        Connection._api_call = Mock(return_value={})
        qds.main()
        Connection._api_call.assert_called_with(
            "GET", self.ENDPOINT, params=expected_params)

    def test_minimal(self):
        self._run_and_expect([], {})

    def test_start_date(self):
        self._run_and_expect(['--start-date', '2014-01-01'],
                             {'start_date': '2014-01-01'})

    def test_end_date(self):
        self._run_and_expect(['--end-date', '2014-01-01'],
                             {'end_date': '2014-01-01'})

    def test_offset(self):
        # Numeric options are converted to int by the CLI before the API call.
        self._run_and_expect(['--offset', '10'], {'offset': 10})

    def test_limit(self):
        self._run_and_expect(['--limit', '20'], {'limit': 20})

    def test_sort_time(self):
        self._run_and_expect(['--sort', 'time'], {'sort_column': 'time'})

    def test_sort_cpu(self):
        self._run_and_expect(['--sort', 'cpu'], {'sort_column': 'cpu'})

    def test_sort_fs_bytes_read(self):
        self._run_and_expect(['--sort', 'fs_bytes_read'],
                             {'sort_column': 'fs_bytes_read'})

    def test_sort_fs_bytes_written(self):
        self._run_and_expect(['--sort', 'fs_bytes_written'],
                             {'sort_column': 'fs_bytes_written'})

    def test_sort_invalid(self):
        # An unknown sort column must make the argument parser exit.
        sys.argv = ['qds.py', 'report', 'all_commands',
                    '--sort', 'invalid']
        print_command()
        with self.assertRaises(SystemExit):
            qds.main()

    def test_by_user(self):
        self._run_and_expect(['--by-user'], {'by_user': True})
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| 35.654321
| 78
| 0.579063
| 952
| 8,664
| 4.921218
| 0.090336
| 0.116542
| 0.152401
| 0.06873
| 0.894771
| 0.894771
| 0.894771
| 0.887086
| 0.878122
| 0.85016
| 0
| 0.013453
| 0.279317
| 8,664
| 242
| 79
| 35.801653
| 0.736867
| 0
| 0
| 0.826291
| 0
| 0
| 0.211103
| 0.064982
| 0
| 0
| 0
| 0
| 0.107981
| 1
| 0.107981
| false
| 0
| 0.051643
| 0
| 0.173709
| 0.117371
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
96a02944cd67904bb0a80f5d4b168850d4f54382
| 84
|
py
|
Python
|
src/django_pg_bulk_update/__init__.py
|
kjfl/django-pg-bulk-update
|
8dfbee138d91e25174c394f59047cd23fdc9f823
|
[
"BSD-3-Clause"
] | 26
|
2020-07-24T19:44:04.000Z
|
2022-03-28T10:31:32.000Z
|
src/django_pg_bulk_update/__init__.py
|
kjfl/django-pg-bulk-update
|
8dfbee138d91e25174c394f59047cd23fdc9f823
|
[
"BSD-3-Clause"
] | 32
|
2018-03-30T07:48:49.000Z
|
2020-06-01T12:10:26.000Z
|
src/django_pg_bulk_update/__init__.py
|
kjfl/django-pg-bulk-update
|
8dfbee138d91e25174c394f59047cd23fdc9f823
|
[
"BSD-3-Clause"
] | 5
|
2020-12-21T21:06:17.000Z
|
2022-01-11T14:57:48.000Z
|
from .query import * # noqa: F401, F403
from .manager import * # noqa: F401, F403
| 28
| 42
| 0.666667
| 12
| 84
| 4.666667
| 0.583333
| 0.357143
| 0.5
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 0.214286
| 84
| 2
| 43
| 42
| 0.666667
| 0.392857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
96c23327b85813bdf94f69427a9389083d72df04
| 47
|
py
|
Python
|
__init__.py
|
kkew3/torchprobe
|
8cd409ad5c1bebb8f2361188b511a7b91fd7ea8d
|
[
"MIT"
] | null | null | null |
__init__.py
|
kkew3/torchprobe
|
8cd409ad5c1bebb8f2361188b511a7b91fd7ea8d
|
[
"MIT"
] | null | null | null |
__init__.py
|
kkew3/torchprobe
|
8cd409ad5c1bebb8f2361188b511a7b91fd7ea8d
|
[
"MIT"
] | null | null | null |
import torchprobe.probe
import torchprobe.inst
| 15.666667
| 23
| 0.87234
| 6
| 47
| 6.833333
| 0.666667
| 0.780488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 2
| 24
| 23.5
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7384b3634e14e4e0f53f6b441195113c93126596
| 103
|
py
|
Python
|
corehq/ex-submodules/auditcare/decorators/__init__.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2020-05-05T13:10:01.000Z
|
2020-05-05T13:10:01.000Z
|
corehq/ex-submodules/auditcare/decorators/__init__.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2019-12-09T14:00:14.000Z
|
2019-12-09T14:00:14.000Z
|
corehq/ex-submodules/auditcare/decorators/__init__.py
|
MaciejChoromanski/commcare-hq
|
fd7f65362d56d73b75a2c20d2afeabbc70876867
|
[
"BSD-3-Clause"
] | 5
|
2015-11-30T13:12:45.000Z
|
2019-07-01T19:27:07.000Z
|
from __future__ import absolute_import
from .login import watch_login
from .login import watch_logout
| 20.6
| 38
| 0.854369
| 15
| 103
| 5.4
| 0.466667
| 0.222222
| 0.37037
| 0.493827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126214
| 103
| 4
| 39
| 25.75
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
73acae27d990034d889c4e140159eabb4a319c63
| 41
|
py
|
Python
|
src/lib/fractions.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/fractions.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/fractions.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Stub: delegates to the _sk_fail helper to signal that the "fractions"
# module is not provided by this runtime (presumably a restricted Python
# implementation — confirm against _sk_fail's definition).
import _sk_fail; _sk_fail._("fractions")
| 20.5
| 40
| 0.780488
| 6
| 41
| 4.5
| 0.666667
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fb7bbaeb9712b8aa1883dda778ee6d9f877dd999
| 7,864
|
py
|
Python
|
learning_and_development/views.py
|
Atwinenickson/lendsuphumanresourcemanagement
|
b46df164d59a4e94300376d679e07bd9a60d6343
|
[
"MIT",
"Unlicense"
] | 36
|
2019-11-26T11:46:32.000Z
|
2022-02-17T13:18:18.000Z
|
learning_and_development/views.py
|
Atwinenickson/lendsuphumanresourcemanagement
|
b46df164d59a4e94300376d679e07bd9a60d6343
|
[
"MIT",
"Unlicense"
] | 13
|
2020-02-14T09:30:16.000Z
|
2022-03-12T00:58:09.000Z
|
learning_and_development/views.py
|
Atwinenickson/lendsuphumanresourcemanagement
|
b46df164d59a4e94300376d679e07bd9a60d6343
|
[
"MIT",
"Unlicense"
] | 16
|
2019-06-14T12:11:29.000Z
|
2022-02-14T15:16:07.000Z
|
from django.http import HttpResponseRedirect
from django.shortcuts import render
# Create your views here.
from django.urls import reverse
from ems_admin.decorators import log_activity
from ems_auth.decorators import hr_required
from learning_and_development.models import Resource
from learning_and_development.selectors import get_all_resources, get_resource
from organisation_details.selectors import get_all_departments, get_department
@hr_required
@log_activity
def manage_resources_page(request):
    """Render the HR resource-management page; on POST with a file, create a Resource."""
    if request.POST and request.FILES:
        post = request.POST
        Resource.objects.create(
            name=post.get('name'),
            year_published=post.get('year_published'),
            producer=post.get('producer'),
            file_format=post.get('file_format'),
            department=get_department(post.get('department')),
            description=post.get('description'),
            file=request.FILES.get('file'),
        )
        # Post/redirect/get back to this page after a successful create.
        return HttpResponseRedirect(reverse(manage_resources_page))
    context = {
        "learning_and_development_page": "active",
        "departments": get_all_departments(),
        "resources": get_all_resources(),
    }
    return render(request, 'learning_and_development/manage_resources.html', context)
@log_activity
def edit_resource_page(request, resource_id):
    """Show the edit form for one resource; on POST, persist the changes."""
    resource = get_resource(resource_id)
    if request.POST:
        post = request.POST
        uploaded = request.FILES.get('file')
        Resource.objects.filter(id=resource_id).update(
            name=post.get('name'),
            year_published=post.get('year_published'),
            producer=post.get('producer'),
            file_format=post.get('file_format'),
            department=get_department(post.get('department')),
            description=post.get('description'),
            # Keep the existing file when no replacement was uploaded.
            file=uploaded if uploaded else resource.file,
        )
        return HttpResponseRedirect(reverse(manage_resources_page))
    context = {
        "learning_and_development_page": "active",
        "resource": resource,
        "departments": get_all_departments(),
    }
    return render(request, 'learning_and_development/edit_resource.html', context)
@log_activity
def delete_resource(request, resource_id):
    """Delete the resource identified by *resource_id*, then return to the manage page."""
    get_resource(resource_id).delete()
    return HttpResponseRedirect(reverse(manage_resources_page))
@log_activity
def resources_page(request):
    """List learning resources; a POSTed department_id filters by department.

    Cleanup vs. the original: the duplicated ``get_all_departments()`` call,
    the redundant second ``get_department()`` call after the try/except, and
    the unused ``get_all_resources()`` fetch in the POST branch are removed.
    """
    if request.POST:
        department_id = request.POST.get('department_id')
        if not department_id:
            # No department selected: fall back to the unfiltered view.
            return HttpResponseRedirect(reverse(resources_page))
        try:
            department = get_department(department_id)
        except ValueError:
            # Malformed department id: redirect rather than crash.
            return HttpResponseRedirect(reverse(resources_page))
        context = {
            "learning_and_development_page": "active",
            "departments": get_all_departments(),
            "resources": Resource.objects.filter(department=department),
        }
        return render(request, 'learning_and_development/resources.html', context)
    context = {
        "learning_and_development_page": "active",
        "departments": get_all_departments(),
        "resources": get_all_resources(),
    }
    return render(request, 'learning_and_development/resources.html', context)
@log_activity
def books_page(request):
    """List book resources; a POSTed department_id additionally filters by department.

    Cleanup vs. the original: the duplicated ``get_all_departments()`` call,
    the redundant second ``get_department()`` call after the try/except, and
    the unused ``get_all_resources()`` fetch in the POST branch are removed.
    """
    if request.POST:
        department_id = request.POST.get('department_id')
        if not department_id:
            # No department selected: fall back to the unfiltered book view.
            return HttpResponseRedirect(reverse(books_page))
        try:
            department = get_department(department_id)
        except ValueError:
            # Malformed department id: redirect rather than crash.
            return HttpResponseRedirect(reverse(books_page))
        context = {
            "learning_and_development_page": "active",
            "departments": get_all_departments(),
            "resources": Resource.objects.filter(department=department, file_format="book"),
        }
        return render(request, 'learning_and_development/books.html', context)
    context = {
        "learning_and_development_page": "active",
        "departments": get_all_departments(),
        "resources": Resource.objects.filter(file_format='book'),
    }
    return render(request, 'learning_and_development/books.html', context)
@log_activity
def videos_page(request):
    """List video resources; a POSTed department_id additionally filters by department.

    Cleanup vs. the original: the duplicated ``get_all_departments()`` call,
    the redundant second ``get_department()`` call after the try/except, and
    the unused ``get_all_resources()`` fetch in the POST branch are removed.
    """
    if request.POST:
        department_id = request.POST.get('department_id')
        if not department_id:
            # No department selected: fall back to the unfiltered video view.
            return HttpResponseRedirect(reverse(videos_page))
        try:
            department = get_department(department_id)
        except ValueError:
            # Malformed department id: redirect rather than crash.
            return HttpResponseRedirect(reverse(videos_page))
        context = {
            "learning_and_development_page": "active",
            "departments": get_all_departments(),
            "resources": Resource.objects.filter(department=department, file_format="video"),
        }
        return render(request, 'learning_and_development/videos.html', context)
    context = {
        "learning_and_development_page": "active",
        "departments": get_all_departments(),
        "resources": Resource.objects.filter(file_format='video'),
    }
    return render(request, 'learning_and_development/videos.html', context)
@log_activity
def audios_page(request):
    """List audio resources; a POSTed department_id additionally filters by department.

    Bug fix vs. the original: both redirect paths pointed at ``videos_page``
    (copy-paste error) — they now redirect back to ``audios_page``. Also
    removes the duplicated ``get_all_departments()`` call, the redundant
    second ``get_department()`` call, and the unused ``get_all_resources()``
    fetch in the POST branch.
    """
    if request.POST:
        department_id = request.POST.get('department_id')
        if not department_id:
            # No department selected: fall back to the unfiltered audio view.
            return HttpResponseRedirect(reverse(audios_page))
        try:
            department = get_department(department_id)
        except ValueError:
            # Malformed department id: redirect rather than crash.
            return HttpResponseRedirect(reverse(audios_page))
        context = {
            "learning_and_development_page": "active",
            "departments": get_all_departments(),
            "resources": Resource.objects.filter(department=department, file_format='audio'),
        }
        return render(request, 'learning_and_development/audios.html', context)
    context = {
        "learning_and_development_page": "active",
        "departments": get_all_departments(),
        "resources": Resource.objects.filter(file_format='audio'),
    }
    return render(request, 'learning_and_development/audios.html', context)
| 34.643172
| 96
| 0.653357
| 758
| 7,864
| 6.513193
| 0.091029
| 0.058335
| 0.098035
| 0.0794
| 0.851327
| 0.817095
| 0.762406
| 0.762406
| 0.724732
| 0.638242
| 0
| 0
| 0.260046
| 7,864
| 226
| 97
| 34.79646
| 0.848428
| 0.002925
| 0
| 0.736842
| 0
| 0
| 0.144661
| 0.085598
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036842
| false
| 0
| 0.042105
| 0
| 0.189474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fbc4ea0283ae59c67d35ace95de2035c9bd1e189
| 217,155
|
py
|
Python
|
src/cloudlight/tests/data_enc2.py
|
joigno/cloudlight
|
8a6510047abd97e0bf3a568322205beb56fa5260
|
[
"BSD-3-Clause"
] | 3
|
2020-08-21T00:18:50.000Z
|
2020-10-21T17:40:47.000Z
|
src/cloudlight/tests/data_enc2.py
|
joigno/cloudlight
|
8a6510047abd97e0bf3a568322205beb56fa5260
|
[
"BSD-3-Clause"
] | null | null | null |
src/cloudlight/tests/data_enc2.py
|
joigno/cloudlight
|
8a6510047abd97e0bf3a568322205beb56fa5260
|
[
"BSD-3-Clause"
] | null | null | null |
compressed_graph = '''
eNo0XVdDMrESfd9fEXOTLM2KgErXtYI0sWMBC0VFQf0U9cffOWfvfTkJqZNJNplJJuE//Yfxw7T78XBz3/3ovj98LLzNvP+o3cO2ag8+VfltqtSaWlnaWEluLKfEs7wk0d7DprItT6fmld329NVErQv+KjMvTl8Z61Uflc0DjWBf6Srwn1e9V2VA2qs+KPMM9Dtetae2ACde9U7pFNCcedVbpdeAda/aVXobOO9Vx2oOIGlflYkBJfBZaQvMedUX5ZrAH686UuYPOPaqT6oJcDOvOlBRgJWyhyx2iLZU/yl9CjRLXvVLbQD0slf9UK4H1BOv+qnsDKil7Kmy88CiV31XA4COeNU3pV+A7tyrTpTRQCH/T7k4cder/qjIBvDRq/6ysYJXXvVb6TFw6lVnyn0DjTBtR60CzKVX3VXJLaB+f/CqgdIDoLBhWyUAN15VOiUBNFmvuqU2AVJVCQwqKedVy2h5WQlVNdZcY8115W8CJUFVLQOkkEMmOGSCAzbkAA2pqAAgSffY3j120T6i9lXfq56oRYDEnypXBRrhVZuFtdmpx+oIoKXEFnqypQ696pHyHVBLJzXUHUCa01T6hPjlVa+V3gQ6qemGIYLvXrWjzBpQeHeFxl6xFy+UbQAl2SWH2SWH2RnpOCMd5xhy58p4VccudyDfVyYDtPte1agLgPSYVToBXPGqGl2t1YOn2w1l98Q5QiPmlLkGWilzQcUBMq4W4VsE9xPKdYHCzHl1BpAPIKbMAdCXVHHQHueAiyi9Bax51aiyc0AngyytzCswKi3IKNMnSvJVFVkAOhkUKXxcKVSwAt8Kakkq/QgU7xJiluBb5nBeVkZSFdDOAtpZhK8IX44fbA4fbB6jIY8e3VB6HWiTXjWLVmURtUaerqFX15U2wGdhi3wkG+J8CHd0+wvV6fY/ZQUnDHoDj3X7XZmZOFOl2+L8qciiTCDHak1+zMJM38oci/PLXD/KJcV5CFl/z7C+suiOR/7oEm+JMokkxOmpnuALg56lW3T7Ncw+Vv6yOEPGDIhPxBHw6kb5V+KU8WPpUWmZ7q4qqiB4qOzUM74W9hh/TumKOE75EXF8+YCMb4RdxrcYTMaPhQniDIuEP6LKF1yQoo2PUWL8hDJGnHlGrCiTEifJ+CXlL4gjnfUiTlpppMgoi1yrCplSyu2Ls8EfWaWj4qzJfGf8dbUkWFD6U5ximDVHQvLoRn2VYhU9knCnzgVvMV6M35XPw/iPym2I01clwXv59o3/IFOO8UdhhEyoHXEG5MQQc4Hxx2EBr6zmOUz3wmZNMTEZ/50/ZAAcijORCcP4/0jGlwILZapFbZ8ywoz/oxqCv2Ep34yfMe0fcZOVbJHpJQwe45dJ5Q6TyowpGMjsZPxt5Z+KcyBj1vgVzD/G31MW5O+zrJoy9+LUFciqKsQfquhMnFbI+iMma5D6JuNPlB2Kc0rGt5WP+o9D/l8w2SUJOFNg/7nMH8a/VgaU3SiDFnVY5JXEuDe1fAQ8SgJN8xtO8hhop+9w/OQ6nPU0MQNMD4Fu/MxkH69wdIpZ/Y15OEsnjGr/0tnLsfB6lynWIgx8vgidG8aNduHETBjWDh2SYcoJlp+PMdD5Mn4sP5VFmb30Pyc8198DWdiszctKY60MtSVxivIFWltQT4LrypyIs6bMkThZyW7tBjFFXFUzwQz9MuVtirPMH0vEpEwh1sr3gbLmpS+tTbCqRXUtuMBEUXUvGJGJzto4Q2JEyxCj/gR9zC7WOqV3xZlT/o2nJ1m0YLKmUhKkw5gr5X7E6WBpslbWnENxrrFiWnvOUs+Ub8S55I8L4jGxrb4FT1VF8EQhQ1P5K+I0ZPBYe8RELUbLOrsvTlXlBWU9Rl21MGyfyfbUnqAsvBFxDhi0DdHL2kAh1a6yNXF2GFNWkZg4JaU/xNlSZlWcTalUTx5lWFr7h4VVT+6V3hHnQb5ua2W6zYnzrRz67xfihbU/LE/mctD1ocxYnC/1KvgPk6u1
E6Xj4rwx3Ts7Zxr28As+MWuf8TlY+6r0mThjBcKGIuRYO2CeJ/JiJGu+tQ/sn3uG94mPaiTYpf9WPmxrRT4sitNTNuotqNNdAbNQAn6vC+qLArC2LDhfE8jP8PtpX9AxwM/mkfptU7CeFugvCWzeIFn3gEWgZFM8A2Z6DG8I2sDAf5smsrLLiOBKEaVuoLzNocACs9zOAc8rKKRAKitIZ9w9cLXNcFBlk1coJC6we4I8Wz6w8SLYQjN2/4H22z8UfccEXRaxDf8AzbPdNRSxg+DZIhqNyo1Fc/Uz6LX+lmD6EwGxMuk7BCb6gplVpFhCsLMO/swlSZgHh9FoP/kBL4rWOTQglUEFz/DWUbId5kjTPf2s/Qu02sUJknRQ9BHavLxGahC3cSxwwPo6j0jVQvEaTTaDNwTLRCZ1r6IC/Y4KlsHIAshtoClugJqyYMFLnd0eE4yj2XV0R/aOXfgluLfP+FfiM/DsnX521iF4lEWkW8NY0s091BzDWMqCLNskR17/SNAD/EfzRCb/uhDcTiH/zQLqHKGvbf8J/jL67wIBF2i8n+IAOgdBF2iK7qMBmSPmS6CMEwy3EsjW9R+SYxH5gYFr91h0bokDfwXYRLHr4Jg5a8KLlpkbDYxNBZfAf7OGwarbILUBIvU/8OHihZRiOBfI6H3w0KYRbNIj4Dmo9HPomBI+DZcF85MINU+OvAS7zDabG8MocXGwbkuzQLT36xplWPRSCT2p98AVM4fRtTAQiGZRbQwJ7ANGRg2jfBlsNxqdVMPYAm1vbA0z3aL9tg5eFMi5dYxL20BLN/jNnrGyCujTVfb1KUakCc7oR0PMEaozE6SxY3yH4Rf4gi/E3SN5rs04dHCtwY8KdLsIe3L5h4WAeWfomzQ/zwAcNS3EPWCUfmO4LIOfFUwSR+iuAr6qKECXOBIseLyFHtK9KmpwaPAC5qYcxkuNqU7J1kGVrMKYWECJ5gIU+Ivo2mVQalcxgpbBEZeO8HsdkUdo6PKGwADzzSUnzBvwYBHtrmAI6SVy5gNx9gajT3/gi9In7KAx2nmFD2PCxq8zjvNDDjwJkMrU8FX7UZZ3x4nwGB9kJs4ATDdp9IK940jn3JEDX3fxlS2yg+qcDY7RB7E+06K+AH1grlBJMMb4RsdmMB8WMTZiGDTXyOkv4NNY5PiN/pIm9vI12pVAcAnJiiAuhvxljMIcBvk5GLDAdpUxQbsiJ+4EWpBA0iYn0z6au7HCjwDzjv0sc85B9/4DPx5vQUGGXYpyEsf8JhDgz6FJpg8aY2D9PteJHXZvDZww1/hKyhy3PQx4fpD2CXH7n2wV+lF3QOf+gGViWOTw2/zDKMiBS7kqSwBRY7Q6hwYXRKbtJLG0m84K5BrTWYZU21kS+cd0Mtg7MR1ROPfEScnSbjqrELQ7WVm+TWdDIeM6pPTOmohgplNU7kAc0TZEAu/kReIxnZyyIvJ3tExt4sypiEjKHR/qRkdEL5GXO1aUYtMRjUnk8k5coQRRiUUs7kQhVncilLE7iyRqQXRL05mHUtBJqJzgEfRM02mJAGE6IpX/itMIw0QsL4pzgh0B0zkOf7XDuEsIT6ZzEQaeh/nOZJES5yb8dU0doXOl+oId7BmYzpbMQKYjohQaVYb+aDol5Yt+0dlVDnE71HE620qjwYHCj4ryJ+IcKB8s34cG0tmD2m469ZCAmhJNsXMYcqQaphM1SfSxzhSaTGeCbQ7TeWPeL5GTTEckMPTDp4jVpvMR/vhlB/6QpTNl0QPfyqLyP5ESxLkL6+vJ4iZOlxy+Zaa+LP7iPLK4B8hwpnOvVgWfFMgZYbPAdIbYXTOdgUgy4ryG3ByzhJewr59RhcuuQUN12XU1ENzAfoLLZkWEc1kZEz1x8rKQiIOdB5cV3VTUj+ySzC3iLENvcFkZlXfiJBXKW1UfginIkS4rmkFLnAy6yGUjkMBdNooNIZeNiTjrsnEIpi6bUGuC80pfiwPpRZxFGT4uOyeqvMvq
MI8RDc1loR+4rAuDfNFjXPZM2X/inCvgBYMuZWC7bEe0CJe9Cum4lmHrsjcyOFy2oW4Fm6Jzuiw2uFz2SKHWtgI9x+pL8ESWVnFOlRO1LLsn66A4GBcue4AtH5eVISMIfcBlD5V/KI7oA4J1pe/FKZHUcsinTXUquCW6sMsGIae3w0bIaASrRTsA5d+ia7jsjIz8CdP9hiz5Uz7SvbGBItOLfpidQgVw2Xf58FxWxldNHOzguOyXcpfiDLDd67JD5YPhI+hLLvtEJjyzoBd29zjs01dGyFgDxV3lRHPN9qDqu+wd9vZc9h4KuMs+YLvLZR/Z63216OmUhh6+reYDATOsCLb+BJys8dtKtFJprXSw4FZBsL4hkNpGgu6d4E1ZIPIAuEDWYwG7tih4+sICkShygExjgV3EuLccApHz+Qfpu23mWgOOkqjrbx7pGj783y0UtBeDP/csuIHq7BKq+mF9lSkoqyHLABXogx6wdIuMb3ngSZlFRVFnCgEOXr20yVLfUUrqEOGNNPzf4EYJzVxEsSWAH7JoChZFfpFv5x/StjT8yTiwiNx6HxRnmwJLGQAqzII9jRv4tlCYiNvbKj5B4ShFf+0hCozK7qMhE7TMDRcEc+gPPQQNbLT7R/4PUY5uoO0X56BsUgU56SPBNZS5hdL1rERE+20fXagjZPQ+CPJTr8g5x/w9oB3cA2dfwA3ULNKjAPo9yuzuDd4VkPDHDgo+kTsO+groJfOC/tQVK/gGbunqo+CRz6rWUfAqe3mGduhnlH+GDtcvc/Ai3RkyL6BS886xEEURC8sC36es1KCcHeT1fVC0gPqj8Jl7sF7vYgD6CfDZRDuCm2jNOodDlCMujnQ7S0i8XhSsgFVnGKcb6LIoohe6CMNIsllwVpdR8QLjK2wQyPHTGNJr4M8OB+oEn4ypMMXxECmi6Lpfjr0Eutnk8NnpO9CnzR4TzpgQw0xvA9vIGcMnFLsCoNAYxtATefmI0WVW0BXmH0lc5oisomtF3Jd0SBHDgIqhzkX0VAxDIQZ6yZsER/Uxh2/mA/4IuigBXiQ4oo4dyAJtiTbTgmMZDiKRegVP0Mxfdss2avWXOAwMCUvgQ3M+ZhP7hE+ljCHnRuhbG4V/v89CTj0d+Krq6SWuxLM/rO8zWWJFRpjNhJni/GCKMzOZWWWBn32oV8FP7PPN/ql9wS+RqMV5E+6KM8HSOZtK2eK8h2HPMjuI80JxYDbGAYGZvULGmg2wLTsbYoGfjXAiYGZPspCZ2T3OaMzsATO9mT1CRJv1RScT55aCxKxLGno4VTKzO5LfUSLUzK6UD+c6LOEGctvsjGSfQ+qbXahrwUvsgJuZyE+o7jhMfMJiTnEwYGYNZUVQmDUpKMxayolIOTsKC6+qC8FDioazWkhXnbLnbA8Czmxf+SJ7zQ6UQ1RFJGtxApy0mdk2d5RnOyK4i7OreoIlSGKzMvfGZ5vKlMTZUqYtTk5mOnHyKiLSy6wAYWlWDDtlDad+ZsZt8dmGmglmcY5nZqsieIuTghxiZmnyKkNcwgGHmS2H1K7I8m5mSdURTCgfNcwrIwLmbAEbx7OIckgdxamTmcWURqvi3PSdmZA+G9LnKNjOfKXRV3Pc8p9p7DPqwKkd79Di0BC46x0a+g39Pv0+/U7kCQGp5XCOwXMM1vRr+ufV/BPQScoEwxMMX5RvCVj0DkVZ2AXqE+8wyiRRJonQH6E/Tn+c/hikC+Cld5hSGwApfFX6WEDiM/Bl4EvDl4ZvGb5l+JbgW4IvyTKTLHOF/hX688r4wLF3mEMvCj56h0VVBqS9w4J0jIDxDtdR1jrKWoNvDT7p0QWglpZtIHRDQv18Vu0JbmAI+vl1NRJcEw1bHFFcTsUpyIfk5/PK3ouTky/bz4t6hEwrjFnGRqqfXwK3/XwGHe3n01AM/HxKuTVxcB7j5+MQjv18TAaEn4+KzuTnRUSdF2dRlhVxRE8TnIf+4OcTOEj181ohZk5END/vKz8n
joN46+exx+7njZoIXqqW4IXMBX7+XGZUcc5w+uPnbyDj+nn5ir/FuVIOregoRB3hbMnPt2SxFEe+z4E48rUuinMKxcLPn6h1wWPVFmyrFUH5BOviHIiiLc4+dp79/J7oX36+ToJqMkX4+UOoiX6+iqM/P7+FbX0/v6kiCXHK2ID28yXI7H5+lxzdURrZtnEk5ecDHOwcbqsuQAZTgI4NhG+HIrJa4j/vcEcIEhBl4LCMMVDGGChBgBWU+C1k3cJ2+eGmzF4CaOe70mlxpqEzgZDt598UmPRF/v8LYz4x5fj5D3bdk3JwRvwxRBV+fqB8DJbXMPk4pPyFHfUchokAK9gTHdTPd8Og29DpK3DvUYavn38Ig+7h6GAOSnDlAkeiS1PlnjwnCq8TzUQ03siROHFoTqILi2gsGq/I/qLxapGaRRVOCuKow4lGLTqUqNdW1ApRpS1+GUjsomRbUSFEy478iIMZz4mebkU5EQV+VXBD5CFx8lAeRE+X1cqJCo96CspNxVmWDnadJRGQxZEvQRSezgrJSanIRJxVaCydDNSbTpqJ96kuiXKLair4kpyovS+CosQ8i1NVBlF15XbEqeHE0IkijbASqRItZk+cTRKyzawBjsWdKNiiRoh+jaBz5YvmJAq7AR2izL+JcxH+uqKKCI0dv25EWhSHGlqnGVLUYBlHOHp2nRa5dQwFsNMmN08V2H8S5h/K6uSg7oomKpqwvyWOjA7R7kTfNaINib5bFnxlG8bUmUSvFlVM9Oq+4F1YTg+KqOjVaNY9+daXb9OJvs1okR064nyzM36Z6gcKkOjtOU+3c7BTcZ2JsuiENyrQnXfIG64zxVaG63xCn+18UK3rfMk04ToywMuSuSCt0u11LH26vSal6naWQRtEKz2n20bpjjg+TsDajgf1cypyIo5WOCWfh4VPOyECnddRdmtX0N94EjSiCwq+3QALXcQW+oK6hNhNRLrJE3FAHDLrAfztL4Y8E1+Idwz/R/8jsU98IP4Qf4kz4h9xKniySu8bcUIMi/8QfDuHt5IAeSLnCXlDDYJXdxDSZkPGRYT/gjIzf8twpOxMmZCEPaHS+QyLzSNF5RT+rTiSJNmCJ9Du587gP98WjLcFHkCE+zpBnje0wqwdwp/yBffAtZcmABXaWhmY3kKWvbCBr8jyxIpjQ2aMAJvEeVb85RgOdI8sJ0US0zMSFBM8bCFgE0W+gqA9n9x+Z6VoYAUBl6DS95PA9CFTfAuu9wR2rgTSiLolu6PI+8j6o2iFvmeds2v4AzBBn8yziGW0YA6V2FWEv9QQUCbRq5sIAJ+2APozKniGMs4wrM46ANT8QkBfmjOOhfUSAjBQXthBN4vwgkN6AYWYpXs2Zg/tAiGXGALuGMTbj2v6DdEKtkleZI8ByO6mmjgnOLyglyybMs+U3T5midMw+QIRISdo8MkKfy8R2XuHFfpTgt0TejeI68Q14NU7/XkiBsH7Fb0BWrLOkfCIYeevobG6l0NIn0P7GuzKoCIfH0wGHZXJAljLAxi/hU9yDEJP+OHtgKKFK/m6F2BdpdtRWdp0W6S+d3HiWHN0OyZrjm3OlM2J8w17KNv8VU3BH8jetvmHPRzdbgv75NcEAqVtvuHUu/ke5ppC87DNT1mcbfND+X/ifDHBP+IQBoO2OZBJyTafsM1omyOc7TdfePDcfIaoYpuvoTMWEdk2u8x6C6ndNu9w7t/s4RC9+QCbg+a98vGjH2Z5hMBkm+fhrzNVF7zAmXPzCtOmbXYwCdrmjfIH4lxjk8o2m7AXaIq4tCTOUdj0Fk6tm8fENvGUlJ7IGmab+2Fpe0rfilPBjp1tHsiUbJuHCqVUmbjOkBrkf9sssyElHMU3tyBe2+Ym7Idsc1uWEdsMaI7QFIGoKM4Ow9bZgrWwtiyOypsbsIO0zTwEAtvMiYxhm0WIf7ZZIG3LPLBvLsGCzDaTOFNvrih9Lk5KNQRXob7ZZkZFUFAappG2GZVhJk4c+3W2
GRMtWZx5NjxBXBQRzjYXsHlpmzSJsk2NfUTbtLDoaBoVwQ8/5LAsMDDq2uYCFKgfwV36d7jYiFr3JU6JQVsqBtuwTf44DE23qsoaceo0Ilv6UrDzqkFB0+39MMUek1fCHwci740jsETQKRGSZHgv/ZOKXPcHK3H3FwJVF9ZFrjvjetz9Y9AUm7zddyZ6E/667gR2Fq77T9mMOF8Qfbof2KXtfmLN746Y9on5BqwC5mWuO1YOGV5D51nJIt19kVHguj1pveveQpdy3S7W/+4jTJVctw+ZrXuPjXvXfVCmKc4F91y7lyTvDEqH655DUOheY8e4e4Mt166MYxG2ulfqXBAWJq57JJ+d6zZIUhO7uN0T1nUKA13XbZPu47DAA8gf3QpkiO4eubJPybNbg8rousL2b3GqYeChioALm6xsC0x23ZJCujJxh7vF3V1YxrpuoJBnGxZP3Q2ME9fNkkCRTdPirEPzd90CxKVukbvT3Ry2Hlw3D8HZdVeUCKfdJNm6BLtK111moWkFDmXY0NWQtlToxDA0XDeuQHZEWZE9u1FydwHHNa67GFKSUIiZJ2pWMEeJuevINx92p65rGAOtTGa2zD7wE3695eD/WRY8QPDBLkKHb4JupQP/ZVvw9BYwxO/DCXIMB4Lm6kFQlmwJjllkkTn7Xf1sMQ4p/gxCL5+QInfJqm7pfyGOETuICn6sI9KfMPgR/ilC3GYfISmUaF//GDtFFa8A1KCDd6Tb/wGunaLmaYvpWFFtJLjGwnfR4B/kNpMlwYc9hD6iUT87zP0PAUly5Jv+Hw1ML7FUlOeOikh+BIgxbpPlBYgrARs+iUJDXBIM/EkixXmR7LkCNahMZ0HTGnKY4yziPtAke3EHrJF2EuxEnHlXC4alfgn6jkxabwimVxgMWtOrAtEEctyfsgxwZP0TFbzcIDiCyPU6yyRXl+ZZHPrXXqMWt1wA1veYBtWcHSD/HLpZf64heAdtqiFnGt1UW2RdaOMd2XSJxvhpcmE5g8hVNGwBNA7ZgdsceNtMsY12xZDA9D+AqzXyie3ILJCyFDGNkAuOyMMxkUPocEp854i7o/+L+EEEDwbfAl3ktL8ocRVNNStl4DZbtrHC6ooMB402grboxAbDe4LXGML2CY3b7zIOJraw8LXVEbRxW33Coa2tDrAkVYdYy6tj5bfFeRXlzVaf1ZbgC3asbLWHOcxW77C2Vm+VK4jThU5oq49qWZb8al8malu9V4uCD9iFs9UfrqTVX1iewfgeYSLfiNQBO32RQWDhj1/v2Hiw1TcsldWJ6IYWVwWcOF9hrg9sLdgqDFZt9QBTpK1W1KJIDdU9aNK2uh8G1pRDGXWcjNpqVckSXT1UZ4KbWM9sdUvmUFstQd+11TK2a2x1RzRXW91lTCAjSZxtiFAwbK+JcwZZonouyrGFkbyIGtUbBnWY6orCV7XFao4Y0YBUUG0y5AQHhLZ6Cimo2sbGma0eizYnTgwyRjVOjBCjxAVsFdjqIoSPaoJB87JI2KpW9lmcOfaZo31g1af5X9UoxNiQ/A02PMvOWCPT1pVBUIE/itjBstWccujUfNjrK8qCJ0n245LyU+Is80da5iVxMjLFirPKRqfAQV1fg1Kt6+u04IYpo65vELOyVup6jv48bSHrBciYui4rzoo4SyomuAI1uJ5UEaRbFeJ0PSUCmq6nZa3X9YzyRZKoR5gqSowpFBJXVkTnekL5XXHmWdYCsy9i703X57CPoOvUrOsiGYnOXbfqU1AEI9TvqyuvBgNJAevVhthhBg69GozTBQ692pMyv8ATr/Ysy7eA+F6kKwT0wKuNsWkgKFGvyPQq30ztFnv1gjbp1boouwcpU1Akqdod1kngrleDGWbtQQa9gPNqj+oCIBF99L+gkdK+VQqA2mZKHxPPvdoPZCxBoecXlP6i2bU/0vMnjKi94eaO4I1Xw3UAASvZpjIkBOymV3uH953eD2T4YHWfbPEnWgSLcQHfq32JqFCD
6XhtD2F7CNuHMCH459UORG4T2PFqFdFCgRJTxT6s4IZXO0TOQ/C7BqFOUAZura4iFaBUV8J3JihfaK0Mo3VBmRFqm6htE83bUh8A6fRaIN+pwKVX21ZvAOHwjvIDoFS+C6VKUELPWMMZajjH3hpwz6td4HwFKP5LJrlEkk4IMiPURFeZB0od1yLxCLiZV7th2hskg5BXa8qnDpBULdjKCoKDR2DJkbIHXq3NHG3kgL2ygDDqREVaQDT2FIw/ZedGyK8I2kkrC6CUHCPhMRIbx34xUIZXAg1MKCljHvJqbQGDZwGDZxG3nGpzLG5OxLGahqgq+OPVDL5awS+vZtl/FsQ6dJJjlI9bPYKSYA29tiZTY21dRdaBVkbBBpiygY3gWla5LaCWdLn/4adXy9Obl7mrVsCWvKDRXq3Ifi2KiOTVlkC3QNerLctUJgB+r4BFK6g1Kd+zgAyoVYSllPkgSv1pUXUE9JJXy6gyIO0ZWRiOBT9EOBPnlz/kA/Gmv7wvIWuPWZVP/kxo1/XzcDa6CJ1LNEXXOzjK0PUrzlfXxJswQUPpf+I0GdaiaXT/kvNn/wKl274UGBPnDLNt/wbKje2jCNu/gsLY74Spj5QRlbPfwvGL7TexS2v7DdqC90854fdPMDv3j6G09tsioIlTgdF9/0DGm+3vU9vt73E7oF/HsYDt11jToegBtl/FXr/tb2Gc2P4ml+5+Gbd7bL+EJam/y9Q7yqyLs82Fqx/AYL3/y+sC/R+cudj+jJR8hy3745rXfw9TTLHrbfsT1viGpbP/BZ26/4+t/VQo+gN26n2RNEQ06A+5KPUHvGvQf1Vg4Tgs7EW+Ddt/xuLWvwv5hDuKtt8Nf9xiZNh+P+Tdo0yTti/Sxac499CkdPIP6oVOfstA1nUwQyd/oNPo5K/I0OJ8YFs1+YkLT8l/XHuSX2iXTuKkQidxPKyTU+Z8D7M806mXROPWybHaFHxlqkGYcRg6Izr1ModQ8j4MfGBBjzg41sm+QnW3YbpNDL9kL4y5C1mbxU5Kf4NKf38NQ832i+RGnphjfyWVyFv9Ffp5VaO/RCbxDkc/LV+T7fNuR3+V/rgyD+LE2LFRbgr1IyLlirOIZcH2F0JnHufftg+lTCcDtNr2NYueUxj1uwoDzgeLbd9xgFvWAwXN4LJjRJwSjsdFfjJRcXZw5S55KUwXeAPMeckLXLcUFErgT3tJ+WgegGcA+X2GLRZBWbUFpe6dPmQkvfOorKS5wjQIvAbagkTcY55JHuHuqKCdAfWPl2xBYRVMAiRFExOP4ApA4huYDoFvQKu95CnWVUFj6T/3kidYkASjQ6A/8pLHMtkIaME2SW+D9DZIr2CMAveBVhIfsIoDfChAqWJfmYB4THzyknsocA+F7LG8OvPUmafOPDWG1EhoDSG6M4/B1UkQF2Wa050FSG26E5VFMnnIDIcs4jDMEJdw3Ynh6FDj8OpMHCNrjO746JcqeVMFb6oqkpHgOZatiXkk2VLGLcGJX0lQkUO5U4Ao1lnndlBnDZtfupOFuWVyE83aRLM2hTeaVq26s4pvsJPhyTvsXQVxWJ3EWaMA2l9m8iQrXhExKllCXAkllZhgF0d3gs8Af+Ald2TpEHgCoAe2sR4JfgLqnoxnDIRAZigBkf9wmvToybQgY17QvwO6BYngx90Zs+4hzmo6A+Va4uCyIqaTRSAy/IQZ+JF37hU41g+T8nIkjqVaXnKm9gHCdCAYcCfDUXd6oEjDUFRivpHoG9GCRxI8U+DwNxn2KxzQHeyd6c4nO/qD+MVa/hEnmE9wUiUfL0xZBTmLdQ6JVWKdKBKX4L4aCu7RXyFHDuT71h1aMupOwKS74P0faPtjA/5wQq47ZXZaCWqB7mzJFKE7V9DyZMpsAF4B0gu6c8MhxzW0c65OBc9ETRWHM8EUdgeCTwBw+jhkH41edOeU2U6UiPA4SvQwOzeI+0ADco+YpkUFMYdbgjKf7wPAyi9oj7lT3l7LYd9Y
5v8GQPtAIfMJiZ8kmYBoTjKLu1ugXQfGYp5M7+YaaD+Iwo8BZ5kBZ5kBdm9lPWgAXgEYfGP8HqNZY7bwBb9fWOsLUj1zDD9jOD9j0rhD/B3j7xDf4+jucYLoIaCLBN3QhwpuVSQO/ASIRpjbgdmALDR+Axh5BjoZ9g+kXjSKZSCov2dH3oMZ97w6WdxQfkYc3A00xTXiOqZTUyzwR5GYI+ahtpniCgyZi0nRJ0xxCQKBKS4ziIY8RRryFFeJKRhcFWPEOEMixCiRO9imuBiWkeBt3eI8ozQnlB0ftrumOAerpaKD6m+KPvQSUzSsUiYyGfo7Vtitdwx4ZkQ11jIsdnAHWO/EMMZ3ojC0Lv6waGxCmOI3s89o7FT844+pchNx3vnjDbdsixP4RcnWyPgFNpviB+NpZFYcQcAsPkGANsUBLusWh4wYYyEsvuJ83RSfQ4pfGNODLVXxjgJq8RZXaou0DC8+wtqs2GeieyZ6oP+CeImZwhTP+eMat6VN8QYyjCl2cPu1eAUjsmILI8QUeXu2KCLlqjhNGoIVT2AyVTwNmd1mgmPiAbEiMpsp7sHwrcg7usUasR5WUiWNh8RNRohsidJL2JcxxXJY7g4J32VZAS4tFLfxje6kFKRvXNjWO6uYY3d4/XNnDaqwqTqcteudomgbekeWFPRcHmYLeicHq7eqxV2Fqmjv6OA4+wOX+VFoFL/qlGYE9dQT9X8LoJNefV4oETBePSHlCEhj6hiJAidePQajGUEncVg4BSQ0gqQR+ECwwJtXdwhzCLNCuIDo0HXIPgJSscY7AoI1r451U2Dew/5GBhhxXr0g656A72EP5BMocmydpgaCUmwWg0hQ5L/6BtTH+rro+wJSL75LbLB4dXxcAjtePQ21v55CfwMbXn1VpDIBUePrScj9dRpU1VdY0wqqls90AeiuvfoSCFpC6a8YH4KiLdex8gmASy/Mh8vk9WdZs+rY9qg/oV1PqGiESRaovfqQ3BjKjFDHFkodX0IdI1lA+P8IkU1QhLU6xrSAtAx7HgLopTvQcoffPXZjD9cj6l0wuitfWB1PDAhI/X+ioAqIglr/hfeX3h94f+id4VsTFAJwxiQgP3FmJSBU/4OAJ+gk/hNCgKAvhX+omAW6J6/Ox0WA0u0yJ5SB6I8JCp3Ih11/g12VoPRrHYOD34ioHHGvTplLUN959Sp2ywTBxwobVgH7DqDiCdqhV9/H3Q9BX/LvMXwPt/xF3J4HoC07OIkRtDIwtlnWNvoEa3Qd1lv1LfjwTUL/eAFa6fwSAkqIx1UJ6LAtT/TZEUCqvAJLruDD9CHK7yIAYwfzjYBkPGdt54g/g7or6MtIOVWrAHPp1U9UBSAJjtllxyi1DdrbGAxUdAUlvoXRC9VZQMhpYJg0oF/kUzCksvlVXlrOZ6AT5tPQcmEDCGcJOk8+iSnC5lcwWC0MB9vi5EKnGAby0DW/jjvx+TWclOaz0AMs7BB3xLHchc5j5bB5H9+CzTusizY/h90Om9fY+c3PY920+QSWfZtfhEqbX8Dhls1HqazlI2HmOImNYVabFaj7zYpSgp7llC9q4wwveejZBkyH9AyveOjZGuPXVaQvTlr5IvjMYLypZ6uqLZiST8HgsZCO/EhCrJwt4aqGni1zt3K2QIu22SKEw5mQ+CPOPOvBvXs9i4dBEdzlmEVVIOhobjTzIUPOZA4VgXJmseeiZxoWTzNp/ZU05BOrrc1/QLvLy2oHXvyDhUB+AnU7/4Z78fl3FUF/TGE3rWcneHPG5v+gqM3amL9nx0w8g9aa/4YiYvO/+NRs/ofd+iBpLSzzXsTph85j6HTBN5vH925p6GfzvZDxL6TjOUz2Gnb7mIQOw7BB6Dxh/83mRyRVKJkXp80BdKomgidhDU08F5BvwI7C5o9IKs0M8lfY7sh3VAwD5oZ0XDP7OWk/445L/jIs5SKkbRvPCuQDdJPN74oKZfM76ECbL7OAEuRAm98KU2/CEtXmDxlV
DcPqHLk1Mmafmxr5PYUmVsKRjUNig1dppt51jjtZbYpT7RiOyE07GjqR0FnE3qppL6iuIHbETTvBm2RtHSaYw9XCth/+cKFjsfK38V6DaWd5Pa29Ecash84axrxpF1VLkMJhOx/G5LDz1k6qKEpdoYl1mxJhe4mX99qZMF06dFKhswpHv01kIF/nMaGa9hMGqmmPYDZh2rAGNO0BbEFM+xXG/+2xaBGm/YLrie1n+u9gNd/u0Sa93aVE0L7FpSbT7quVF3Ee8dIKXrEBxRSs2ry514ZOZdozGSmm/Y2c+u0Ne1Gm/cf4d9wTbNNc1LQnuHvZfsPbLO0v1vmPGT9h2GnaH7xT0K7gCZL2AW3n2/uw4zDtPd4JbNfxYbdr4Q+RoxriVMnFLdKxGaYuY0CYdolhuzDkb++EMdsMCkjaJcSpNiXD9jlkyPYZabqBRa1ph7cp21e0om93SPpRWEqLz9e0mzg+Mu1GGHgaOifYeTHtY8ha7Tbks7cpn/UpVLF9p98+oTYWtjBnvH1gG62wydkod4w3Sd6+lBtLWBlT09s/CN6FkrRAv83o38Um29s3/TuyVuq3X1qmFLb54wcbJroQMBVMQnThQj4Lnb5B4YVzJEpfcyu2cIYJr3CDL0H3FnFHpLegpoLz2M/UPe6L9OJETou9KJHHRz3a9Oue4wtqPb6q0jO4Hqh7Gls/ujeH6ntFxhRkzdS9fJg4y1rw+I/u8airtxZGZEInDeW7l4JUo3uruOire0movr0ViH26t4w9e91bCqPq3Abo1ZSTdaB3GAZWuAPdO8BlYN3bZ417eAWrt6t8mdt7O6x4O6wRIoDubWFbpbfJiHIYUcKpru7dYA3pUQnvdXA61ruk/4J4zpAzsuUUvds7of+Y4W2R4HXviCEt7Dr0mnihRfcaTPoqA0X3xuiY3gvXmN4zS33C2OiNsJfYG7J3+tjF6D2SrQ/Klw7t3XNvtnenIrKs9XoQhHSvi7W0d8va/1jvLz4V3fvBWWBvhsu9uvfN88PeF04DdI97IL1P4gd75l35YMc0TMb38tJllJkuYU9Sp3fRJekdyGe+Taqx4Aos5u2yOhJckoXFtxncYbBp3EjxbQpW7HYV236+zcoX4NsNPGXm23V8+r5dUx+CRVwcsAVMR77NY/bzbQ5d7lvefvXtHNQm3/qw+8eLOAVxLOzbLbQJ38ZxhcHGSEE0jI4I83y7KGPetwuyxPp2HmK/z9d4zGQV34SZpHCSaCZpyOVmklH+rTjL1PInK7B/nySpwE5yYcJ86BRCpwj1cbIW/liXVpjJBi4kT7JhkOGrUBPLOz0Tx4vXEx/7xGYyx9tHE435epKASmMm87jJPcHDJuIsQlcxkwhmqkmUyWLqRzCuIjvSjnfIEr6d8gpA+MyNb9/I0K+QXf9Cjn6SRR8y+nz7i5nXtzjd8fGgDjpORH5QcoapcoJjejO5wM0J+yfiqG/vZDz7thdW04U+4ttbGau+7Qu5vn3EAb1vH8jje9b1hMsJdkRahtgY9e0ABvm+fWVXj5XZF+cFt17sM+5R+FbkCS0ONp5922TWhow+357Sf6Iwho4xefm2zVouGXGBnSHfnisMK5G4L8S5CYm95rUOewWlwbcdZtqSQezbTVyXsWXlX4pTYjm7HKs7HELbJCxgeAVCiW8PFJi9rzBq9xhRI3WHYRlVbAqn97AFma6DyOskZNZ0DdNw+hDmasEczOkDTbREQ/SJOEV0QRQD0QURXnAO4pATXRBjgnmZzVyQgOl/sIhJzwUL/LGM/SkXLMHm0QUyXgVXmDiFaz4uWGX+DO5vuSCNidIF63iI0QVrYX1Z3LgONvDYmAvy2P1zQY65isQC7NKCLl6kcsFtmOcOG0cu6KlnwYcw7J6p+8okxXnExSMXDHklIBgw6gnbwC4Y4fKZC15CWp6VuRHnNXTGsGQLJuGPN1xaCN5lUnHBNAz6DCn/CH99hc4/Xv4IZuGvb+Xw61e5C3F+cO7n
gj+8keaCMhZxF5RCgrdCZ5PEYe/bBQGZuhvWssMLL8F+2NY92DEGFeJBmOAQ2wUuqGKT2wX10KmR3iYLahCPWEGLeIwZ1AVtXkcPThl2gsnXBec0TgzOYNIYXJIRF7AzDK6YqkO84XX14BrLyQTbNFomBlmvZGLga1kR7CppmTJuBdOYx2Vik9lBT7C/qDHnyQo1weaGlulNhILJEiOWVV0EBL5pqmWKw0tjRRafYyF55eNNLhjH6EIfG9oy5+lrrzHF3pOgfvIa77CRFqx4jTcGvzF4gp04QZGIGjCmFohYr/Gl/Gtizmt8yJcvkPEan8j9A1Nowb7X+IUBiKDd9xrf0ioBd+M1ZuoNUBA6AiykjT/lokD96zVocSUIEu7ov6P/lvTdooYuvV1G8i1YQQnuqzvAvNfAS7ANvATbeGD0A6JH9I6Y60k1ANKcAaIGDBzCO6R3rCIdohT2KqKAwKLXeGYJz+TLC2wDBGWBaLSYp8XwI0wAglI3nmsQcNprNJmzyYacyHwhoJe9xql8ggIydBptVN5mEcfwHrPMC+a4YPAlvhTgntc4Y/gZw8+R/JzJr+G9pvdGloBGh9V2mOwKcVeM24QBG/DAa8h3BLBCKqRXgV2vUcZdK8EHr7GD7SHBT6+xi40CoBQUqOg3sCKR2xiWgjIUDpQ5BNqs16igwgrr3oPCI2gl9T5J2Gd4jf4aG1NH8joDqv9DJDmUb6kB7bGxAvt4wRWvkYS1rKAUuoT9UKAwdRkfO1Dy4fNpYIu+kcEpjCBSrIpkIBB4DZlhAb7X2EAzRboRtmYxkvH0aANXPhu48tlYl/VDQMZ8AdKyYERGdlGkBQFz6TVyagbQS14jD2+eXogHAjL850DJHE6mG06+PQEw24c41TCQkoBSjiWnYavViCFLDHTEsVcmKBJ2g6+3NvB6ayMKmqOkeQG7cY1F0LsIeqEMCCADLlY18JKhnpzA/k5PTimFTniVQk+O+W5fizPFEc9TJw0YMulJk7Lm5IZRHUipE5qITChHTy4hXYrcAT/kjgJehCgoP34kaCpdBAwENtbxewGh9ugWmGGK2zxSZ1cE9e+d4EoNkb0NZHyAd5QQLNaROLcp+Hcj4GZ7CLAp5Bt2kK6zINhHigf43MYzUhQqSKHfUEuyhfIbCLg9Aw5QgWugEJPsA+cvUMM8yiugXvtwyHp+SGWNLVtG+GUaBbwh5T0IWnlEuirqsfNNxB1MBKOozd6w8fL1S217IO4CrbafQ6RrLCEy0qEfpPjL4KD5uaH/QDBORrS3EJx2wINrYAlEuPEd25JhHUiuoyBRb8RQUxIMAZn635QIsszjCYKjAttkZBps2kZmX6OV+hHB/twn/F/k8gyceFhjpEFqtPICLPVTpOCxx4bEgaMvUNbchf8MNdoBGGQvXskg0nBO+j6ZEGX582z9YgC+YUCY5UuUW30hI04RotERroXRoReQ0GyiEZ9r5D7otzF0pJ+JMjKLSHDclpBnvcN0xyjkGYNV33A4nEQQvgTS30Chn/5kIduIrM+Q/w7DycYrTMhWZ7aIGHG+jzTRZzL/GxiwAQlf8AyURzm8qjkUcn7PLG1gYiR4jHbaIqovgJ7LDZYMSl7QsigbFgdPd+JszJQlINgczgkGoKs44O8EcREoIrrQcoj26k00yR6iPFfnGL5D8xb5mZxzhFxh9I9BXQIMcb8s8PicRQ35Be4g4/0JewSF+GsYj47tdyG/Nq3gL7hg4igwEyNBYPc66HEjfu1XMSLao9lqFyGPDsvIzk/IzPEzGbBOlG2fWNv4jbEowF/AqLMRnyMCI+Uc35brYXD4UXJhBfPNL8d5/AmRqwh26yIDTfqSSJyeSgneEW+h10+6kPD0ZIwdYj15hSYweeb094KNhwmfb548QdWfDERWvM4wyZBbT2PLRxjGhptvYx+vXYwddgbHc3xDawzzEjOeZ5AouPixgJ2sMd8jG/O0eBzH5rwZxxiUwg7eeBUnC2ac
Ydo0I7jjOF5iSDIsakXEKjPOyzpqxjnsVJkxGCnOOh71GK8xcZYlwpzRjLexvTaGkYgZ7/KlkfGOGgiWmbSk7Kk4fJlsvMkKeSQ65vHouI7dJzOuMe0+hAMz3sOx7rgS/jjALub4GAe24zb3LMenKoqYE5mjxWmynAa2LMdHpKKFFzjGV8zCF4zHN6z9msw8D8s9I42XWNLM+ILVP7Cke9yBMuM+9P/xI5RIM4Y9gRnf8u2M8V3o9HhuPH4hw55hMWTGrzLVizPGNsB4GOYd8Ch7/MRHU8Yj7GmOP0nbB/ELtopmzCemxxM24A1Px41xr8uMp2G3/XGZnWEXeswXpsfffPdj/IsNjfEPtv3efMr+hQmtTt4c7U0KeBhZtFCLbVT9FoUoqQtf0MF12nBfEkNGF/7h+CMdx5HKGw0BCp88VU7HlP/oueEKbEbcMAkB2w2XsM/ihsuQYN0wjet6bpjhPf/hapgkhRfFhhugwg2zIkq64RqDsAvohjI5LYhTxJ32YQ468lCkHtE0h5pq2RD2t27ocG9s6PMltaHhPaihBevdMIanUdwwjjv/wwiMxNwwGtK1iLtrQ1j/u+F8mKzFuqmKDRt8d2zYhLzihic4sXRDHNK5YRsa1/AYKvHwQt0LXqrItjhn2Nd1w3MRktzwmoXdoNfdsIN/IXDDK3xBbriJq/zDEpXvYRnbOG64A93MDbGH5oYBbE/dcDvk0gG2utywgt50wz0cm7rhvgLFNdxCG9axIeSGVTxgMOSGgxtOqcYO30nFmwiMbjiBUYEb/oPi5IZfvCY4/MDjAcNPvG7ohj/4Et3wl8m/cR1wOFMWNPyR+z3shLvhHS4HDm9x+OqG3dB5ZII+q7snPsiwd8MRphk3fOJTDUNMZm445BXA4Zg/XsnTZ2YJ9zvLK8qdeccq/0/AyhooOLwUdEdx+N2JoB/fQ8gv0nzAq7sVwYM/ej+AJw74NhaU9elYpV6R43sLRaPQzW/4kGv+R8CcriC+YVDHAWsaBQhfr6Oc+S/4v7aZJouq8gIdFGyXfhE3jCHdLSgy36DRrPYRcoHGrDwAHpFvkc0qAgcRFHdbRbIREu+do20GdT0gh05miMuIqyFxvIGSR6B1KQ1YRY4Yfpufd8ZpwTjIkMVJfq+hxX4OdZsUiLb7SLeN4swiUIQzEHON4kDAxSbzgX92/gYJmgLHVyDlH7z6ESW78RL8Z23B0rzAIkp4R2/YNDrClpNI1jxFcdUuEn+iapcrIHbjBSELOwipPwnusB/W0C9bFgAmabbQLPeAASjTP1cMQRb9gtZGJ/AeIWBrF94AjNXX9/TnkPr1QvCog8Tk4BwaXgDpehcB9oYFgRMuisrXQKO/mGZVtxxQz8AnMFKbI4T3B8DEIfJH0A5/Hen19jq4AVbqdxBll0vw36GixRaquBuTqJHgyRxCUbZ72UDocdhIDqqNBMJbrCGHEWIXMWLtFfz+Cggtp8hk8N4+o0e68JpD0GzXwXV/AUMwcUB6MJTcFAWaGgfXeA3fAYa0XdliTnSAviILp+j+ADWYyBD4yD66Qs8sYjCZOMfpHQrRh2C13QYn7JPIM2UanA6iOLoYcBEZxLEVNYjBAlgP5mHjogcJXGELtEhv4szhlf3AYSqygY+9SxtYHILZICZKqQ3ivOSFPdADcaI4YQ4WeL07WORBdZCAMX4wH4at8DAd+57L4ixhW84Gy7h9F6QhH1jsfObE4c2rgDv+NtjAWXaQhbRigzXYaQfrIX0F2lQExdDJKb8oTh5PsdhgE2dvNtiCjZgNSmFcOcy3gwPtYFckIhsEeI3FBtsiYdjggEEVTPY22OOPfZhbBDU84x/U2egqww+JLYYfseGNsJFNPE5jgxPc/7DBKe4RBm0FDuIzEOcCO582uMQWiQ3O8MKpxXbiRJxrMuOGjwoEHZzbB1c8xA96MN0I7viwfnCLu3dBlyYMAU6dbNDHEXtwH1b+gEP6YESEAagNBixriMfS
bDBWIOAVty+woyv4ovwdcaYw+rXBOywcgjc8sGuxvYuW/mOVXzSxDz54WTH45K3B4EeEbHF+ycJviAUW27towh+eetID3o4rL+Gob8BTxwH3LAfL9C+BL3qQhFg+WGFQisN0NczNQ8fDFJ6602VkcVP8PYSbWq7yUz5rNI0gsZsuwiTGTedxE36a5B3z6QqodtNl3r+eprHB6aareLhnmsWu9RSnQ25akLHvpjnsL7npJi5suin+4MhNS3iCwk13Q2cHkq2bbvN9nmkFRntuegDxZbrP04DpngwUN61TQprWwlxVykHTI26kT1uQeaZNpoM47Kan2Fl0U9yMcdNjYpvRl7zzPr1Qov246TlfGpry+MRNb3Bg4qbXlOWmV3wQddrBpfbpHYwc3LSHP/NxU74k5aa3MLJx035IyyPLf1C+iEzT+5B/T7ho6KajsMQhd+mnA1L7SoFuOqZ4N33hRfrpMwRsN33nm0/TKQ4w3XTC96Kmb+pM8Itt+Yc7MW76ibsTbvrBJ4qmv7gU5KY/OHZ00xl5/Y23xNz0jxtXgz2Yr+lBBftSg0OoYYM6LjDqQQ3nv89qfkdAX74Jjh8E3OBK8LQtcLOH31sHgqlNAfM3QNSrgF0YwQtIvTDXu2A+j1RfZXjjgBjLvkeGdyTYRTUrf8hQnCHt7jL8LVS5i3Lnc/ChxN1nwBMAtcxvCKyhvHYBhcoa+KySNwJx0BffFygcoxErqO38Q3CZ9DczqOjnkThE2rHA3ieSzZDYnf+wnXWU2Eew3ka1SKabqG0JUaU5xBcRvw/umLUEspUXUJpGHBJEwc5vUPwN2pfPkOb+ELlqUxRYRfBlADp2GYeGnH4JLKwj1Rwod8/g9HcHAadHQLZyAXQdkfI424oSzkDMMvi4vARIgrYomLm8iug+a1lEdrB8gdyPoMnL6KNltGIZ0ctoSQzRpo8GuEPU5FZv2F37DEcTFi/RTtS6iLL9KMrS5pMJMKTGSNUuMZRNIiejoP2fwD66xT6DM/to1SoBgatg3ioatQoGnaCi5gtTn5KIHxaHQvwFVGKjGFD2BHUtOoAFoFb9k4UXHbOIjrHbVZQLvu7fwodRuYz+T/zI11DGkx56UMLWxQAWNzrbhpqZPcYlJJ09UfZCYjZ5YpRtYPbNNtWvYAsn0DobbgdnOzBKGNCoOXuNFw119gZbGTp7FmY9h26bvYDOOQhg8pcNMEfo7Db2t3V2Bw9M6+wuP99sCYuHHuCSqs5u8v/9slt4EihbhUl79hC7KdkacLCDOR8PWB+Lsw/LdZ09YHwFc7rO8l1yPTiHjq6z/1jAF61Asm9s6wSnohqPTWM9OYP4oLNUxrPf2CjQg8sw7AcHsDr7C9NljbeiEfiADaHsI4wI9OCCJi7ZW4V/Jsp2Ybyssz0YMmZ5l0Fnn2FLpQdXmJmyYxweZ1/xPpzODvgqTHaIKVRnaUisByFjb4h4XkUPeNFy0KD4Vc7RCHJwBDvD7CquBOpsio1PkzktJsviD9d0dplNWuH/l2WTUDl1Noe+GBwrGQXZAs4Ts0U8FKqza2FPbLAYWj1lDf4hT2etqIU6y8v1OuuLpqkHp8yvWT4Ucp2d552n7AKzn+DtbJ2N8FgSb6QjRYxkxmWJaC7LhC+Q9vAmUAAUhjST0jkCMhKaK2Ck4J/XTIluLrDrNVeh0jf5ykozo7oAve8107hNK1j3muuqB7CSbA3eNXqzQibg1GtuIHSDoTiSafJIpomDmiYPappF1QBIU5sFhBYYOocTNuCe19QQMAVleW1a2M038Sd9TeGVD3RTr+njLR1Bl/WaeMVFIOM1o2h0FLYNzQjkDcGi14wjNA5WxOCLwTfP1s+j9QkEJhC4SBoWScMCQhcktHQjUpuArLTNiboHtL3mG3xv8L3D7hSPYUn8lKyeIu5TRMHmB87rBO2W1/xSDwAjZf+DrC4o4kdzBt7PwPtv1dwHGgn4VXsA6SwYnQjMe3iKKwL89JpdGVUC
Mtqat7hzLfjtNe/kMxRAaA9/RikY+/WaD5CigBJyD2t9oBTfZ3if4Y8QfQXhH9JPA4TmgGkGeNkCb3dFgQgZ/R8l/Qvm5ib/HkAQ4a9gwCuYMyZDxvjdhMUK8J/XbLDpDdh4N48QeYTELTxuKig6Y/OY1R9D/G+2lb96ByciWU8ZccqIE9Z5QipwXQVPgDW95hmTnDHJJUfuJUbuBSu9wD84NK9wViooQ7XDwdbhYLvhALgh0dcYANcYFmX5UgWEqhKOpoHSHfgPTjzpJWNvk03bZNO22R94yqMZiDDc5CuXeN8rDZTSd7BjJShx+zI8ZBTvsbP2RLtpVtCKiop5zQN8JQciDDfxlIiA5K9ij7uJB0aadQyQuozMZo0trEkLS9JzAJkuS/hHLgHrlV5laAgIVSXIXUDtlV5k1RAwc14J9/cEDr3SO3zvONMsvWF/WfDEK8GoTcD3Sv+wy1b6grBd+sAOVwmPOJQ+1QFA6vpB0h8kxZWJEq9MlL7VIsBueqUZRFPBO6/0B9FRcNOLx0Th2oFj3Tkcl/qBMx/zbkXTkLT4Y04B+XRKcxhsJSdak4A0wRctTUDCYNMmkPFKVpUB8uliH18ADInLkiYgREZkuhDA+3dmW3RCbQJe4zK7mF9KOA7Qhn8PaUpQU7TZ4tVss0l7EHMIO6kSbv6UFrjUG953NDVM6GYfZtTaVJQviWA6L4ACj5mmjdsa2sggjolzAgtZbZq4/1FKSPYSbw1p04K5o7ni+ms6zHmDH6XQ6NScQz8zPAM2l1grzQWXHvMAyaC0gke3BfEoienD4sU8sqH8G01ziyuF2uAWTCmJJ1pKtGcxLyFPnildmFda3ZsxDs+1GeJXCRYvAiMJeMKsrc2ItfPvQA1u5JeWsayaCddQ88aXBw01Em2muC9QSuOug6D7kKAZM36zOb8YJxnY9AnKsFqlF++zlFKISnEI4XVvAVG5Sll0KS53lWRhnRFFsDKWNz+N4TMtxsfSW1pngvUwgYbya+bDZuKdO20WSccCWxalsGAifITOxPGIdAl/oVrCX6hqk+IdC7MKK1GTgaBk0sy9HNaLi07aJCnImBXsLGiT59Cg6m2KmIC0KfDHOoy+ChafsCB4lWU4//OzkqZNY4XXHCu85lhJ4U8vKyswsDKVJO7um8oSTBhMZZl/sFkpkLODRZyIVHIsJw8h53cPtxErGywmK5ONqaxR7FlgynWmdHwnvSKra0Ecg2t1FQsODeaYT2PbyGRaMK/MHMmUaDINRGSavNGYOcF1vMwpz5sybdr2Z3j1L3PBP1fNXNKAM3PGY7rKnHDUZK7BBJO5gZV9hgdPmStWwGOvygKOfjIlPH2fKct6bjI7jN2FIYrJBEy0iKOfDO8XZirEPVzqM5l9HEFlavyr1EqC78pnaAyfOYQCbjJT/njntYDKPFZfk/mHs7nMBy8HZD55syDzw3PGSgz3OzPfMsWYDIzNTYZ/WVqJk/o7knGLVyJMpkv+PPKKQCUCXmbuGQ/x1mRGOFIymSfWP+Dj8ZUo3uTPjFnuKyOemYN/czvQ+Ex/9/Hascms8K+VMvwz2QzeXjeZZfrTsIfNZLDGm8wqjrkyKeIGRCmT4QXczBppWOf9jQzvWGRgV2MyORw8mUweL0SaDE9NM3OinpiMU9F9cXwOt4zhXwVlLC9tZmKY2EwmzoGZ4S6kyURpBZxZYHctsqQE9h5MZh5fz8DwbYAB/jdKDxz5u53kP/IO1rlBtUaxdpClxfZgQ/ljcfLscP5FQaXNaxiVY5zvVVo4rawc8Q5qpcGgJt+HqFwzyw0GUqWDv2GyqSpeOU3VsRdjU/gPHpva56WkFOwVbYqm8TZ1gEs6qW0+l5kK8P65Te2KCGZTNOm2qTJ3UFMlaCc2tcV3UlOb3IxNXXHXNNXhM2Ep3iGwqWs+n5k6x5+cps6gONjUJX9cwFTQpvA3yTbVBh9s6hQ7eakTbBmmmkR8bzZ1xN3ZVCus7wU3hlLP
3MZMvUJOtKkxn1pLDbkVmhrgzY/UE1sz4mOdqYeQnntsdtlUn0U88rmtVBdznE3dhiXdYb8x1Qt//NHMu8LXS1MzPtKR4usGNvUTOp98CCT1ActZm/rCC2YpblmmJrzPleLlrtQ7xEmbgrCsy/+wP6fLuO6iyx94/KT8ie9Gl6f8k95KidcXym+QB3R5Ah2z/CdCo6nwNXpdhoisy78YlLr8DSM2XZ7hULb8SJPwCvpYl+8xrsoPxB5uRuvyHazqdfkW9vqmwrW8PObLE2VYHuryM7Tk8gvGjS6PeP2mglN4XR5QGS0PEWZT83xENpXAM5o2tUheL3CYRcNBgocqbSqGHeiU5ZAzfLY1NUctrnyJpbp8hi3xlGZ8Dh+5TRVxa8OmCnxBLrXOAbrGPeFUNhxqG6wnxUuIqVW+7JPKYEc8lcaOnk0t4zQhtYQ98FQyHNsruP6lczxU0IVHPEbwmFK+dEEuJ7PcI96T17kNPO30OAeZV+do85DAFRCT4ON2JlHmcX2ihCkmsYt/4Ejs8F9EEtt45MAkAgjVJlFR7lmcfeUWxdnDU7AmUcfRgEnUeLUpUaXhROJIOZk2Ei28imISTSwpCbztbhKnysk0nhDdF9UcY6fcJNoygkzikpcBEheYhhPn2I43iTPcvjGJGxw1mMQ1L6YlrvifeAleVUr0MDUlurjjlLjlv5Ik+sqhqkc87GES2L43iXvwxiSemHqEm1mJIaxyTWLAf8BLvOIvYBJj7OyYxAv0B5N4xlyeeMfxv0lMudgkJsrJNJl4w/qS+CKz/qkIOPfJ1TvxwSp4idEkfvjX04kZWfuNHzq3i9dqTOKPz5rkdnC7JFeGzafOlXipJ7eFP0bPbdJ+JKFDrs/xH7ETDiPUJCzbbvhf4wkcPZlEDNK1SUTYv4s0kUgsYN4yiXmsFIkEF8hEkl9JYgXXNhLLOFgxiaUwXQYzbiLNBqT4B3KJVWbNwmjCJDb4bykJWDGYxBr5WuScnyjQdicR3vFL5LC33fqQFVbg02t94i6toAzM1j9ZzQVkeLW+RAcWEAmp9YY9bqAET6COC8LPV6MEZWlqvePVGcEVr4UbGjp3gYc9WrCMFrj0WjP4ZizvB3qS4Nhr/SpbBYr3niXfQ1hvPfwPRXBpPbKWR9bYh3AHfPdat9AqgBteq8u8XabpkZIeeNe6Y947hj/TT4uY1gvTvJDyMfOOGftK/yvTDBgyoH/I8CH9I3BuxIAnFvOEyyStNsyRBEU1aB1jWWydQIUTlLWtdQppWRAVNmCAKmiFK011DnARr9UiK1qYNFpH7IIjEUZbHTzoJfjjta7kSxRoe61rXH4HSm5cbW3BmrV1RorPWAmug7XwR0StC4ZeMPSS/kv6A5FpBHa91jbK3Ua5O/jbC0FZKFu7/0dpSgk7AYLgeBnDsQVJsbWJ0Srox73WFiZKQSlKJu9FoBMSDlX0Anj+4LWgwQlcea06LvkIXnutPax7gtLUfXr3MSgPyIADeCv43IHS4lUY3QuK6tBKqUgTKFNRK83wNMMzsJEGLnmtJVw5FTQTrwXNqbXChCt437qVpD/JTNg/a3H/rJWHmCKY9VoFfA4FnCC1itBKBf95rTXlHwOdkLXO3llH72zgqxY0Z14ry4GRlZmsZVCGYXNoBd2CFXTLkaGOg9fHi3qCEjzHdHNghsYEKSihCfI9wcTzDJ5HCqx9Ak4o5xG04I3XiuClEkEhn+9pACVfjP4Y/XGIBi2+vaFzT9AidG6ELdDcA/rl74vv9uT6eNw6zj8lieMdERc/4HFgvMK/zohXVWRJnENeGYnXmKIOe6p4CcW4eJlBm7Q/im+FYQGL2w6z7uAB6vguPnYXP2Pqc/wtSPyC/ksS0OG/ncSvGHQd/riBdVa8yaAWpAMXP+JT4PE2TkPjMIMR5wSSmIuf8g9e4gOehsaHPBuNj2APFX+CeO3i2LNx8Rf+32N8jEu/Lv6qfKS4hZDt4l38jUq8xxrvlA8i
cQnDxR8Y9IiHsly8z3K+eQIZn+GB8/iPTOUu/otXweN/xLewmglu7MSnNIKKv/OF87gsT31xPknbPybg61A5/GmAzv3IKq1zn7jLmfvgxn7uCxKbi0dYcRRGYPEYHjOPx3kgHE+wkHmeqMYX8PB6fBGXf+JzsLeKa+yjuLihwVTc4qNxccdDyriPQ+L4Gtu3jmPR+AYvCcWzPD+N5/D1u3ieCYrkzxKPkuPL/POb+Eo4TvhHpPFVXgiLp8IUaaxZLp7BLzM9R1vMlFrrYx/KxPSSOzFLf7DunF7gn2Meh1RIp9Rdp/jDCXFuuLThVFj09mkT19enDcHHV1wZnB5BBJjiLsDjMbSv6T6eGzXTPagTZnqA9XN6SD10WsUDVSM8oaZHjmi5hTEyVDendWxk6tEcHmIYLWJjYrSA17pG89DepjUoj6M4/mFHj2KQNvSIV5VHNEEZZWCVOi1D4h2lmG+VmCSuEJex9WemJbxpOypCTB8V8NrgKK9cVRxsN+hRliVt4XURPVpnxjUGbeK7fSyzoaJSiZA05SXz6S6FpOlOGDbjf06M8HSLHt1j62R0x+b02IQupM/pd5joFQLQaAwlR49eMI/q0TPDcDPYTH95mWI0ZM4B9kFGX3jSSI94l3f0yVQ/YWHvmKX0aAotZDTBPs7ojWX9cWtn9F+arrQteZ6Jfu+vqLmSFBUUtSIqLuCKgoisVdxQ4RYXFBW0+uPfOSfP++VMmkz2NE3Sycwv48cQ/lMDSpPi37jsSwYHaBg12MdVxsEevrFqsAu1rmpABbJqUGDcPFQWqME2tB+rQYUSz4MTP4AnbtuoQYmL8sGxS/AIF8YGRRh5U4ND3Gke0LqsHr1Tf9qg4c8jVt0lWMMB1uCMCowHVe7MR2+Y1NXgmn/RBlf8EzW45BnXoMPaXfBQZwSpSDWIKAs7aPvTshYbjSD6O/pC8+jRpx/IGnE0wVmyHo3d0yOb5tdt00b/8ENOj54whenRAOvMkcxaspocPWMFfHXqenroUnyFrloNiQhZyI/ueNAy6kIUU4+ocVOP7rH4HfU5hHsQeApSx9idBin8kAxSRVjnSh1icg1SFRjkSp1g5xmkytKBQQp3TIPUDu7xpgrEPHGbeIDuC1L7vAec2sOF6NSFLIiD1Dlsf6UinIcFqTYTueYF4dQVrmSnLmlCLtVxfjX8aQlSvIebqkLrUZA6la9rkGo5hqa880Gq4R7qsv0MUk8Q+AxSOEENUo9o9iD1D4uUIDXETePUq8yWQeoFM2+Qesat6CB1h5EepLgZD1K3uHibumGB+uTu4ZJfkHqAJbnUPZZwQeoXU3iQguGpIPVDs3Kpb+f3x1p+YDMRpEaudO+uzm/uaYKODVJj3ptOwRpGkJKBcO3pIn+76uILLS0WX6HmRReH1PpQ/Acx8eIjcUB0gtzFe8f2QL8etkjFPrc+RadLo3iLyVsXuziY1kXco9bFP5rJLn7z5Kn4A102uhhDXEwXf3FLSxcpFF6U8smwLY4d44SzaPEN1px1ETredXHk2D94yzs1i4k9SM04Mu2qTFuPQWoe66wgNcer9akUxmiQSlIHWXHXD9DTCseFQWoKppJ18QAnLkEqoD24lIUYXZAysOyX0jjfCFLrHHRrMn0GqVWqBEhl6bVF3CRuEHMcAkt0UxNRkFrAoXqQSmOnG6RWGJRx/QIJKrWS5r+HlQXYzVYri7LSCGQpOnMCYodbfCpOkaz9o2e7CaKGTyBB7gVk3CLHZpl+K7sgN7d8UDska2lGPvlxpOs8V/g0dU/ydgOi5++YYg24v8kQeatBVt68YEl2etsgtpbk090zSLA8BWLO/jFscM+nXAVEHxVB1g/JGP6R4+aLpL/JRK5eyR+9gMjemkndkqRqjpyR/2yL5K1P8s1Ecm1g8sMLFv0g3QExM20QvXwN8rEFHL3T6z5N8vYNor4qfJpMM9bPNshTHrjYolf8S4buHUnlgeTfCyNvZMiyo0BsT1pd
urv2Q5LrgtjrFkm+DJK4AZrvHshXGhjMTsjQ7IDcdVwScyRnzyBqbZscuUU+LZHFft7xaS9iilPXfKpUSMpZL5BFU2sIooclPv22+VQ7BbE/Z47UQNRjRHJVIef9PUn7jSxxgeTt1pEuU/kYuyfmYDeKjP78znifOXq+G5JwimRxgeQrBTK9S/47qdCWb65rIOrxBWSwRr+Td/q9MSjx6Py+QVIPwD4DzGEVZGMPqJcGjHM+YdDZDUiGcczRMjnG1yD25Y2eY2avv2VNhSN1mRo2/B5R9WFoVApdIcnkQbQu8KkPQ6KyZ4cZTXm51vm0nSZLpkTSWiXZJ1FrHZCrFh/OmIgOGyTbsGXpTzPIHiRIrmNXBBcZr4dsI+eAiVfgXAqoxw2S9Q6Iuv4EsSUL8jYBzjSBx3/A2ojBp9MgYYXBTHJmHni4xUSO7hn8CzTxKUl9l6nINCHr8/kGiLm7B9H5HT5FryDThn5HVTK2W3wyjKajZ0fIaH8YZh/SjL005FPTPWWWQVTqlp4bAz79XoPUPp1fTJLcJ8nL+7fmm5cyiEp8g9hwl0Sd0nOuxqfLP5Jxzj39OsII6n4RRE9OHanSM33sWH7cU8mFHThy4kiFYc8FkGAxy7KshiSdB1lf/UEDx6p8Wz9AzM4eSaIGEqw9guhHeqrUNJ+uCyTVAUg2xYdwlUQ+YUJWkoz8QFQkJnNP0pkwEtO7MMCnX6bdi/kgNcnKFy0C0bcPfEp2QMxhj54vzrP2DyRYWKPnxxmfGNuuFMiRUiAHx87viX7TB2Rvf5OUGUv/i0H2ky5I5o7A33gDqoM5EH37DXJF3LgG2jjDh3M+yCKDpMhIV7cge0t8qEZMoeuCLlf4NLgg/3cJJPHJh+NNkAvGMgt8UOUEyDz9grQshlemsMgOjCyCkyB2D3ph/C60y/hvHwx5gEYXWSzPkWHhyJGiIzuO5B3Zd2TPkTNHao6cMrGvAp/qKfdUc3mXHMszCOYF2Tz/HdCvx3xMSppK9o6nXZBgNQlibv9AdHeBT1NpslT+OdIguVkkuWQ8bUd8WqCnnp3h0/0RyOIlUK24JD8mDDpedYT86m6OT70ms7s/pWea2ak32T6taPwNeTqH6JstrULdjS1xy1+CtRdbWsM/RFva4OWwEjagtrQFG2ClTSiQsqUFHMWU0lR1UqJ5sdIixMZsaRnXykohk1khZlzANJT52VICRwalWXJRfastpbCrt6UkFNTZ0jwOUEpzvhkKmeLZU0nBrIwtGRgtK2neVYO1nB+pBBXwzPpHbWJEPCdeAPXtC0htAWixZJjFAQbCP4HjCrC9DlT7pwzGWmDWL1pgdsCI3XeQ5ToDiPp7BHKwCzSzzD4jO5QZ+bykQWz2jE9RBeRlFmh2qiAqtUVylAR5+yP71To5tkuM9TcBiRlLH2XJPt0F6X4Bp29cVvOOMCHb65AvRz49IDGdVz79Y4J2WUbutJ86AJr1NRB9+wmCFMXvbA5kMQTaqA+iDudBZrbpd7VLUtEkyTJjHdwxpUydnt/79Ezdk1ysgwShBVlmlvZgmeRy0QXxaUHeNNklPLdBsCxL+Lr0TvI3w6CoC6JWX0luRwwb37uwZ5LBryMxyQHD1Nwtn84PQPoffBgyA3W0A2La5FCY2iRsowzSfuHD6jHIzy4zO8No/MGBWumbo/6XGOP4wJb+ON7fiW8M+OB5XGmEm4elL+zZbemTDxOGj4mP9PlHfGLsATaZtvSCn/O29Mx3BgKAtvRKrlvGu8EfTFu6w8muLfGYsvTgrwreM0afPj2eh5YiRmxTSVDpgiE8cC3xAo4tdXhUW7qmuerSFYOqLMwp3TW6YYjBlho06Viq44eELbUwM9tSk2xF3gYtHfqJBSHH+BVvS0fQAVUqQ1TJltxFkVIFVztLJ0wvz+lg2+8K7rjiFmAPsbTHTHchL2xLB3zYh6Dt+FYmoHEXp97jOxwnju+pPX/lDf8B1Mq7/+uNH/w7cY5w/qVW
Phz54sb/XXAeRL1YPhW/QBIj4KAJ1KUQ5DbFh/MKube+yb28DHJ9zIfzZ5Jjsht97Y17UCUUvMm3bARi7w2Isj0Q/bQAclwkHjJkdMaQkyM+rf6ARBfA/Tvg1zST27Igz6eMecAoEyZQV1KzGNba/+RDuwTyfQjUj0f0u74jOXoCMV83IPj4il/tmQQbO+EvBHw6myZj85RPK498emCY2d/3xn0cBdwU8Tss6Pt645Tk5B7kXwlo9g2I/U6CqIMbem6ek5wug8TbfLBfZDxLkfy80LNMDnO5S8YtYBCO6bfdBElK//6D9qlAWlv1QaZ/gaZzRT8ZfngK70DuLTBTBL61gWp7jeGDS5D5OaC+NCSPTEhdHoGkroG2+kn2kTR01z/01OY9j4lX7iissHKPcwm1gt8C40d/5kvcPR5/rvRdyD8/OBcykPdyPMAZ7MoTpGXVyrMjL/hbETz6d7/AaeLtAHifAKptC5IpA63aIElmQPLvjPgN3L8n83IJZPYTmH5ixGOglF+wuwbclEaTFjz4BVF7A5KvPMnuLcn3H1menefRDIiee+LT+A7kMEG/bkzG31k+YZqWp9Y1idoCSex64yce+rT8TAZo7+9IwjX3dA+ihld8aj2BYNEtD+0Byfc7yc+QhCnpM3IoTNXi93pDz9MyyM4bg4pSzaYs3Isg7wqIr19Ttk7vIPggSfgeH/QB2dT8NMhKhg+1Q5Ag5JPdZ2R8LCWJtwcQfLiafk6q0/D13ReIehyQvOyRbDQYlq2DvM4DzfkIZEA06SHJYYHs1RrZL6pkX6DfGVMymQmIXd0kR4vRgvUVkIZErvu320D79AyixwkQgxas+4HMxQirX4KowhTD5sipzhSfkpsg00k+3J2S//uAHJtb9Lx+dLEXZNTWqep0BdeQZSk2DYGrJ4qpBdd+7QJoF+9A8kR7lgUxf1t82lkBCYIcPQsVkN09xs2Q4RKITSC4dkHUzjqfQnraOnOSva1aiVCUMSSwFcQid4Rc0O7QCi2CBZF8QzZB1LHl08UtiClm+dT+BMFGCCo56Gf0M5/OqyQfLXriRRHyts+09mOXVs6RNXqOdlx0eh4ab/yKw1+1Ip/Wiqd7G7Qg/UTLHL0tnMT3NimV06MURy8L2ZHeOr55wb6sA65BdOMFZPLAh6M+iG1M8WlpDPKwTb8VxVhRjUHSS3h6GoEEc6cgf/RTQZnk4B0keUb2eUdOL0Hy08DXdSnMMgVeeqErZoYyLj2cdXnBnrysKRD9eQjS2gGahxMG9TRJfpeemw2QIIzJf35Pz+6Ankt5EByX7cnbEzKowfTVywEIhjdinYEsT7uHc0c6jlxJqSjqqHuL/sxACK2R9JKQi+nNQ0forp9LAzMJoB20QXBws+svfzivJojpLYCoyhPIXJUPW8DOEdkSA7JdrzN8CBz+0Wu4AvJzCtTb9FNvG1IAaHPXvQTWWbo3izP43gx/2fQMTumDHT/Q0yAq8wSCMxt5+I5B9E7fBXVALFYRO7IKG/NpucGnwX+kCbKSY9CAKeqlNT4tPTCblVcyXmrHkpYiBJCr7VnqkelN4W9NQSaMCYjG9l9I/x3EFP/xqfpEMj9PsmtJND3N4w3J/jHISoJBd0xSYSEtQekdPh1rhi1k+RSvMeygAbJ4D7SttpTnEof0utfBPWfdu4Zcbu8Kaq57kWvAthuZF34APirW7lFEulfHTwLdq/KBYtW9Gn6T9KAvXvfKslwOKrKQWgTZmwcGmP4q/ldEvAAaLJ2ErboJYlMTkMQbHzKGZOqA7Fdkd+ShwuSCAog6p1+QbpF9LyZ53nX8TSlLBetj3TtxdSk6cog1tu4dO3IEuUHdw0shZNexHEBri+7tQ72g7uWp5723jWslurfjWAqumf6wrw9K8qVcAdEnBsQcfpEc/QNRtxmSyzuQ/CMZcQCFoAvGvp4mf3jtPAM+HdDz+9H57dDvibmZzJnj2Gbss2spyY9vUINvaozv/eInj+59
UZy9x79OvQm0xPdgilD33vFHvwcRV937YB+OKCDZe6HXM72GOJrQvVf8ye494uqj7v1jyBO0vOjegMy0ytO7d+F9aIc3T2n+1e3dMuiGZnd6dzhy1j3sqsYwtjr+8N+8mwZkaVQ+A9lTaXhIzK5AM5Gtzsj621ZnuRfCRTnZhlVli/soZM63smOszkOG11aT+MlscVkOYQo/xGx1ypedZdVCfs1WA2jKqWpc97NV42KtuRRxM8NWsxDmqK7SvUkFRdUtKCiq5ogbuJFeXWSBlrBpqqYhKVJdwK8sW81ga1UNGbxMhBYwW30i/qPi2Ooj5Deqr9y/VofQQFR9hhad6gvEWapdvyR4xxiwYmWr1DRmq1wk22ofwizVe8il2uoD/mnaagx93rb6yxv71W/8f7fVHyr6qf5hGWyrI5bzg/qHqm8s+jutzlfHjm+CdbStfsGOh60ekR2X5231kAprq0V8lWyVem+rFUopVUssZhkdaquwQmurO0x7mwF5CNBU99mau64193yZ3231nLW8ILahAKka4VexrV77SKrDrSruvaELz2RasdUa8ZRJNplJnT4Nwdq6nwe0vdqavwYoeLVV+RAIyLa8lsU4FZye82pbspgSWPFqm4gEZY9ebQMqRQThxsUXgSWvtoQ/zbVFNLLg2Kst4IirlobwmqB4rOCPPHDdq2WwrqotM3AZgaE0lcCDV5uFJGsNN11rMzhdA557tWnpfQHz4dUS/i1APhK1eaYwjxTm6JyDM4X3A/jr1ZIoOC6C1hSE3QRly1SbQvJT8kLXAv/aq1lcjhXUkjD1XQhKdjihFMBFn/wm3L/SPQJS9BhaS2v48V77gayRYCDVhbkgAXH9YV8qaJRXg/yFQPDl1UbQtVZ7Rw1oha72htQmvl4Dqh2vNoa0Uu2L+AnB5toTLiECpWp4SQRkpqg9QgBQMPZqmGFqVKtWG6JmsEBYe2XrvbL1XiAVIhgI87PMIwLWq935PwAlzdCFs0vnLe4eAaVHbliwGxasj4MOQTPj1Xi6IGik/g8QZhQ0Upt7CCoDJZ8L9vG5vwAIvFqEG6o1Gk2utTFk2mina+xdBf+82hUZrth5l34GIJNUrSOLgloN/VFj959BGkpQS4mrbM4qltq1U5b1lGVtYWS3IFRfa5KlCcmSGsxuCnS9Wh07YkHT82rHEH0UlO6BZVyBN69GJSM1KKapHWLRBRx4tQoKLt9Ir3bC7E6YXRlCG4KSQonOki812YH+DaD0SAHDrIBGz7OJaF+hts1Bss1BcgDTe4JWYu7DAJ8g3HuY/QSlCLscCLsYCCq/wZsOaxDtya/ztmI+CwGx/CrstHbWiOu4G9DJ4opIZ5UXWTqbuLABy5s7QmgQlpY5TYcGYWky03TSxAXql+lkcDulQ+uwnRCLcNPBjG06MzDL25ml7eFOguHTxHliksEpZq54V6EzBU1FHUtbNrQZajoagkWmA/MFpnMOc7807Wg6bdyT6MCmq+lc4/aH6XQgy246lww/c1WoQQFP59QVoso4TZnSTKeFi92mU4dNYdiFTAs5YsxjCP2bzqEjRXwRDIxatoRUYA+oU+KFj06ZNS/gtTCdHcbdZglpk6eDbY8QWkHu7OIKSGcPGp5MJ+ZFnM4vpKVM5xtvpun8YOibzp/rhxFvHnU+GPmN+E4cU0tT5xOmhjpf9BrIiDSdJ6hl6vyjzyMuShiaGjWdZ3q9sLm7bO47fKhN5wb3YEznluF9tgosEhQKqCuw4K3IpHcvGKwNBc1WRnDmFN62Be/FSwbewSfRBcquc8X/u4Hz3CDOHFjuFwTUxgfwZhHej1+C+0141H4FCy9wzhwL2u80ol+tgS+YYSKIaf/9CL4HyHL2FbiUYCGegAkwLs4ilRTLcz3LtCL4/FaBs59IK33KMm8itFBgHkVE3UakIxROXS+xoHnB3BS8dxFTPSJQ/ztlimOksn4kmAKLWkCD6Oau4PIKnKtzyKF2InjwDef7OhvI
MIdf1mIePuYWPn/XTPAM7uNVFKt7juqionYGpTXFJRZxBJZhB+w5NK4eIWs7jbLouz9wHj6gGGA3SbAcsitzyGJTgXmCfPYOwcBuNofwmOsJrDcEjlLIpIhnnWWZFvaZ+R5iv6Kr1QMKNUKPmcIqGbdQJlnPiHuASDqDltWP6En1WgPnzQPzRNvoNngGH/SOwXKSZAJX8Nmr0Y1qqlNW6gWjau8ZBRi/EJ9YwXn2KrtsBZFUfofIDj1FCVSZ5VvNIcn5CtwzyHt6i/UoAzscbgchcKbD8YthaBQ62v4xdJ/jmoN0oUxGjCtdxNB7A5/ZxqDTL2i3VYw8/Ywxs8PsXzCIdjjuKizhNfpDz6O9baLEzCwzw8u2Q4/1CQv+xUA2Wm0ZlVhB1+mpARs5TZ86eCpvgmMMLjVNvN7kyMUQ28HANS28w/r4H1udgVNo9QWO2Qi1Oq6waoplRnvoew7CqRJb5QKDaI9ZotPME9uzgaKoGIUIshgept0XnAej+UXN9ckjWH7Q3zkt8IpWtl1UWe38Y2ExtPUQjWru2TcfSGoe2Zsrjv4hX6I7vgMZdtb0huAuXnmVRO2TIUuCaUsF5FhEBZvowQjxnhCBnW73+DLusXpnKIiexeDTX3gnnlDqPCu9iDds84u1wHuw5eaZZ9acLReiWYpocjvAgJyf5hvHTllgKZ4wTdhLVOsXQ1OVMJ1pjcLlMEvYOY7qLrs2fmJSoVfgBQXBK69QorMkzvGxLHkKZVwnABa8sYR547J8Jwu8xFLAJZbxCb6lhUOyHYKtgFssAhJ2CkHtwpF/D6h7Nl7HRiteoxmVeBU3RWychW4OG29B6NfG0Nti4w0IONs4x1/jMaXo40XHt+Aip6khLV5xnsu89BCHLo1ZbsLiGShUjXkHxMYJHJ7YeN7xzznGlCNJJFk4hto8QSvbmFi5XKao1S0OXCzrPI170rgXEF9g2xXjbqCNI+5O47Yr2jW2SvEVLynEl7zkEHdw6yOucYMb4+zFxlVXhlMsZW3c4rYwbjJug61Vx3LTxse8mRAfQQLYxkVqwI0PXVYVGniJT8hfxjWSuOSS3fF/BfOuHbdZ4gMXdd8x7Lk67WK7b+Nf/suMYWPYxj8wCWLjb/9U8I9xoQDZxiMqhYvfcQvJxthA2HiCpb2Noa/Jxl9khnE1G8u2QXCA/Wb8yOpTz1w8xE4zfmVFX2D8ycbPkFa38R2V58VdLDZtfOsbtMgNbmTEff5yjXv+heAD9DDb+B5bvnGdQw0LrwIscYwbOOQptLjOaHFYwm6HAAZuFSLRgta7OoHipPID5HIFgxuvfA/NycAFr9xHGYBjr9yju0c3frwKJPa98g32gYJDr3yHDQ/w3CtjP1Pmfqb8gn2SYOyVaVsJKGkMIVdSxmWN8is9XmX3WeamqoxNVRl/m4Cm5pWfsPAUhHtA/wHdX7L8AOqSV/6UlaFA0itPWKYJyjTGxQjBF6/8jgEuKC9mBzd8y2+4dCJopQgf0JgsWPDKIweydynDUm2ZlmrHtCk5mcWLaSYzvGE9mfYnggneJJ7MU13kBDofzCRFE6OTJBVeThRsQk6msM0wk8AFWRkLZgLtS2WMMIEneV7HLUkzWcPtTTNZxVp3ksVxn5ngV4GZbEK1gZlswJ7pJCfVMpMlrEEni7gnYiYLvKc8SWNEmcmKyz/DC+uTZRy2mEmIMWAmx7gsPTnCteVJEePKTA5x5GImFZwcmMkJrj+ZSRl3osyk5FtZFU92cGt+UsA2YpInbtPQ6uQApzxmsu/i7mH9Lgks5IAPkaD9eRJstQBNgesSPCXtE7/QFlC1vKDZSiHG4Qm8bwC3gDsw/G6BYR4Jfw7h3AHXASIsgOFgFqw/TGVC/EJaL2VkVBsJRm9wHi/Ae+ke7tUDwTWWMj4UTM+x2EvAjUvBYB2FX0amNvmCVFc2EJheR2aIWNhklB1iBni6DL5kEQUD8yGTu15h2BQS1QO6E4I7iJgGRxB24RuN
4YGUTQEJ7aHauo0SpGcAqKauVlCi/jbdyClNhhR91xB5s8ow1r73iUC0dRqV0tkZ8iGl9LHAObrFHOK5cY1nlC+N5tt7EOiWWTBUQH2ghfbQ3vr9F5hg/UeIa1kPnUalzf43sKIEjy28z1iRhRqHxCm8Z+hsEDEmFlidlzPkc5UlN3wu9shRoccJsUg8Zm4oVhoV1B9stW0OoXQaeIFRphUT2PkD1uCzwJZOoy8WenSy39MohDn9Y8H79EFr6HMMXTO7CwzRIDssYgYZtdFU7X+MeIE2mLpkIXpEJNLucaQx302OS+CCFvjHhk2/kpdVKGCcqkvgEcs+3gfuvyONOprXfqBjze8L3TUimtDsIUWTw1ujYiazw0BEVfPoH9M4h88KhpW6GzNvlDaJtg4xPPZQ02DhjBEDvDJoo/CLzxhhIVLTq3j/InY2DgYmUANmJuf+n2CE4xwzafsZwWvc8DSTK0cuofnfTDq4hWwmNXKc+cl1IVWoVJic0qvF1Jq4P2wm0MItpO7Ik5t5B/hQm8kjLoyZyT8q7J3wCreZvOIPipm8cIJ9xpUnM7mj7eJJl1v7yS0uFZoJDceZSV/WZ2bS4/z7AENwZnKPu4Vm8guhonJZNv8CmLR/qO958s3TickfTq/M5AN6nCcjl9E7rtaayZvLaOLLR6TinwFwyjL58nOCn5idyyeY9k/4qSr6doFovfKhfwiAMoC/DflwlI9xK0xQPnQ4PhHQ8qFsMCKUxnvlOn3qfuCVZQG1AZx45aZ/DjjwylVUR1A8T6H3RXDLK9foW8P9z/IZP3eXyOMSR+jlDtcFHXwKr2VLAFRe+Qr/MATlI1iOfBsBlTRRG5UXlKQv8Mm8wCemfP4fXnkqov6RCAcsKnqC1o/kJWSx6n+yyIap3q74XNEnxkdIULc92PhdJl569W/cLAW+e/Uf+v/Qf0z3mPwTuid0f9L9SZ4vur/oHtE9ovtDmkxALXn1N3q/0fud7ncm84q7+YL6y6tDmXQdv0HqzzhzFTSHXv0FN1AFpSHqAxl0dSrvq8MYG4whpzyYP/4Dql+v/ogrjPUeTikFJdE+A/sMvMdhrKB4PJDjARxdSNMB8179ThYM9Rs/MQvse/VbGS0C6sOr4/YcrAave/VrZnstC6B6B38CBQMp3SUWQ4IvXv3cLwKU9uoXUG0qKAtwWCBOAnXDq0d+DaCUV2+Cu4mhU2/BYqKglVTr+EskCHcDLA1EOaPvGX1rdNcY5xQcp3jn6lV6V6HqtH6COLBLWa+AoUKGEpwlOsvkLTO5I3gf0fsYzmM6D+E8pLMIZ5HNsQ/nPn0P4Dygc/c/lOz24LuH3AtoxIIsP+s7WBIKypKwvs3WyTNCXt6M+iY9YIFSQKLlsN4Fnnv1DfhukHcNswZQ+n/dvwWosVfPYjxkZZapr8K1yi7K/IfSUSvYSAoKQwiGkEHL8toJdL36IjwXwbkE1xKD07Icg4Fu8VjADCMoDHPQIi4oMO/PBsCWV0/iH4SgbFTqtD0rKLNHfUY2THWo7qzPYvYTDDpePQEVM4IzM159WuZmgX2vbulrsYStB9juwQC5vD0aC3ugVNRgCgRKGyv80atPMd0ppKuSG1DboJJU5ZrcgjIElYSWLNmDpM4FPyM4d+C0x3eCR2cCiaGAeckDZfcoeLErqK/3BRnvPQ3fua6g2ogFF9fIMIDTwDk3Bm5u0/0nGCwlBQtgPqjB9+YGuNBGGm8VpHf8BZ/JO1geGDEHnM4IHl6gjFni4QYxR1wXbMzAWXwCtwzHB/98DjCPbEOUySwiS/WKlEwmR3cKLMhXD+pg1AhUa6dIaqmIwAMEsurHs/C9niB6oUUOoMknwDJCeRpIxI7RiHoepe8/I9WZHsLQcPZ+j4ik9Qyb/Au40EFKY7SD6aHIv2jcfUTRn2wp+Zwj9yu62UaKJRmjDPpXExXwEN4zyD1YQdFMC6W/Qy+3VxkFXaGfl5FDH6lebTBfQzdY7Al6bgWF
txWUZBVlslPoFVvfAncbuU+/IKVzFuQUlZxGturpGh4P5JNV/4P/ghzbiL3zCYYQyZtvNnNjU3B3l/24xsBf1B9VsSEG3hY6Txd34HGHDHef2Zso9BX65AodHSxkiRnGwyhIgiHPQtyhecwUIqsCxkmR7bjUAHMGFcxxrD2XgWtNBiKmWkfFc5dwvrxi7KKu+Q8mh5eLegeT636QEbJM+6vJ0D2tUDFBFGGGU8kF2FlKpmlcO7nkOBbxS1klUy5aEpfmk/O+PRIyBylblaRWg2TCMdAye3KGbMZ5aUcCR6wjiiQ6g/IhFUGCXC1XcQdZRWXclVbRAdWFRXlqto6gRa//illfRSGUa0bLXDbM+wPv6oCiF+O85NqHOE8fWr77UITz5y9fCOjbV0GzUyIew+c7RUwSZ+Evy1dwjogf9DeCNrdPNxIz00zyV0BtAJffwfGWEDxCFvuRQAeJvs8LBEungptLiFpbgEfiHO5sA1h/QMITZGWXP+n+IsbEb/h/j+n+A39chs/7CnyaNfgkj4gn8KneIXSGVckAClP0voY7/01EwrZ/QPdEcIB0dftScIik9HZF8BwVzvZQXNUS7L4hQoRE9RHqZorIXcca7dA7Q6JphKoxkrZjNOGkDY8MKmf2ka9ZAqOWWQSILG1mF/5raDj7tQe8r5MTFTIvV/A5mWHCqLTKwq2f6S4dAtNsjA/LWI9MbZE+qM00OkN/JpnMFZNEpygLNKfoGvWZR+gqBonpocqqg87cYneMWdYxyzFfAN4xUqCAdxgvKkStTIJDYtMQ0SzmawP8S6j/0zI8ZnPABeT9VCTHDmOiy83fNsuOMRmkN5gra9mXD2HxhFpqi5A1E1LCu1UsQzlF8Yg64IvH0LVXhMVvVSzCXmlxH6sSVTyAYtviLr32sFRWxQLMTqviDtX4Fbep8bGYp+LdImzqqvE2rterIo5lVfESS/TiOUXVixdQDlJsQ4GPKkZMu8nStMhVlyWIKjagW71Yo/sUqr+KVbpfoeCjOKRS3+IzxNRU8cWRAc4VFVQWzAn5xxe/+MiU73FTowjJLFXsQllK8Q5KNIo3UP0B5QVPQv78Wa9/CR2XxRhivsVf1vgbCmSLP1TjXqTduOKEmv+Kn458+WjeEWv14b8LvuHPffEd5stUcQ72TVRxnuZXikmK6hZTzB7qzVUxIVtwIdOMZKlTsRgwH+0HyNuwlIqqfYtT7KVNNsOWvP1Ccv6O4AbLvOZPV4SssymzmImLq9QeUgxlh6GKy9BHU1zEnkwVl9j4aU6YRZ7+a31BST6oJgaBwkut2+7h2hFc3dGaFgd1h1pudA1NrzUM42pdxa0hrU8hPwmlyR0hTUrN6gb96lBZrDWP3bU+cuSQuux0hRwnxLILKWG/qPUOBEh1nvYI9TYZDqAEV+/TveeYd/nwC42YWscu8x+qadXfrvh/kJLVH9SHokf4n6H1uwt6o6ZCPaEKXE0jhfoL+kn1J3VZ6AG14el/UBaknToU6FpG3V/c0zPkrfWde+g6cgsTjfqGyfUhKqx7LuDBkXuqeNXr0udaw1qT1qusRpatv0X3JnQW6Q0ILeocfZYgiKgXXFXT8o5ovYIXVusMttVaL0N3sA6JsxAj0XqGJZ+GkJuGsmQ0xTxVQuo5dmiKuoS1cpWaot5QHcC6p9aGza39plfQfuIQqPe8PqRHCkZaVcC0Pb235f96fWqwLFj+vLIQT0g0aQcr0cJ63iTqPF9JwKickDNKaCRO/XXBKk9KEhhnJuHEQxKQFDKJS4jdmMQ5z7ATFzjRSbR5oJKIoNLGJA54GJKAhU6T2GMKUEhsEhTxSMisdCaEMh6JE5d0BcPDJEr+j2AZG0eTOILkSeKYbEXpPpMY8wQoMXHk02Uzwi7HJKiCyCTeYPYp8U4xkcQf48aQ8zMJiI2ZxA8Uj5hED3ozTaIPCx0mcU9lsIkHmMlKdHl6lbhzhbjBfwiTuMU5euJVmt0khj4CnvGTxSRg
YdUkBvgDkHhidv9w0pR4xOJliZPtCMVQUKgkGEPH5OgHmoaHT9T9BLVA4KJapdEX5hEoDBL8oCHP4T/ItajRO5SxFKZkLhWQiXk0ZIKvmHKgKkg4H3F+o0ZPmJ5GtPoKLUMysYz+4Z+cGuGyoB6+chYbPVDP1Oge0xQsLSGdLh9uZf+sh0P+T0hkeOSVoHxSIuT5fwLbVpNYxMbXJJYcwwLv6iY22dNbOKcwiRy+dyaxAe3RJrHGJNYxJZtEltJBiVWmZHmml9BkMBB+Mwnlo+BHrGQRmn0TU9Q5nJiDKFMCy0WTSLq8U4w4A9mixCxqbBIJHIyYxDQV2gxvqTx12IOZ2WEfksbDe/S9Hj7A9uswxls+RBcVknxdnLrl4TeMrQ5/ULz+FpQY6+EfmFL80ZaCFp4h9GD3N7Gg1cMPzL7DNyjbHr6j9wtz5Jwj5xiHPv0N6n8efvlfXmPDQcVr5LBlF7zwGls4nxLUba+xKWVtwFSz14BCMAFV9BpZMkM3WGPdAXzX6AsjuY1l1F5w0WuEuMANvPIaUFXWoGWSBo8mBMVjQaZXAZmcGmlZjAgEXmMJEnSC5sNrQCkLUOe9RgrdDDz3Gkl8+wTViteYJ888eebkNRYAy7QMJQG74TUS+F0IFO9ZjGlBK4WawWTUMPJ6CkjFNU5KBJ+8RgCzEUApisUeRVBlvcYURitQ5sl8BJFJnT/HqG4o9L6gTKr5e78k+MCJE8qMQfroXZ2/wV8vnb+FUKvO076Yzt/5t4IUNM+/UIt3/tUHDqG4Nv8Plwnyj5yc8wPaAs4/wWyczn9CmlHnv/iFzU+Ywht0eOXfoag9P6Jq7Txs0un8n0wZOv9NtbD5HyqszccwOpz/lRwamC0E5D38g+h144dd+cOm+oaOSEEZNL9+G5DxGvidLSATaOOLvF8Qxmx8QmJZUMngmsiwFADLGAMVOPAa7xwa75h/Gm9kfyP7B90fdMv0ug1EXChTF4DzmVGfGXVI95DuV7pfOdoeoS20wUmp8Q8CvYKwtvoEEc0G70Y2YJ5YQDr8AT8zBTGA7vEbVFBGVx8MfTD0oMOwAc1XjVv/HiDBN8zvhnmzue5YvC5+JwhisFzKG96AdnwBSeYap9ENGClpwEhJA/pUBSSkjazacF1gMEJYsgHR2kbDjwAYsnUskRot5tpirk26m3RX+XJUOcRPER2XURo1ctQY/wweZ0i0zNemLFNFo+RAhncFkSo472+cQJeCYMlrFOELRZ2NQ0aCovXGMU57BV+8BkyeCOCF3sPY2MOEAgWBDRroaBwgPm5WN/Z9KUOe80seZ+GNbT8G9L3GDpPekaT70CTiqdMZ3tgYJrDO/eviKoMezuElGM7zrs4wibdjbxOrsyHtgw8V5EXCKhSM2RC/IWxYwz2G8Az9asMGDtptWIf2ahu2GNSk9s4wwrtowzYGjQ0v3NM57jqEl3i1bdjxExdCrngvIMzDMoENtyFmEu7gaNKGe9ArER5A45sN92FuyoZFmfNteMjL9uExdPna8Iim+0LIn9qwRNWcYQUvpA1PaHU6fMdfeRviOooNPyDdH47o/qJ0S/hJ7Z7hhH7U5xn+0P3toyC/1IIZxvhrYEPZPqIMtyzqDW4ahE4KJOzyQn74QPPQ4T3lP8I+E+q5Fnh05J8jAwa9UI4lfKYF5nBIe4jhqzMDsIA16jAH9YPDDa4KFrHK00N3aW4I0WO9B2FkGy7w2kUI3eY2XIKadhsu4heBDZcpjxOGsuSzIQTlhGT8AHVeZR9nYdc6hAC0DTd42yXMQWWkDbfw+8yGmzTRHSosoGxoaAAypFRRGLimdtpIw2lHEo7MQr4ndFpDwhRbDMPMhnNUlLgXyhdK7y3jv4XON7CO2svQVNwPVY3urdBw2d8fv8d70ESi96ZZ5xnManqP51qnVF+5l4TawXSJ2uvSZagdTJ/IaxKkK3RTg3GQLvLhyHHhIkqQ3sVkFaT3oGsuvY+/n0H6wPltOwJ1
B0G6IIuYIL0DnZRB+hIvY5C+cgld43MYpNvuKWJJzqlOL33hSB2Hc0G6weybjo+6EdNVXHNMn+F4L0jX6PWMKTdIv+D7FqRfsbkM0kOX0D8X99E9DciPRWKQvndeD7xln+65pz7+GAXpG/d060iX/Hfu4Y8aGNPfPhSS/cgqI0jHLg/oygjSn/LVD9JfDHAKENMTbIuC9JsPrWXvjnlEtg9ux0+beOOCdNKVPIVvRJCew6lmkIZi2yCdwDAL0tM+tKHN+tCCprEEDdIGr1uQthBiCdKBS33KJaGo4TKd8y0ibMr3OEhvOY6sj+6FxYwgvYYbXkEaZuODdIjjgCC97EjGkRUfvZzG/BCkF1yqS9i7du5k0rVRgaZSox1MWdG2b8+F5B3ZhyqdiPYEbbRLhiPiMS80Re5eU1R03CfYOtqo4kgJeo+jMma26IzuGvGUWIXIWtSk1pGoRa86tXtEDT6cEy/QjDaiIuYocpld4a2PrvFCRh1o5YguqU05upNesdENjDHYiP/SbdSDUL2N7mELz0YPkImLYJrXRrxQFv0jPlLxSPTK2g6h3dJGL9QBHY0gQhh9yAbKRtij2egdGygbjV0+E2zmbPSJUWyjL6yKbPRL5cbRt69PhfzARG70BzO3kUKX2GiK4oeRxcm2jQJ/Gq2gqZw4MpgqoxnXiLM+0ku4ZKed35wj81A4FCVpxj5KsYCLspGw0RJmuCgNMb9ogVqSogyEoWy04jIK8VvfRsu4WWKjNdq+jdaZQtalver6ZdM9bcFGbpRj827wjGkB+1r1BNMZpr6I98DUl7g1radh2bi+gNsk9YxvZK9dX4FIVz2k1zIMJtfX0DIGfyAFs7RsUl8lF+5TmfoWA3KwJIK/lwtCFLe89SmYHTJ1y1wCbJrrWoaKqRsYvzL1GW6u67OOQHuOkGnyzTm/eVzIqCeZQMp5jfAlN/UPyNLV36T5TH3MzRz+2YdCPrGmMfhTLxtHCAE8CvmmaFv9B3toyAp0hHS5Ta/fYYtq6jeu6LeO9LDNq/chnVa/56YTv+TBzq13/ZE7P/zTHwoZ4oCi/gyRE/zEl617/QxSGvh3jQpU+dDEYsjUWy6HuiMN36CNzvHr3OAPOrJqs68iiqvUr3BlBL/jjRBZXaK1L5lggdlu+zMIyUOQsL7PjjvwpxNC9lz7H9Hv2BX/kIcz9SL9uA4x9QpTK+GySr2MRrFd7rdtdxFKgLoLWE100yip7a7wIcNLl13amrbdkH7rxDXiKupqu1mKuXa3cGZju5uokO1ukCOHm41dBS3o3SncguwG9Dd0a/zhtd1ZzB/dGQZMExPEeay3unN0p/AidZPibk7Jd7OpcOIn+O01NdpYcOI1kaxA4DWtfPwFMl4zkLdEADwJmV0F7r3mNDYRgirymvit3cRv7SYNuAvKgqqZlJILHHjNFHaVwLHXnIOtUEFZjDXnIc4hCHdaiiqgnrzmAg6/mosyxTSX0BSCMqc0Q3gsY/0OPPSaGZxzC1opAo5EBBJeMws5VUH5KjZXIaIpKOvdJqyRNWFtTUDqmANvTmbsJvb9Tez7m5sQpxIU5xbGUROyB81tNNE2XHk0RB7FKKBuBdRth3XjmrcJYw9NGHuw3bFMjc19DHvBDa95IAtzgUuveShTtUDfaxYhTiwoMSEb2qTi/+YxPs5Nils0S7JdElj2mmUkXUaGMAEkIK4K+qEiO68m7EQJyO7QdnFTuMmLg01eHGzWWAos/Zsw1yogK6BmAwk2kAyEmAWke233Hom3MfEL2nmvGWF/ApSSQ5BFANa6u9iA2+4QSqpt9xWXw233hda3u89UotWtQRq7e8axVyWeElvQ5tVtQvi+20BZIRTWpLUR2+XV3y6vAXcjYpt4zS1D98q9J5f06yDym2wHBNSp13xH30LheXMEUQdB2QY2PzADCcqWvckNuKCWWb+7j41Edw/bji6MVTZ/EPXH5VBkDhXiifMq4wjYdkvip7Z4FLZ1zD8J
WyXpVlOBbTlTgRkwU7FoEFMxFP+taMz8lVn3MOPINOXiKgl/+lDIPC0oVeZgj6qSwsxeSfrTiLXEo90KvoKmsuDYVpheBt9/U1mG8IupQI7FVNZxEFdZYzK452wqWRwBVrZcppsuygYEVUyFlvy2ijjd2NrFJ6xyR9tblS6sApnKLRO4YWY9fxq1e+BBYeUeu39TeYKpAFPhgWvl0U+giFCqZipDyOlVXinqXHnBjFt5xjbTVD7IPMJZkqm88/tXmeDmZmXscv0iwycvNFZ+YfvIVGI2rfRPJOQbP7dN5Q9GVkxlB78oTKXAj14lz29UZZv8BxgzprLPT3GFZ98VvKGmcoztuakcUVKxUmQND2FpxFQq2C6bygnLXKb5skqJ5+IVGIoxlTNYVjKVKhvnFOcmptLiQ9O1UYNnnpU6L5RWLjDkTOXcNUXkSNtxXvv2QMiVe7p0pANP9ffGzjnA6fFWGwuTrci3e0LOYcpRbV0wvIPpMe3b03tBfZAHNpOCJg6Bj21BdTADnp9HhNZaQFujzwih+2VwZo7hvs2TXyP0GKFm80kwWBzD598z3IkV+O816P8Gd20Wsa6H4GlXiafA93u6y8QSsYLcnx4Qd8kAZ5GmkoUDQo/g/u4Krp3QY5u4A++NF7gLNRYByQfrzwxdB6qQ7mW6V8H/i6rbox7w5peNxGaQcSVF7r6jIFc7bIY1xmWlC0U2yTx9psAZ/LKKfVYowVCmvLbFFFwTXgs+J5nVleBRmt2xhDBZUcON8h2heMuG8Z7pi5La71s2PRrx5YXJwWOIFjBvDXqwadPHbLwI7kkHmJuhzznw/BP870zxFd11j6r3D9BSy6ibGaIrbRMDRs8iXXNRYOpV5vTC4aHpUxf8R4+7CiPViTUGXnGMoIpq6g4lrbCFWgxEmw3Z18WM4DrAzC0gqVW0hJr9B/fNI7nn4J7fpRs8psCyZC1LjZoOmWc1C++fO3bLBXz6YHlGz6kljIYnRPw6Yk04HOfQMzqfE8wyjVk20xpqotdZ5Klp1oQDos1iHXbpk+DYjpGbzC7wcUk24Z9hOiV2/JitsM+4JTS6+mKrvfC9+95AChVU1cxiSAUr7Ii1Kw5EVvsBfa1bHL6K7+8zG8LC326egSezh6GxyMpjfL6y21ocFK1XonP/I/I9P/sD7rHdWor+fBWG1xx+E7o5lFqos024obDK1glQVovOsVMuZbSF3pmwG/lWt1jbfQxjcwAftbpJf77tX3wRWyisvbdswUOkmUQP6nvmfsVyFljmxB/5r4ns9nM6OexafAceFuhGN6s7DDD9+8UOHbAgcNsBG3AO1VWXbIATvud7TYZeorAn6IJf9LweM4F9lMncHTCZb/YDSt/N0uODwwIDQg/IuMkmXUXUec5a5X3gA4aizrm51E0TGfqj7+wLxuwjZ4zrdQ69G+A9Ou+CU8scav3Dhtz7Zb1qLNw0ZzFMmLcZ9hwaQ9+iYZ7QXOYTjdlEcvZjiu8km0hxOnvgG/OBHjB/qH6wWmEDcu6d4ruwxqloDf2sNg9Zcn4Lgg8WFLFGM3Ry2A4xJ+dZxHW+fjP4RF3Knl79jWizJbMDfYubzl5y+EUplPATKhzCCQ45VfiONbMK32SLqMIPfPdVOIK4R/iHT134g+NFFX47EvvmVsgDhdrCPr7dKlPAPQQV3uKPa3gDAzM4xm0K6TKnF9jaCfH/SIVDiMWHr1goqfARBpgVznCR2hP+2Ktw4CdkIRc2YM9RhVglq7DFjMImFuUKx+g3Qk5dtBq0T4SXzKMj2wMVXlPyDufhKFDEfKH0Q+H0XPAcqovCPRxvq3CXGRz4QVvIPnlpqjvc9hMnQnZYqwLOklVYdlmW/OmOkAptUocnfvAppOhHgocsx7Er4RF+WofLLqPQkRUcsakw454WsMtRYZqxllwmi1xchLjrpsIczGCFWyzkpouzymsDYdaPBXGsosI1WhwNDSup/cSWkMCVzrpWmKLF8VCRI8UK
Jmn6NJzHbwMVzkH/jgqn2Tb8tRHOknkGyrTMIAVTlGYwx2t8g3n8hDWDBM46BtOQ/DaDGazqBrNETd0VsCfVFGJ5oEFLU2YwBQ0hA8XrdYMc9gJmsIFdjhlsusR55W+QJa46rzVe5Rusu6eQJxmDZdwENQNa/jYD/ogfpF3BFviwiIFgBiU+lLEbGJxA95YZ8GRpcEgsYlk+oIqOAaUqBnuyhzaDfV+WZmZwQOuzg23s6MyAtgDNoEAVHIMdLFcHHfSbGVzx4Ro6oMygjZuKgwinFINzmAs1gwvqPhzUoS9j0GKmNL07qGLNPThj9BqkMQbPOCM0gxf83DCDV0ptDIZcUA+oiGPwSMUmA14KNQNKWAzuiQ84ZRr06O4TbyCrbga3/vSPkC7zucNfIjOgJMjgG2IEtD9lBjGEWWiYygw+eX1p8OWacUyuCUOoOGRAxSGDERGbiQP/Z1ZAbQeCZu5Q0Kpv+Mw80v0nuFwWuN8SODqBawPhG/uC+vEc7os7xJ56h4/NI167C9RTCE2+0OdaMFgr098IHmwisIZIf3tI+xXRb1uCD+BV6lMw9YNoi/8Eh2tgqIDN3CbBcdaEO0CULpNE3moxgSjrXyhqBYBaHSGyfbes2hejPSGsDY/lAgPTSH/jmO6MYJ5V6a3DO4EiqjEawR4quOdQkCCH8pkyGyAFf33KAqaWgbkdcL6iGa63EXh3xsAs3XOo3wPjvIFvfwnu7SZ9hmz+FlNhM77uCq4jcZvNMZW+4AiFtpmG4DmedeaWxUIbHM6xOZBe1uX7C48RKnGJumn1DG9ZAsKNiG9siWskZw7QxluIclGE719IXzS0PUDF7D3GSLACPv2xgA7QdGJI6fsUsMUyZI6AsvYRnzTYzUmH/iW4T2MiW/IUY1LNJujGAFDXmj7T9BkSX4gD4hPxH3sEgy6waK0giVpnUBvzMkWWOjKv9oDHByh9gsOgd0tEVJ37QI1RGnuHcmzxTdhCU20hyA4isK2jMfUYQ8fc/dAH/hGLwozNHca5XsHwsy9oAvv7ys7AuDLT0+xiDo5NtNvWjcAORoX54huSm2eyCLPniDODkWv+MFrsJVpW3aGVm8zz+Z79esrckM883i3zyZe6ee2ppxq+H+tPftHrvMIMZAsnaq19nL8Cf73WnqwdBEzea+HSkUDBa1G7eavgLwB01WvlcbArqG681jb+rwrqJ69VgYlPQflytU78W4Aae60y/nMD016rhDMqoPgcI0UcG7UwiQsYr1X0lwDiwhdZIPBaLdwMbTWJkOEVEN86zvhaNRxECSLsjG7qeWpVocVQULKhoq8WFX21rv17QN1rXcF1BReM1bZgrLbVwZ8koPJaFzgWAEq9znEiK6i2vFYE7gjcbf8UUPFaOH0TEL8e1JoJyiTfemC1H1jte3BA/qN1B9cdXF20UJctdOu3ARJyAxji4LL1+n+U0kBHcOsZYc+I+uSg5LUGfs1rPeLxH0L/wTVhlDGcY/B8MaEv+n6C6xNBHzIMBBAE0+sC4vkO1ztcb7JEEECcP3jCxnrrF3F+2fCxLG1a+Oi0fhAMC+utb7i+4QrQtQH6yqIbLXoVop4tLHVaWtZXLSXrLgF16LWmwIP1RWsePQ+dBAIShcJZLQpntfBvW0Cdea1ZMMzieUYWqgI65bWmkcg0PBMYUjBg3VrBbcgW1Ld4rQzdGbqXwbuMooQQWxWU5l1CmZewXGgtQpFIC+a3vNYCKrWASmG1JyB5b2EhIyiLoNYm3pNNJgvFaALCkGNmOfqu071O9xrda3SvIr9VyC23svTO0uJjtQc5D1XtO3LviBNurHbd050jN1xRQotlRsirFFRVhzT9Vn2GQJSqQsJJVQc4AlTVJz8Bvn8u6BGtpqpjnr9WJxQlr37iX6OqfmEYq+oIug1V9QN/AlX1zT29Q55UVf/8VcEYZ7jVX5rbrH5jcaWqP1yYVvdx21RVDxzZpZny6h4uoqpqwU9cCpE1uiz4q9tQ
gqqqeTSFqkLjgZCKi1dyEcq8+VM9cp7HWIQqqM0cCmmyji0K3FfrkNxV1QYWwtUzSG6o6in+dKkqFIio6pUryDVW5tUO7aVXL5nCOe1zVi+wEaq2MecpKDdFCQPqxa9q8imaDa1ivKrqHHYA1Xm6kxDUr3KRXp0hztLoaDUB7fvVaQZncGitqjIyERLKilVVl7nyry7i+FRVl3CaXE0TF9i+uMOpqjmc0KvqBq6Uq+oa/k2q6joudavqKqT+M7s4TL7yC3uAvIC+fYSzDmgI2JsAqF4Er88FgqW04MuKwB081VkWDMfbgp2cQJoRjqeR2FULMdZm4U6Pwf17JWjiD/As78N/fhF5MdJyCfjEzBXCbiqIv460N6/pcUSsgWNBILWJbJ8QbWcOKR+fMk2wFebpPEYS4SHcXQvGV2Rrs0javCHU9qbpv02fA7pZkleGvjKuQnbpKTZRzAQe4JGAs4gq6F+mfo7mMdtJwRjNsXfBRlqDb36a7lWmipKrU5cDfAoA1WYF048s+h+81+EcDMH3hxqZ1i0YX9Bv329wTiGm2W+DJcFKp0MEog7frNQL0vsGw44m1xa56H6+IaI2tuJyR8dPo5Lm9Bsex+9wu3ZId4A11FJ/nNBnh1ggcgylt4kVZtVE3AgpHCTpPQKuYizZfST/FtAbxSjA1yxx7JygmuYTI8j8/qCeK+tElr2PEdQukgW4gpSCuUUiah+iUcYI+lhGOIqhMG6URc2KbLNplqK8xHSRX4Bm0F+s4xdK9DFiFsrTuRGOVnTuHfcPcvi3r3MTeRl1bgwRq9wX7MzmPjGz6twv5gydi93TN4Vfc3/YPercHWRgc85wbe4Gckc614c4ns71qKEZpsoRjfZrc0+0Bp0bQLpf5x4hfZijIG5uiJ/5OveKf8I694LFg849Uzo3V2MmZ9jW6VzV5XUKk9C5FkOaEAzVuQZkyXIX9OI/Rp2LKPeb43Smc1cu6iV0jOoc7j/o3A5zz+PYRue2mcKB497zLUq0C+FknTt2kY5w1KRzRUgk5w4hkZyrQMJD507w+1PnSjgH0rklKgbPLbrK4/6mzuGfh5AV5piB9efcsvMK+bAO/fK5NbpXaVU4l2VltiBPqHObUIad28DZi87lyKYg26VzUzArnQugokznLOMYzL06p2lmNjeLq4c6N+NKNU1d5LkEPgw6N4+5WOfmXFIpppsU7Az9G5lVi5BA3I5xnKDiFub1uMmbW3HDtzIjxzVab47P8DdNxZBlU/EpLmPF147hCtaY4w4/uPGF8zvHvy0VR+6p7Qj2u0L2eQUu3sMHK95ljju4fx4XXHjeEegWVXGF3+P4hGz826biEv7XqRhKB1RMBQQqLuJwMD6U9aKKqd4iHuOMMf7iByn+ZPwPrg3id1c//kCL/yBnqOKYN/HiH0q3xd8uhz6ukcU9V/oHfHTjO9z/i7vO69aRG0eGUHWv4lcX94VXBGOeL8ZPvNcRD3CAouJHx/+PTQmJBBUvu1qEjiwx0iLrtwBdrSpO4wwx3mKbbTJ4wzUT/9HGqzRnHWcZJeBKJbZsDF6Fi7U0sCx7bp8EdWssaI9fBX8CeDxfIHCUh7epCqqNN8HULeAO0AXbZB0MhS3BuA2Pf0vw+Okg9kdDsAMw8Y/g8inCdA0e3V3BxYnA/EggSDzANz0QzLzAeTUvOLsPAOvcPVxnCEleI6PsP8GvIjJG2S8PBdItJBWijDaLHA0ro/YQTw2ReLC8idDnJN3b8E/A4zUrOAPn7IbAAqucR0rmAbHtF0qhV8lcR95vJSQxD6feBcv3LrlRHX0CvoUhnL/gyzzTeQwnGOxCBOYT5Lq6RCfacOcRzm10ieop8MlHVLxR+ek9gSy4frOsJlII0Id/SKAITxuUgXvIUrMi5hMNaxtoo61VNswfC5ikN9xXdTp/id9Ett3eM/sBtZqZphPFtw/3bFK0fBIt+oOIethl9odENtXlNHGGPgd0421RWG3GU5i4VDzv
BuwcFn1xChI8Kk66yWXWl4VqPMNXYZpjHAqU9fY3TSZEHdmGagg8Cl7RtEJ0Ta+2LxNdFGEVraNzznfRBYT/dFSHhHvUoJh11MRWQkctfKSiU4ghR1VcLoqgIV7jGnxSSImfvuiE+Rxih6yjIoaMjo5c0DGNe0e7nPajPdwZifZlk6ajA1yD0dE2boZEeWKBd0YifoGiP5xs6ugbdxWjH8y5EWQWdPTrivspA1TIF+TNdDSBkH70hmk/eifzCFebog98wCCkKZ8AiGw2hPzjRcvoEYIGOhowtydW/p6t8wBZbh31mHOf9b3BsiC6ZT1ls+X1W35i3es3kUS/IdN5H63XP8M+UUc5VnCDCW8yySx9Vl2V1vBZi9axs9TRMiY/HWVo4iNK47YWpDGbQhax2dDREj5fUZL3TKM53muN5pleAjOdjqbxN0NHs9iP6UjLNlJHBndkdGQhR6qjABcZoimuPyLlV71+B+6W/zMtYN+PgakO8B/cZtTwoHzxDu54jjgveJWBd2YDeA12dfsGfHtC1O8Mk/kA6jQT+2YyJfDkKnDfDuF/8w5820bCH1XB6xV41AqCwdoavLt/YA+bgp/r5PsCyzJQyYZSUCZMYansEw+Iu6hTG84GYtqNCO4zVFVvFJHM/A/8jxD68Ahnb1XwbRMcp8hIXW2R+wruPOqulhRwgqIbjUIHuRF8emgTE52Av7oHnzNUTK2m6HOIPGrIY6bI0sHDZMpwD1CwhwAebH11eAnvbTSGyc8ij+VXuAtsqsOW4F0ezjICVXvI1o/hc4DAaTR+YP/B4xoNM50UuEDaqyisrdSJZ8imVieybGwzW0Hx7VWIVFtMRCNVMx4TJ+Bf/QVeoNL2FwPEtNBg1rKRVtnLq2tMgSOh16bPIhE8dpXVO8BIsPto/UD2WZLmEjjt/DlCf5/hU15mrCO4Z1GqVwwxc4cRcsV+y6E1ggXNNkGFV9C8apN9eLmDKGgd9XTGGvyyOzHczdcSc2YnlV/g88eufebIPbdMAKFPfWZ/L/jBzvjEuDNPGa9/IS9g/1wmlz7+S/bbnAggAduHUFR/F5dZ+zs4FurjN2R/m1cjK/6P18fKrF8GlCAI2D/2m17/0J94Fuv/CyH//HfBJ/cwwCbeygaAT8+ODB155RUg2W3A8IjsNiRTKzsQiLnnuo48QP+rbDZg0VI2IrhaJBuRjOCPY/hmZr8wtik7Gv0i5I+Xg2QvJEs7K5uhruCHy3FE7i+K6Mp2SCYZK/ukbcEx7/jIRgC3BGQnoJDHMXM6gl1LmyvD+KfsB2RusLJJ2BI8gTkxK9sM5rQDoTCbK0AMV3Yb84K7smcXckClvbJ7CapC2jRgIhubBCpwDuHHnBPuz3FRbXPXOECzOZw3CKniEoBskJBEjYhralb2R4xTd6TlSt5kctOuRAkfxZuFYKZsBHA9S3YCkNaX3UFBcApmxPrrUI8rW48AnWFkXW1lmyGLWSs7ENmIWdmBsJ9WWecsLKrI3kZvClnjXS7ZwwRDITnarZHtDfJe4C0z2SShL5ZcyLIjIY5+LDZOeMpA+28/wG/JvkXT9I18DfsaQu19JZuC/hR0ofbnGTaHkZfCF6aflCbuz0JmuT8D9Rr9afxh7Cf8QLa/ZS5F+rhAp/rQlar6Beq+6sPujJIhrkAw3JWMfMgJ9KnFo1+k1xEVdsjoxu9rDPUVIWX+UO9D3lbJ24AEcI9Q9auO7YxRa9xuyCdVFuXydUW422HJZxeSD/K6WVn+yKtHz3NGuuC2RD5sJJcuCGcJQq7JceOCbvnQJd5BOlz172lxtf+AbVC/xxVWv8+Hf9jD9B8h3qv6A7Sa6j+xRM/4a636PFHtv2LZpfq8I6r63Cf1qS2wP8KBYP8DixXV/6SB1/4XXmnVH+Nmv+rzOm3ZQH1IP8ZtEdWnVpD+HxQxlC33PtKFstST3pSlXp/njtLJqCHVmCnpMwRM4yK66kNoXfVn2X9Jnnn2eQopfQ+dJjIQ
ECUtiyXVX4B0SH/RXxNcwp5WlwOIO/SXybQC6XHVp2Km/hqPW2W4Y0hwG9XfgEaHcgJHAu0ZiKELqqrXhviAwLLXlrDvOxC17rWnoS1QUP167Tlc8BR88drz/i1AmqOdpG8SvtQT0VayQxOoee0pqZaATB9ti/dOEOlAPLitsfgEPnltI8O6DeNEAtLQ7XVc6m9npU0FjNdehWB5G6rKBZSkBkGFds7PA9peewNXNAVtzmsvQn+FoPLaS/4nwHx47TTKC1k1r73gz4RAK0lkfELZa6/4NwAZrG0c6QrIArS97CfugVoa6Ij1OMbNcUEl1Tv0dwGSdRGGh9snbEjoufXaFaiEEDRSjhIEoduQmW5DFkJAohTIUCADxefbFJ9vb6P7BRe9dh4WLgVTXnsfZdpHXQ9kMdjGH1mvvcuW30XL78nQEFDS8ufYYLd5etS+YO9dsNXbMroE5DvQjshCVYttCE+0r5FoB65LpnyGl19wy2vXZPtkQVKHXvsUZYHmnTZmaAEk3PQTt8BZqV8L81a77kdeu4EZvc37te0B/t21n/AfTxDuf8RHKG4RDKTxX/EWt4fkGTL0GZebBK207YtvE167i7M1QZk427Rj1Kb27/YNinQra2YBmeTaPcbvo0L3GDX35H2QN1tgyWvH0OoFlOL/oud/MT+3ZS82A5RK/rBtf9C2f0jmD38E2yOcXAlWvPaHnzglSnu+sdBvzPQd97HbY3n5BWQUTzCrCkpTfvqJGhBj54t1+4JgdJC99xNnQnq4ThNk+7wLmL3ByxRkb3HuFWTvfAQ9Y9YOsi+OvDoydASHkEH2ETNekB3gvnqQfXJpfDqOL0fGjkwgQx1k39zTuyMjR2i7KshSE7ke13GUOsZyWwjMiwXZb4ydIPvDu4fZ2MXCz8Ugu+tnr4TsoQGD7D4vZmYP3NM2+jvI5h0pOM8dbCKDbAl3eoNsWQZWkD1xQRV83oLsIf7eBNkibAUE2SMXdkzDvtm6Y2k40pSXLMi2XAueYvcYZKtsGAzmIFuT9y/IdnBWFWQvZWkUZK94PTZ77T8KtmWxEWQjbIWD7Lmr5AUvcWY1ZIKCrMEN22yAS9dBdgrqkYKsYkJJGfVBds7lPY3vWJCdkVc2yM7if3yQDZn5Mi2YZjOObwUzeJCFZLIQSE8F2UXikmw6g2yOhd8gbuI9CbI4IQuyWea5yt5Yc5VfpxqkcQraT/U4yaPs8TxOmcdzsGg4nsZFTD2e5YnBGH+49NjwfHms+RDg4zC2NOY4xk9dPVYM2IByoXFOFod6vEVLrGNom9LjVRj90uMsbN7pMW5f6/Ea9NvoccjDgXGGCSwQ0y7lJdjI1eNFaRybh1CgzV/wbku+zfVuPnIEc5PNX/PeZr6DSyr5S14sy59hrZmv4TJ4HjeQbb7qojR5nz/f4jX2PNSG2XzDBR1Bl6jNH+Pydv6QkYpEd7clX+GN8HwJa898Abcs8zu0IZjfxhWYfJ64zyub+QMWbY/xoSvD5n+J30TMGzb/R/eI+IEDa5t/g4ISm3+nicD/kXRmbanDQBi+51dAn6YFQQVBwV1RQfSIigsCIgouCCKKIG4//uT9ejNN02mWyTZJZtn+1KcpY9XbHutlItgLR0lYLuA87HyAbSdL/n1FStHbHsIQb79I9W17oL/a0qPb7gSkvEVNxdu+EwkeuSzxtp8QwvO2g83F9oN0aLdX0BXztldVl5w04beXVb0N1XiTEwxve00v69pwbC+EY1AhjXFHbxvBcvtIBellpfy6neFSwdtelEr/9kzY7gq243LNuB3FL8V2THnMSwd5O4Fy7PaslP62HaZ5bzsSPLzg4QcPN3gYsnzHImPU8gzzenimw8P9yult/KW3+5TebvM8Wu96mZ/lsRBT1G3IzUpWdWzLajmscUYvWfF84yWEescpGbMcJ8VpjtPh6I19LMDPjWGonLGYq/GmuOrxhi6dx8gHOONcmPCq4IqwxAeOXS6DnLHPEZMz
9jjKH0dgI8eOsGbhQ8cJ2Voaz8PVjucw3TaOYT1vjHiEM5btgLFG8kKG7YMTXWIJ9nqHtHvvQC5SeiWGS2+fXtk7xp5Br8yU6vWOZKqhh6cZr7eDlIzXy8vgRG+LadbrFdEQ6xWEsKfwrmV7vF4jPGNbtFdne9lrybRC7wY/Ib1rYZ3B2Xo9aQp5vVPtf3onav9eVeYzehcwP17vPIjs4/3OQ7DSwmcsDXi9rnxd9oaM894r+7Qe7LvXe5EWeK/DWO21Va871jOvdyu97d4TSp+9RyX2IPeovW+V6ws7Yl7vD/7P62EeyeuNgrh3TRq9N8VNES72ep+IWHtIYtqx1xsHiHGmrN6MnH72oqLtnEoi8xC9hExx9CQa4/UishzR82GWvZ4n6htscvTccNROgL1VGdlA4JZS5kRDmavykMu9tI91OWXtrUndtpdW7in4XK+XDUcp2JKmsN4icgrjC3WKczYY46r62iXWF8en2FF3xif4D3HGZ+pAFYafM27aqdgZXwtZe6/xjS5lxlcS0Rg31JX3kIke7wqrqG4vue3xFkPdGe9oszI+4rJsjNamMz6GbXPGZSGXuK0Z7+v3Q4jnjFEhNMVVLLwVVySsXMRatSnmbIamuCmd4+JGODpjH+vSbyuu4QfGFNMoHBYXpGRYTDFFmqLcGBaz0qgrLqEJXlzEhloxbld/U5yR+HIxGnyfR+zdFOfCQwtnkYAxRUcSxUVfxfJoOVN0g7Slf1f8lixx8ctWxRT/mNRNUQb5iyMEiIuSzC2+MWWa4hSN8+KnoiYq7ljCzMW+8uxhj674rM9drEGZ4tA2oCm+Cg7Y55viiyiBHTRTvJN/v+KtdPGKOhwyxUeUBYsPEhkuNiQFXayrvjXBq4BsLZgaU7xBxLooj4rFa8Ez6TMWKzC0pniKOLopniDmZ4pVYVyixl48D89M7OMwyBUvU6ZYwiNMcd/2C1MMFMSLR6LdP1ZOU9xR4fMYyytu60MR352mWEBSzxT3uJM2xV2LViuhrmuh+x2q7du2s8C7D3EhkQLazlE74ODEwuheqHYk9COh/1P4n8J25yWQD3HN8QQkmkM9C+xOrrZlJ5KaXFXXdoSxI4y8wnmF91jlLfwN1XYVvavoosJFhQt0Jgu9mxAWLMZAyyjWRG4L3a8QFzVHoRrHHRYkQjXIbsFlqHbN9s9COwPW1DIWeh+h2o3tE1wAlUO1U4iCOY3aid6l2lw7s8uwBd5mCAvHY0GbBmPeApvbuV03LLC/VSEih7uh2iXJXhL5TOjZDtFalw0adyuTUK1vtxc1nAlYYGzSA3ahFhrbAC92vrfAsom1IYyzhfOh2ivXsTWM/dXu7KqFXY5Q7dYOegvsa4dQh1AbdtFCu9eqPeC+vIYgRQ0l69qTivCkIjyGYzGg6YZq34gNAm2pvthcWWg3V7XfsIAJ1X7gxi38DNWQbqwh3Vh7R5TVQvcihE0PW6APTjgstBvzGuLyNczs1Lgqt8AWbmr32Zj62A7VPrkKstCbDTlZu/EvhWqSMgDav6NKKapwHEEBC51OqIaNP4x62FLPCmVWKAn9mlB4XujzYdsjpfiPoY+rELc7tjYOJZC3VK6A5oD0KJcLPyyH7IdqvtLysRNQ8+xQssCuB7VlEWZZqeTY6Ftod/e1VXaxFs6GavAbFhRCNbiU2hpVXmOpqG2GlwH28wayhUBb+xRNnVJTJzHxAQzV0spigf2EhY4l4KI+LtpVpZbhOsxC25p4+AQa2yuXsG6DeZJtS82seJws1m2WzplYgJbTynLm42TXBNe5xc9uYLjOefdYV959xDfeXRaM4Smn1c47BvmcrMN69u5IViKLlVHnPaKFKuvhJ9V5n5MmUtaHh3pnC2TaLiulaRupLrc9O7RN22diakdwb2Hajuyk2ilryb4k9H1WcE5zfnteL3J7244pPBOe6dmHNG/aGUXJEGV7SeEss3w7KX3ydkofFvQhLbe27TU0PtrreHppb+jD
pnx6taWB014WXFW91ma5PuYoe8k+5uEA13TLvBbjutNOoJZqyMhY/nFthu7lrLFTddZcTHOu+RxeuUy3XfsWkSALojcWrnPtaqdzyy2sbaI2traho9+1Zf2ZE4Owtip7u0XZeHPWFhlczposOTpr2eBtCQOlzlpKBcK2g7OWhmFFhMiuPG1pybQf2LGY9iNacqb9JO3z9i3a/u07zMK02xzdmnbHTlam/YI10/aAexLTfsW+tGnLxK5pd1lO2s/wrKbdDx5j1H7akoEw7U+9TGVZtv0WpDqS4Zr2h3j7tXMmGGetCl/l2oUL053tLxXzWxYF2j+YbDLtXxa7tsxcmzY+2027iOnV9paabVttm1d4RyZn2v+kutM+UmJl3I2b9nHw/77wSvpyIL897UMZEm2fK+6C1b99id8M067Kgm37JPj1VESpCO1MdnHa19ISazfVx29ktbbdwv5A+woPR+2aLJ62G2j8uXYNtxxaNkEbuMULrtAtqzC2EL+KrwPOry2/cGghRp6d7Cynya5lGn4sbMkEqeUa7Ma/iN6fa9kSO/ayc7rPLnYkkVeUTePineCTRm12Xgn32Bi6lvlxP+2jiwCA5X44rLAcT9vCoe7PLccDHHBwYbkqO1xdy2LFyvbxZonuFiUuYNkqTFxaHkuPsf1n6YJFwUKSn3CrLqbNGa5K+qz4G6B+6VbdcnIIXsDDnYXsFH93beGKY4HZfrLQu3og+m+OmMOkhcU0wa2KhW7GI+xHgXO/xKy/89PBM/DrRDg3wtkifLwlTJLxdmYV3gQu7QPXvoG3LhnaBdKGKzFgLQ9+jvSdE6WcG5Ja/4Xwa1/4pOY9ZZXOvKBiXgyY7SvhK+WVO4UjfC22iJn6xHSWgW8/FvrzlxYW/pFMWQVPUmnfuyD8klH8lWIKhF+3FF618JYyOZEiqZdVyhfycPOU242niJ8si4J/KgFlcg8PCP8eEd+5JeaJr2air3HVf51ye3OHFn486+OEiFtK5jRopW+C30t88774tkALmtycwkNl0RM22ZkDEvcOVywUlYyn0t1SmReCzi+VcYurIs+rCrEH4tcOMaUNar1W0Nc6YYcCeJ9ZxYhW70Dzfkp4dkp4lpiHZSFCwj3hdRaJWM4oxRwxP2q4d5XgfaA++E84cWJ2KKS3JNKmILZpqWSZpMI/+vqgMDRIdUHcpcDecFvJqK9l3hQmxikWBEnMzyrb1IagynqfFYQU5ldtM6aPeHVay/u4F6SHZhNCWSCZiJL/VVf7oD7evLpmjPp4KwwMt0VbOIueYsZAlwydgzXRm5Z+r6n6/Oqnb1RKNcBsA5SzP4WbFh4sKsiAcMeO0NXqT+vA6S55vzFwm+qf0wtBjaIpRTIJsvNVAN9lTE6pmFktgbJ/rz4BhadUzF26FKzoT5rF2apa2DsHO63uvc+k4m08aCiSoPsHNd7UBhGC7oOGUJV+7VySuL/0qrBovVJVzAf4UXXKezrsK3Xxbg6VDH3abUBxZ1c1jXQI5xgDJr2oBBgDb6sKespVpHqICfJxh97prlIwE9FE0e+r9TQ1RdQavyJ7WRNRs6qYsTDVhxdo+U/NOg/QxJln3JoXaFoSNR5uldapIJOj46qvf5G6s0B5/dWcIEV1bpOKbwoyMXsJpstF5li3qs5/R/c5UCvNayo929f/ZPqXER4TgjtHlcyPJvMtlbHBwHDNt5JVH/o61q8MZc8sKJ4C7xPRZo70kvzvLwZh9ZVF0tofahrVtL6hAbQIHd2jvsI08JBieY8ahwea1fdnCG8zLJxv8m+rUzyqxySj+vNDcKQYDcBzzXma3RpQ11mmC7hn6prLiql+gjKm1s4LrWziIkxc43is5MdaD/KM7zmNirhm+RlNgC0tTJ8q2LKKeklHNX21fgES3IlSKTrbXVWxGsUfpHV3pV+0Th2tq8cyfO40M91/CpvabVeFB7mcjV9lALY7XLLLscNNnbNxJB4jgiBd0RMj4OMjoejCv7lF
o6gZDnDcouV64W2j3LfX/sR2QGv78iN9nNqv3PnYfeSyfXzrRW4cinOoQTraDjp2ZwgciUO2O0Ze3oQ1L30du9W1Ow+798WB0AsnWXarPGs/c/PtyN6kg4Tiun086uVJ8J4rleKsLv1rkmGw22L+vmVvYrfNHo4Kb+xWwS0ucATj1HQMp2MCp1YXlgwd1a7kSglJz0/7qHIkZzf6pmYfF+J2knB3tRMhn8oBYllJHUteu/ZPHw4Q5ajtc9JcKyAfWVwSzWu7snJQ28Pmp1PLC3lHG47algiRRYzA7hTxrrTJGYjDZvLRPtblc0TbTacm0QP2pXxZkiBGLav9XBElgiJG4B22kxZKDLyGkXenllJ4jgq5Rcx3ObXAMUpNshA1aWSx66beQWvDKdmHF5Tc50jUqUnYwm6jgU6Y9ohICaO4gXxncVM3VsU1TOJzpHhsC7anUr7nOaZ831FZt+B/37clm/8uMr4fs3V13v+x+cztI8//fhQ+Do1bYf8lNL6mWu8HCGGsZrHPnkHk183I1pmbiUEwF6PCFs7LWHtmDk41MyuYQGA147BTcDMRxD+xSsyfhhsUN+NyuOFmuEhwM9jxczM5bvVko8LNbEibBcvHdoBk1pQ1rlzczAL6MJlARDWTFA+cyWIE2c2oddzMokqQQco1c8h99VbYXzix0HO+LXSPjcJtC4tlIt6OLTR2rrbh2CUfF7PAhT3gITjm+Q7Y56tJFSy8nrPg71gYxLqZPuH1AeElPg7uLXDWlMj6A+GZJT7efxFzsqBfk8qOZJz5FskeEuyWQIyOCe90yDNH6qYCdCpvfK3MEO4QU9glwvOALcHOPOjuK3ANlH2h5G7IbotieLkJsHQFyjvo7pVPeEkFe1Stv9eA+1nBDPEz+/qLsLNCgc3tk9IsQmZHZN4fKUy5p9sk3Pu1sOkSu0idPjf41ATXpzFM6xS0mQtiNkTph7rCQ8GBYE/wWa1ETDdOxPEjcJOid+eE8Sb4B159nWi+ubsk+KZvUf2YcoAxGsMMoY65/FQM5JpARcdTIrEV/XQAyivN452TrLO3r69Uw2kFnacmWBdsgF9Nga/2mUCKEikO1Bn2lGyZOjvJFf3SVfw/lSKjmHtlfa8wdHYs38VXRx0JitwGpIOMzktXSW7po7pNv6EEPhVDtuYvZ2F7Q0RLCM4K0gNmPhRUQ28uCqp/bq4JrgsGv24KLpNiUx1pTvhz4LsXImdqauEm/cJ7CBJYVTSDUGmZDpXx5yCBW1YxvDTxN1TVPdGwXBNNLwJ8cv1VP/BpdhMZ6aP68+auoIbbZkFQeW9uC+YFRdumJoBNms/ZeBBtW4Ivor8adFOkcC/sHFLSHLWPNl7mGAH5zFHYP7KPf5ZZs48d5hg3kw+fWrgdvGwF81ORJT+zK28YGQ7OXdkRcjNXUorItPTS5HzYzVzrRdpbbqYij0WZU65I3MyJPlU5T3MzPZXnWUdYma7m1qHU+DKvKt2AAz438yIVuoz87bgZ6Q5kboNiyfa9m3lg0cj84i8Di0zMsd+aOwPnGZk/iUFkPpT5SPBdmn2ZNxVhyvGZm/lEXdjNTJTDGOGfcRHfXrlPW/t6ws4NQP8mVJ9lfbLQ7YTqc5wa1ucFo1xEWuhfh+qxcAcwH6rPhL8Bljb1OJLKFrpXobos21poTkJ1E74DOKVQ3bPZW1AI1bnussAchuqRcDQDfAzVHa4ogG+h+hpH0MDTUH2d1bS+gakIC41F3+R/lv06LmHqcglTX7EdF7gZqq+GfwE2pwx54l7Rgl6ojmyIBTaUtcutBZehepKL1zp230J1HMHU5QimvgCzZyGVSKsE/8J5gFcN1Y/CNYBjI5BUBdi6HItU++ErwFKoXrKLogVuP1Q/QNAAaMOHtJCF56E6LioscG1d98L3ANsV6gXc89aLSm2Lqm5RlW3bdercadVhleo7duGvI2hbv6YprmnJpuXK6tz51G8gww0+0OotTlcttBFX9FwLLX7N
shP1OuLBFnpzoXqDhmrA6tXPOdwE2mJeqHXlIKuO3rwFlqIysFE/4c7AQmOpfxr+Aji2PhVR60w/nunHF8r6QtEHEPhVCPiSqg9JDT9O9S4uHeryklJ/pizPKkuPS4d6n7r3wb3nssFCW3p8ggI9W7NHrjIsdCzJnuhMtyRxS++4I3SnLtJWL26rF3dEC3QP63+05p8wvlS0Ly6u699co1hIk/xwqVSXAF/9F2E+C4kZI4hg4W2oPgnbfvupb5/Cm6roU5HqjZNroC3fu11/gbZzjBQ9UvQHER+cqI8nnJeO3yyDN34GdDhNd/JD3T/kX2HR8wOsLDv5F7G9+T4H3/kelsnyz7Ksnu/aicLJP6E37OQf5Ygw/yBtzfw9msNOvoMUcL7NPUj+jt1G/lYH+nkkEJ38r3Yz+R+smDj5byF/BWlPMUHs5D/h//NjCf3mP2QNIT+SXmb+HQ42/4YFg/yxfi0H6Rxxgpv/hxx4/hBHaE7+AFNk+ZJMv+WZxZ18UbAAQ+rk9yTTnd9VEXcQn3PyeY7onfw27e7ktzg2d/IYa3Hy7G2cfBO9ACd/rZcGXdzJ14Oy1TjjdfJXKn1V9hDyl0GyF0Ehz2UyL38m57D5iqxm5E8ljZ0/CSLnA0rMyXoG7rB4SwSpxO3YdvIxVTsqiYS8LHk7eRlIc/KunDbmHfquk98Mvm1wJZJfR8DAya8hdZ1flXxPfiVoZF2Q5JFUcPJZ0XeJXUt+Ed+P+YyoiAyyk19gNnbySYxCO9lf9ikm53NcbnLyMZbjEsvkHIUjMrifS3D3kZMnsZxMvOXiyMibnK6gcvJSYnJLepENtlxGcDEcPbSPBf2e5jYrJ7kISReanK1W2j42uWXIrXHibnLryn0leFlFEMfkltFEM7kCRpdzEjg2uV1Zwc/JjHAuL8iUZ3IyNpyTB71cmfXL5GTpLScTcDldg+QObDMY5DZ79rGvgpV0/5+7xBdgrsrOzOR0C5KrYIw5d6bwCbITuVNs7ZmcPADmWvL7lruGsCbXlERHrq4qN5gtTA6DwSZX47Im9yhLyrknfb9X1ENQjDaXMblOOHpqH7fhKGh33NvlXoU2lNvB3AvWpHMDrl9yvSAzmYPLSXrDrttIXeSmMsecGytuIrd9uRFGlHMfinoLEpOMSO4veJFYSe5Xtf1SPb/54J6O5XrzdCL191M4A/d0KkX9U1yJuqfviKy4pyNdvpx+ID7rnv5Jaf70O/jrh8MHrLtYHub0nmuTU3S8XWzT8P1W283TOxix03bYbglPX4IvA3Rj3NNXZTuUOYTTrlJDdsA9PQ/+vAiwpVbpnlaV9gkCaO7pqQwgnFYCjLPgcS37BafNoB7Id7qnV3Z9cE9rUlA9rct6wmlDjNTpblCKAldFp0VpuJ5ucZeErZkl+8gHCe2wA3dP/yk9+bR1Tw+kQnp6GGSc4YDldJFe454uCS+rhJJBFim7C7CPBVE2UKs4XdOW/XQ9SGFTn5bRMT1dEcFWgw9uuG6hCXuWrzz1xGSe+pK0PcU5onvqkPYYI4zuaUK+j05nVaM5JTkvNdDTaJBaDPb0dCYoVVx/RpCJcNJr4WML1znRSG8wkac3mXrSOebh9DLnFekVmdlJrwopI6RFzZbpJaRHnTQzgZNe0IybTnPekU5Qcic9ixCxk57jYCg9r4UpHUU0zUnLDXd6BqlRJ+2ileCkDUyJk8ZUg5P2hRBB5OtxBp7beUzYqMUrVIYtbIQ818GHg+sK+oiwuR6mfjxX7hM8HI+m7SNu127PnRGcldSgmwge83R0z50L3lISq3PTQlwI4haDR6Bs5Wb1aUkwJ7gquIIcobuu8JqcTbibetkQ3EbK2nO3gkR2gkeemcHDnyxvu0IsChYES/JV5e7r5TDAOoBj9dwjxf3DT5HnHuP2xS2HozH7OEWzzT0JsCuSeHTPhX0ZxNXkqsa9UlwjeKkHj6a8WLnXImQrQL8JPt3hycNzb/UXLug8F3Ownvug
mHtJfrpP8i/jPiruGQFLt6twH1uvnttDmtTDg+2GfbxIcBjvtlX7eJUQsPuutN8kLup+6GUk3UdXZlc8dxoU6FOfvvGT434hJOC5v8yhnvsjfUT3j348vLKLgDOswcc5w4Yujoc4FHaGLQ7VhiecGL6PWVCHFZik4Rk9f3geoF6IARteIrPkDKtB5H7wwLq9MzygNzrDQ6mQDf9x6Dg8CuLKAd5x8LalW+Pt4CUvF9lD6dANZbR2uIcEojMsiOsaFoN/v5THt1L9wVGqM2Sj6LyzzXOGkvx2hu86TB6OZDcW75yW8RuOZXhkOEFP3hmiNuIMp7KDNeSW3Bk+q0AvymEQJPQaPIbB41YIUpQbtsWWDjsymTu8l8vtoTTlhjqYHj7BpWQrsvr7PsICrPNYRl7PGeYkwjlcDqq+Imb1/QMn0cO1IG49qO0G5+DDTTYVzjCp8+VhKsBYCB7pADETPBaDx5JaWqfBw6jshA1jIukMMrPOME6vdIYJlXY2KMycSDofJBBB/t8ZSkR66Ia9kn2YgAaemKyhryTepdKX7suU2aMUDt/fNUXeyX959lKc5eN1MKk1hZe+wZ14uqWXbBU5eid9RVd7fAzPWzKmlVC6ATtql0bLxtlm2NlR+JhwtmihMztPzMKNhe7bxMLWLx8P+Gg2WsD5RVD6m6DMrvBTpWnhH9Hm8NHC+09it6Jg2Ama6DKwAbY7HRMeuYK+hf4q/zsHb3xdVOqPaeLXjgg/XQPTwq9RDvegppgZwjtnYM7VFbNAOjuU0pt+Wbj/ouishYsx/l8i3f0n0FYomHt1Tjj+TCJOQ5mu88/+AV/7otXRLOH3toWP96RSOgQmUkQvUerHB1JJiZp/jrA7hOsK75CTd35q4fK+gkEEtfCXQBxAKpMnwr2PW1iAjO0qqSaXQR7HhUEJTeqCLHsEV8l9g6CbJ9b5mwLnVZIi5HjjR/ePJvZeoLZZG5JvRM21fEv83h445W1V9hJ4USILEjdHqvKREI9yCkMJL6sedPQiyr3r64JgSvFTwU9Bvjq7wb8jEu4SsUmzuP+o9OOLvhWITqi+gy/FHAmWhd5X+EZQdDiq2w59ziyTvdKQSEtdxnn/k1mh9CWGK8oPuBUvo1Dnlm/Fn5Y7sjFVbsPzlQewV+UXWMPyMBz7Zx8IMbtlyzNafqfcF3daRuLZLU/kt7g8lhRSeYroT/lTxqnKb3i8LX/o15HEssp/7AqcxxdxjuVv9plu+SvsW2a1/KMC7ekCsLwrf5LlIkd65S2k2so73Gy55bzQjrgTKf+DdSsfC6LY75ZLYcvllQ8RWytf4KayfK4649DELV/CJZZPsSlVPlGiZ8KvcB9Zbsp7b/lav7UQmnXLN3K0Xq6J/yxfKe+GUqljlqXsKm8frSa37HElVo5ork4fQHm3LJWr8ry+zGHoyC3H5GO+HIefLM/o5LG8GI5ZfrycYZZ0y0uiWUowqbKl7c7DLS+o5dbVSmscybnlTTHN5Q3dtJWXYULLOf2yqpxXwn7flqaohS+txTC9LTPm719M1uk8O/U0fgWdbBNZxOwNol2vmwrLAWrJrnBr9vGml3fpCZVY2bzSlDMCrzSWblXpV47zSl/C+0aPqcQdrldqY1Sg1JE6R+lW3kZLd1KpKD0K4Uk+BEv3enkIXnrsibySHDOVnqWPX3rVy1CK/aWXAGEgRfxSBZ+HpTMhnMhDaulUPqBKcp/rlaoc2XklTId7pQvpkZTqiLZ6pQZ6ZKUazvNKN4ItZPi90nV41sKmipVX0jvScSptwQeXREmvVLCbYa9UFCdV2sVraUlumEoH+kUWMr3Sftjfto+SdL5KZRwclo715z+Ozb3Ske3tXiktFZJSktNYr5QSa1fKSpmqJM+kXmkleKwGzZKTl6rSMmdEXkkCn15pE2sHJdyBeaV1pOO8koMaXInjYK/kyRFryUeDpeRiQaFkxISXZrDAUJLRhVJUjGkpJorNoS/rlbCu6ZUScDle
aVb7hyyVbZzBXVo4CDUqduRb4JRCjVPFntq+0DhR8ASEqm0rCxwbeylo5ymBt1DjHJFvC00n1GggU26hOwk16mgZNKTH2aiFBdx+qHEV/gG4+6FGC6Xxxk04amETOdYGlwoNWdtp7IRLAMsvNPIcvTS22Uk3OOZobMHlANuhRpGuY+FSqFGwPcEC+/MeB42NXVStG3Jn1jjgMNNCkw81SnYf1NjnuNFC29Uax6rpMTUtw1UBLQpuNi2gvv8sc9JAi7zxAXvdGDHmGu/0rQYeLBtTuMnGJ6LrjUnYFnSM8lLjl9PPxg+DvvFNGRlrjT90sBodtpaNNmx4Q1ZoG3CSy/P0ksYDqm/ZItvTRl8oPY52Gs8sAI2uqDWkwK+cFTcGHBg2XjRrZPehQxr7g40FO8Fb4Nj2TYXnAb5tSSTLG3hSCTWy7Pws3Aw1lugVS3bkNxbtkLDAEixjO6QFtj6rtMdq+CzUWAFvmU2XhXblaOTwBg20Vd0MR/eBlr9tbJDNurrHul2jGmtqjzX1CAeheQshT0SFj8hERpbtQsPHADXQFsHjtMNCu0NvoLvVcJFfB36EGnFaO26n9sYMoRlCeJm3wE4ljahyiSoXS9cHoCUIZhQsoGFnw75NJgF3nz1ge5M9tL8vz2HR10L0Ef5xRu5kj6R/mC0HDOqx1usZmaPoX3KK0sfskn2cc0rRDw5n+hWOa93+GUtY/4SZwu2f6iylf8MOxu23dLfWvw6/W9ikpG6/wb7A7V/J2lW/oFusflFZ7bLTcvt7WuD6eUrh9neCNy5c7WM7SLHMgarblydrt/8Pk5P9I46Z+gdcm/UDk5T9fdnv6pdYJPufOgjqT7FM2R9zfu/2ZeLQ7Y9YEvu6Xeu/6ft7UFidpbn9HxYBt/+rU57+F7bF+t86Hus/hqN2ges/YSjH7d+jbu3225ZJt48Oug9u/zaItNtnKMSq4faHMuLVHyBu0e9hC8Xt9zEB4fa7Yk/6zxJQ7i+hQ+D2M8hO9xfxx+b2F7Qy99PyWt7HeY59bEioo7/JvsPtryFu5PbX0Zvur4gmuaC5lmXTpe8Hf5ng4aD45fbngrd5mUXrJ4K32eAxEzziwSMqm6T9mGwNz5QY887BsVa6Soeh4FXarLmVO1kLqtyihONVcG/vVaQ9WcGthVe552yg0tcCU8FmuVd5Dv7v2h7rVWT81Ku8ypVvZcBKUnkR2gciRF5lFGC/6wSh8oav0MoU6nqVTx3CVNCQ8ypj3BZ6lV8OoLzKj7y0VxDa8ipf+ulPZdtB77SSV3hbipiVLb0UxSJUCnCNXmVPP+6qIodyA1454ErVq5SCYu3Lv2/lOIgsB49/weOMvuxV4PS8ymkQdxLgV4O3y+DtQhrqlXPxDJWGql0XrAleBWg3zDFe5TooocP86VUiwcNXQdEM9iouq5FXictoUmVGS3UFKUr7iOJh0KvMY32qMiczTRVZGPQqaQabV1nAJIVXScmbcSUZ/JZlOa4syexUZRFLSJUMPEtlVcaRKsvSaq9s4NS5sq4esSbHQ5t7Otqd7MuZ+aQEcz3B2r87ORT8B+M4OeI8c1JmipkcC0pkbLItuCO4GySxFzwK4dicfRRlonZyRaHdSU0aBJM6dy7upCHTeROpNrQqLBWmdYKqRatqWXDTutSFQOtCHlVa51yqm1ZDCHX5B27VcAXZukJ3stXShxspebSa0utoIbNlWjtwHaal+4zWNsf+LURD7KOIyVTTKsjHY4sLL9M6xHti68CyuqZVCgq1r6uY1jFcrmmVmftM68hOP6b1T8l94BuxNcJBTOudC4HWm0oz5fah9SldkdYE21emNUZlsvUr/zKtb/34pRud1p+wZffEtNoonbbwO2BatyhmmNaj0nwQ1j3abKbVh+M3rZ6+cL5nWkP5tmm9ct5oWgOV8gUzXe6kyzmiO3m2nG3jnlPtie463Ekfo4yTF7wvuZNBmOqlpWTTWpDeakvaTa2kNGpb2YB4S+EF
6rMYIGaCbFe4QmnllOtmgIerWdNaD76vofNpWkh+mpaPFS7T8uAdTMuVbm4rruuX1gwXHaYVU6NFpVDVmoerN605tfdsQMGErDZOOqirTO4l/TZ5YKGZPHKf6k6e1Dm/kOyYfGtnNPnBVr87+ZU14gkTjjt5UwK6K176xwC10NB1se/gTj5YOyfYcFiqS8OtzoTqTiai3KcynNK7l44wlr+swyqgnaWm/9CJ9abcknnTMnuS6bFY8um+fHhPS5o8pwcstd70UAYWpnvMatMCfc+b4qnFPrZYrr3pdvDQ7mO6w/w1tczti33cwM150xY6/NMrxHG8aY0LAG9a51x82tCXc+6QvSlmFe3jRNYjpqdSSp+ewU1501fmj+mQ8/xpV/uPqRxde9Me/rO9ad8yQt70XvYfpg8cy3rTR7qlN31ixHnT2+DbncxHTNuw2N60w+w0/WOvNP3GFfr0R0X65eLWm461X5xOVNhPrEZMp6xQ0zdp70/f8eU+HdlB4E0/ZJX1wNgR5U0TAU3kAXo6F7zMBw9J03jTWEDKGR1eT+MimxuQyGBiYOprkZtGsDE3dVCj96ZrMs4xXQ9S2kA+xZtu2unQm+bg9rzpcvBYCTBWg7dM8LYYPJZUg2zwKWl3J940FSS/ECCkeSyV7aRjwVloGd1AC0xo6djumyxgBvtZs+yy+VnnZvdnQ767fzbRQzY/Oam3/yxzz/qD1qX5WRVc5KTa/CxxPmN+snD55ieJdvvPQtjr2EcaCQ/zk5Cm2s8sM+9PVN7Gf2Ka6X7sCnVhH3H5fP1xmUTMjwm+cT9kfnw5JP+JyGSPLa2l6fWSpeJ1FlpfZwQXMddzvWCjfTu2nEzoOqn4FK7vrjfs0njNHdz1Ktuc65zdDV0vwx5ee5ax95fyTHXXLhzStaGw1w7yVX8rdta7nldK3KVfz/C7+7fMvuk6qgRi3GH//FHfa7uyh67P2UH4S9Lr+/mynKYJrGebH0RL3L9NUeFTN7zY4razzs8bSvA/77qQ/hlhtcn8vGhSw1A2ka+auX6GQVt05ZIXS9mr9tEL3pCpMT/3aqbHoE2eZGQAQ9xk1mb1+enIJMHPtdadn2aQ/I10KX9auqP/ueIQwPzUVIU6L/5SAw7I/JzD+pifCznn/bkMil9lr+gvXcnv/E8lQDmT496ff9hQ+CkHccfqHPvhroUlGtT8HASFP7ScoPnZpaD+kpyy/RSCwhQ5qjI/W9gj+JEdgJ982Fu2aJhdcv8MzPafi/6w+xfnJO4vxoT8F7UTp790CZ/j/s3BOLl/s5LW+5OdvKVTu1ZZYLdtcSe8DbgCOA/ApVAch9QWuA+Cv0Bm8xtN+i1ZPp6ccTH+d4Zxo78Ku2737zR4nASPqh6Tc04W3L/LIPIieJyz3Xb/Gjr0m1xotfm7UuFlwPnvJsBryqr+BJkt9+9at9d/O8GnvDZLf9vBG8LQFrGKwOBfIYjbw0SQY3Cb4y/JTPHfoUwT/x0wGbt/pQBvXwqtBtMx7l85iDsKHrShY3w7McUT0NlCM1b4UOGdkBNZklXFSFZb+EiGTuZE4BadyAL3PZE0mtqRJLIukRSOKyMbitkUjvS4I5JAdyJS7Y6sSiQ/IpWAiCdbkhFfyK622mYZHsCJODj6diIRyflEWMCdyDyUdSKz8ioTmeEaJhKnPzmRKEypE8FjkBOR+5hIlbZ0Iue6iorIBEqkwlbOiWAmyonIooUTucFsY6QlWzkRyexHmvLAGKnrpcE9X+RK/9d0PRcpcCsU2bVj3onsKZxHNyOyQ2M7kS3GqRPZFlXKun6MHAvtn8S3IkeqzgG2WSKHsvoT2edo0omUdH8W+VRuU6l3RMbSxohMdDsWGWnnFvlAkiyiO63Ie9jn8aeoH8FfwS8Ed5zIN2e0kUdFPXFDGrlX+EFlanPUH5HySUT3hZFXu1Y6kSHSBZEX9n+OWZSlmIjk1yJ9wa5ULSLPOqKevGIkezJkQ7ME/S24tBF3ulM0
mmfcSZuGNk3ZszT4uXDMM1tPx3SC/vyAMwfHsGQ74aepBe7bmYXe+z8Lx5cEdxoWmrmKopvAS0v08EoS7GPbvJY3zwnxHTj7JfQn4qFcuOjr452F/kpTH8f6uEBMdJYYn/y9qxWFTxVfI/nbGcUcAoe/xKTsIAq3i4reJqIXJ3x4BBxlFT4gsQNb9fDOi3LiH6fOT7NZ1XEevCDxuSofo5TXe70ipkJhzGyBYmQuiM+RorOtnCpAZyWjryRsvmMKD8B8qigG2nTXQNxXndYgpzP3JpRtoWxa+DOrvBOU6YEy9SJEJJ/5xo/ejPKfgXgmuQzevPK8ratcvkp0TDzua8NeEbI5iCmG3VnSNUstYJmsnfUzfaWuy5TITab4ONMnYlvfhha+UfwHwMYrkYkeFZz/AXdrSQTsED8gaedFpCt/8PUVGjszynKLtnY5ILNhFatFkt5kSXlC/IJqlG8rgZrKP6uwSDSzqhKJgI1bMOMqwp8KnToXpGt4vtozpb/+1HoTA+zQbu79jr6qyJui0sebcPZVObX5RD1qQpt7c3R490kluXhV+g+KV2oT4t1oRg0lCqc2hdMQvmqh3m/+SvpKG72QsNnUYJmNKsyQcQv3xDhQyjn4I2aDijpvdEp3nwHpPI8VHgmTsWF2roRJ13C76kLICoW9RQ3JDdF9UURdJJcXupNbp5d6fWjqfTOwnUUyHzJwvIUvfSwJqponrsLqWje0p3d7rxT3VAfK5B2mRQS1av9ZOJTP+WUi+PpVdFvotJVztqiYD8UESR4qeRX+kp7n7lGQTzXG15aakIZ0GvSZESM6t65v/O6v5VUNDZFL1eOSXm0y6nNfUNLHkIaFZG3e1XSdS/2rIfVFh/K2qoKa7T6hwZ7GwqsiLtf0cUe/ZgSpdk6tsvqiKQg4WhAx6H0jEXlLnWZLVPvUpPhFUjk14Re9OqWpZ3mkeY0BbQpq8T/Nssf87xSoxSHV95bUt5bUNdrqnUsaA0jdhL8ONcN883H3RlRQYe9V/GNS9LNzIKqPL9EuXU3Sc4x+8xgTNrB5DRoUPtSYidEy3jIF8YbXilGllmlys6qBf6VEMvqTFnNLjiI0ZWY0fR75+rUMLIrAc2rLVVWorsFYPBD+n0j5qjDt5N0vii5aZpbVmeY0aabmVK9vxV9raJOOH9Ecc6XRdXGujqXRcqQRck+RZ16FqBE8Lmh2IRnzq4HzcSOozOtqjjoEztIlbmb0vi8MGsxrvilxzcwxja3osdIC0w0WvaMtFYYq+2omp6PJvqGFdX5VOGqmOS1axYkWQC2e6/MKQ1JvhWq6z7uK56vbokd5Xa2GLQ2GFa2AwXhbh2jPTPwH1LKpKbSh7nyrOWg2ol9U+Y0zxTD2/XRW8WqLr3fBD2WkkZNV73zZEqarr6/6V2nuDLSkQUoNOm9FTXGg5WGtpJ+qylCL5ALJf4sHWNdo6Kh/zCyrrBuq2LLG+o3CGmpza/qqOWiWdj1Q8IAW8RMa/OmOkhT91/aEDg2dT83eV5o6r9QKnzElryl4aV4tpUkhTbxRam5Jbfp2LxzWIGdpRQUhR1PSfLcPcZz3fcVo9FQoZo8pzuTVvDOatebI1kdW3/50JJZCs15e8/+t2JQlDaorZganrBYraYnLE+9uaa7Y1rzxFlUnojX+rlXijEqgwZYS1VN8dCuU21/SclRVmyypbovquw+i0Yn6WRVMbzMiTPX1F6jjRTWMd7W2PWgdrVKEN/F3S6C8aeLTMuTdqJHSVN8Rt+LdUBovwoTo/uaVnygXUUe7FzMR0RpxPBK+GJWh/rphbO1oGtzIqJBatzYWVAx1pVxeC7CmqXtRLqXemdJ8dKkeld5QYqJumv7qXGqZ6mhROlbD3/xqlVbXSWvFnt/RcGVQrbHAOCMotEaKv5rm9jTaV6l9SbNulh79rOnqgvKXxDp6WkH7Jyx0mitmKcizmJSbU63pGtxZEb2tlTanObWgSelBMKtp4wFyXartNqmC
wROpLYN69rx+fSB1TUQmIsZmT7zojoqcF+mzDD+3qtHtiIlvUH5/VQ2GaF/4j9TdL3UzMRt9Jk5/QZPUHF1oUex5VfOP+Gt3uikUtfSc1nhHJEypV/5pkXxTU2CJMhzVzFFlSP1pYZgvK0KkfLvQsNAcequhYLTg7Srdxr2Kq1E3r7XpTYzrhOa70bySuNOM9E9FEh8wJ/ZyquVrWzSZag4ckkwbgu1rGk1qwd7XVJKE9k6cMg5F6R7N7y9qctrV1OppQ7PEr1Ptc6bqc3Wa2nS1l6prk1BXWRargudK3dWvfYVFwm31vK5IeyUWfvZa+Gq3ZE74YmkfRZoHrbQPx+IrNMG+a1aeYd7wYup5ywXhMNP7+D4Id8XqtcSzfmoXM1Z9HjQVVZVVIS2oifdB80aVXuedQyz3jEE3udL6XlEfp+b7Wtg/NLnVBxqikHKixU/bHU/8zio5uxFIq52hr/2eq3Yw99ri3Iudv9cACxaOe/rfrytEDYWOuNy0ZpRlLVcbmscT6qIFcHxx6m66q1lHu92ChtpRXl+pnTfQEjNS8TY1sla0EIr38zbpBmZBf20qBUfLU0Er8UFEewPxFBoaO0OVVZOlBoKzp/n6Uov4UBuHm7PQUo1LsSVJXud3uVSz8CmU3+NEPV9gT2+htxTKFxUuKowCv2A+hNrXXsgkOaM1yU2uyZJrMkWWXJdOT3JF5j6TqzgwNsmcLIUml4JHFiVVk8yg72KSi1yYJRc4XkwmdZSclKi/Sc5xi2qS8zKMlkwEWc5y5W+SMzqITMqjlklGw28WxnR4mfSCOF+l0qmTSRpZ/EtiAC9fRrUJaFvhYBljfMlI+MvCGx2yJls6w01e23ayjyYHxiZZ171ashGUGcl1k6zp6DR5iUNqk6xifS55HiBUuFAySbyAmuQp13DJMmqaJiktqeQ/HZYnj3RampScukkecjmZ3BdCSebrkoVwzsIil+ImuctZcXJP5liT+YDOqODbajyhp5Y/p/1M8o9jN5P80R1B8gtJDpP85jjSJD9Voik2P01yjHsek5wEtByFOxZ+IPhvkm9cR5nkO2fcSemLmuQL92cmOcDYXLIvwnVFCClam+SjFMiST6rAPebykg8oTpqkTIKaZCdI+5Y7zuQdL82L8GyoeY700X2MpmpKD7CJ3bt8RQ1Vods1MV7cxFtes0UbYtbfAvtvzYaaKEw0G+HNULNuvzb3ODdu7obnQ03sFTe3sLDZ3OHWPI/v+Dy+45tHyJg0/yG+1jzmVrxZRsSuWQpXQvkbMrhBJq55GLYIB7RUcxK2YIxbmeaUE7PmZ3gaar7rZ3S0mh8YsLx7DH+H8leIgzW/ETxq/nIUma+hj2oh8Q/hw1DzPuzbH2USsfkYboWadxzO5us0Y7ODBFATAzLNAbm8kPcwbFNoWKI3n5GhanaRE2z20Vtq9uRj++6Jw83mYrgRamYAWca6c4fUZDNFBF5lmmnAQngYaq4j5Nhcs2RpbiId1NzghyaK3U2ubZuSGGiuEDQMvqYPYT2K2oyEx6Gmg+RKUzrrzQQiEc157g+bc/yCucpmVEnGGd3NGRmSuXvgXDyWkH+A2CwXN7E5Rc3LoEksKqmYWExSwLEZZIhikoaNIYnoxoyOHGMes5cb8+mubiwiqaWYIysAsbWw92sf64gnxFCgcmObCuck5BNbDnsz9rHKxbgby+i8PmanI9c+sraObizJmawbS2HYILaAWrobC3TFYv8kExU7UmHKXOS5sWPJX8X2VY8SxoJi2Lt2Y4coLLixXX3YY3ZzYwXuJWJFZk03thX2KcO2cs0LTTIUsRtZMoxd0dpu7ALZETdW5eLUjZ3IL0PslNsgN1ZBmCjGwHBjr7qpjj0rSlaLYn2Jd8UemCPd2CNqfrHb8ImFdwG12hI1inUCgv9hzd2NfUk+I/aNyHbsB5ni2C8X9m5sjOKZG5tw5RH7VK5Thd9kOSH2Lnmk2ChokQ+kBpw7rD4610l5frrD
ocFy2Hl+sTCzSPByz8LveYKH/4ADomdLBL/OLXQnBrhzRMzMB+HxsYWL2wSfBxb6dsGxHxfqFi4sWOCd5JVg28L1hgUHoDm/Pr/cnYCx8mphlBzd/Uv+m+O9SIjXIlkvgOmnL4VkFL4mOq9ggaCK/EvZFijUepS0z3aIPcvx3+W+heZ5nbDd1lq8Bz4eTKj2CrHrJPR6S+wn1dujMlu7fOo8ktw6+e2TjhPP8q1qweo9oEdJ5h2CkHSOUJwM8otKOsFPUTIYfJLUJSRyc1QzCprpXipZwvFTgusuGD2y6I2JeCXajVB05xHs2T8i0kl+3yeDpBrrlUTXVOAotEpuq0a0t1tS0q/k4h6886PdYSyHY6LKP2rilTdBuRGVZyiwM3lVvKPs0wrTRksrQoQQTmtqYQGiOQ+/YBzTPuaRPuDaDYmNb2Qs3Nji4zl0XFexXjxhH0IzgHOv6AEdxFlaVZgy7yj6/oD07kF04xPF8NFLXav1Z4VOsebO+AfadWhEd5kOWEgrWKPCdCinf63K1AWv+O0LjCPwnOU45S2LRhDDOVILvTjCpggzzwTr3xbOU7CVptoDtLU1/mvWRHkoPN8CUKCZLn/9Uqx5fTcVjUSItPqskaiMPijcPL3fSZDFTF1Np46Wptaf5Gl+wZihOgkKnFC/n1cDz1KlbTr/vDrQxaPoFlc11GxN+s7aiGD9hx83NazbUK+ktu9AUzMZWrgr8j5rhBu6XolGdw3lLKVAoLnc1Q39p/SPLNN79yJPd3fyBHfXQ+omIe/SXiKnl1UJ8iVWkPNISBousSZB/MQGSgkJeRvwEknc2HiJtP5ZkL+IxKL+ycgLTSIrSb9ETD9F0RdIxPHNmJhBnCQhSXgvkUC8JTGvVOYQOklEpDeZcIRl9MFV2Icb8hKedDMTNclxJq4wWe4lGsGjjpFkL9FEfiZxLc83iZa0HRI3iPEnTpXeiQT+Emesrl6iIjn9xIU+nUs3M1HFg0biEnbLS5SC6u8HCclcsZc4kIxm4p9klBLHeHhJlLnP9RKym+YltriA9RI7iDUm8mh/JPYkAZTYlfuKRBExzsS35IASX0G1fmHOvcSP6PUu+CYoFdPESP5NEhMpciTGeKVJTJHP9hKf4ZqFz6JnV+5nEv2gqD0l8IKeQmIo0ryGfVqiE1AtkD1KPMjHY0IqJYkn/fMoC3DTN8RSHUkXOVNcPeTlPsKZynjgVMYDp7rHnU4t6+ZMMSOdj2q/FGW/5Ex/uAae/mIWYoqKSz4mhjamncf0VoqN0zvE2pxpG6nvaUfp3cMU5Wcsl2eB3fsjRGU3p9Mn7nmnXd1xT5+VbA9ZnXxc6caDdF9g15zpQAm+ogE5PbE/5jPavmUCJHlxnJ6hdO5Mz1Hjnl5oiZ5eknx+Ud+vVPla2Luzj4Zl5JzpNTLT+SXEZSwUQW6E1eIyfbolXY/pdtjwyCOFgqWMPaChWLvIuDvTPWm4I8v2Zx9FuIx8UrVIBgU8YE/iTA+5Y5/+w2+KMz1CT3ZaRg4in5JlkqlMCE4XtAmapoNHxjJR+YWwORUk0yVRMSvGc4rNwnxaZUoH7SRXPtN1VUN+6KebMC/5NTh1B7kvsnKxAjg1amuEnPLryPljLoTEoyqoBMocBMoG9hHHwYODHFo3lN8IEgv8O07nIXZ+UxuCTb7kc2qenHbXy4IrqsIKVcivavMbn5PgT3w2eCRkPD4eR1w2jiqCicdwU2biUUxhxBExNnGPbV3cKOwiHhvnXNo+Ilijj28gnWjiEmcz8VWhrQjm0IcwcWkVmfiStqrxRYnTxjOIJJh4GmU1E1/QxjsucyPxJDY34scycB4vh72afRxpSx7/pw12vCR77vF9hIPiRXqMiRfgqk18D4mi+I4Syst5SxzH4ybekpeS+E1QzGbwuMZKe7yhrXkct2ImXtOv8mISv2RHYuLnshwSP9P3CrJy8RM2p3FEGU38VbjyP27iL9ipMXF8jZt4IKIVf8aKSbwr
AeL4UxD3iNqFiT/oX9m5j3ckARZvS/4uficq3IrEf8rnVxbd4zL/Ef/WKUb8i/5v4p/a7McnwdtY+/b4h2S94iNUFkz8nYOAuOyTFuSa0LkuIGNa+EY5sfAFP1/4pd+5hR/OV9zCRBupwpju6xZwSOgWPmXFo/DGpO0WPqTCURjJE3lhEES+IJXsFobBN7Rd7eNZe4xCV2bYCn0VoydhqsITm2u38Mj+qnAnlYrCrbQQCu3grUl53cJ18Gix1yuwbXcLVxLNKzC1uIU6u5HCBfJhhXNLc7dQxeSEW7iUi/SCnE+6hRPkzwtnWE51C0foURT+BXjHqAS5hTJ2zdxCCW2Qwr6sdBSQVHMLe4wOt1Ck/7mFgii3LYuYBTm5L+ywqXcLeZVnXboqhTVkqwqbqvKGVEAKOXnaK6yytyusaN9UWBR9M0LT5rGwJK3NQgoJYreAyVO3kA7KZnkHuzkq4G3CLSQCusyHs5Rxjk1UIU4fcAszTPxuwUiFpOAqCU8KsYUIvIhzfaL6JXfRR02WLfSjGQSR/ShWV/1oEh1ZP7qA88BoWh4Xo9gK9aPr8kcYZcLzo5sMVz+aw2FhdDn4siInhtFVW38/KrFYP2qgiR9FidWP+niM8qMR6uRHHXqRH02EZyxEhd+PzgcJaffvR2N2mfajcXlVjJ4r2wvUS/yoBqofrYYpA3Zs/WgF95fRM8FrOq0fbcIt+NGb4K0V/HNlx5kfbahGuzAKliA7NeDV1ELHqVt4sGeBORsQ3LLgr2pBMQXW4itYX8/Aw09iVroWzv5TsG3he9wC13hKIkv0/D3wJMJHUvK21i0c8s09/iPpYz4dkY5RIYC3yxas5C1ozfNTQ1ns+MCDM9DWyam/RkSeYpmfHeD2IfDt0sKcUURdsKn80sDta8WcCV4IUovUAcmWWxam3yxYJSW3EwVhjuL6CUXHAAkl5IBLEcxwhtKAlIFSyTsL2icWxFcA1GYZsi33wPHB6UKHLP/EqIozeCehQofabxLxAoJLlZ05CGUeMwpPCA/GpNawYEABnIexvkHswa4FcxDMVRMYBxIui3blJwvV5O6ymn9Cmt7sNnhbt8BXynFDCVw7L52E78hvAM28+w0LZ+gq5uLbwl3InP0BdXih5GmJfRJb3Kc8H2q2D1JwNqhOSaXIQUP3s0K4Tnfz5iDz7wfJqdVu1MazFjxCEO+irYQWLbxU8zVJzYyX1XxQex5Cb1M2b5OeX0DryI8WNXS25Bs1ui2nrtE8LkKjO/InG/0nhCPk9/xo2c6kfvQYVsSP7sNY+NGSEA5gH/zoIayIHx1zaOxHJxwB+tFPNiR+dBoM4DcNSc6C/egoGHcfSkK+NPzoVxD3jeqkH/0JTy38FbwX2mOYueNJ8JZDQD96x5GxH22H7yzsCL7gOjU6CNKS00s/OmRD5Ue7YYb8sxLoBVNVHzw3+adlKvkoXYDUDKyAl4pjStNLyetkKhbEzUmhPDUv20MpjBV5qVnpSqUiUq9IeUL3cR/jpdzgLxM8VvSJ+d5L5bTvSy0zqXmpTXk2TK3Jhk5qXcZuUmntBlNJzsu9VErKBqkldnqprPyBprBO5aUW0YXyUodsIVP72NzyUmVtIVPHcg6Y+ic/i6kjNnKpvPS7U1varKaKUpRP7Wq/k6pro5hq0Ipe6kqOFlM3QcWwyuelmsHLqQz2pC6VZxUJfi91zpj0Uhfs11I9bfJS/QC9Gzye0RNLyWOzlxpqt5oaBJ+CnVSqg7PF1G0Qd8e5qZd6ZJ30Uk+Y6PNS98G3h+DxEzzYHXmpr+DlW7n8BS8jqYykPoK3N1lLSr0Hb5/BYxr2odU4eJvI91Shw8bHsh6WHt0Vuxh1Vzlv7ebCP6HusoLY+etu2hWxu4AYdDcNXbss0N2U3dN2sbDXzXJw380ILtIKXdm56kbhdbsxzD0WHqB2d158UGfISXDnhcPWzkA8VKdnNwtup68j0E4XDqODYR+38wS71rnnhN/tPOil
LdhBJr1zC3PUuWOT1XVky63zI6O5nV8OjTvi9jrfKBK4nU+Jj3em8BydMZqVbucDvdnOG+q3nXep0nbKYkM6/4LHkXI4EIfYOQzi9tlyuZ0SR+SdAuxcpxg2U/vYxTSR29nj1sjt5DFq0dmRkmtnS8xLRz6V3U4LDqVzreI1A7I09HIlWJM14E4Vs0du5xyZYLdzpk8nsH2dUwTzOwl0ajuzMjfSmQlHLb/YicsPQCcm8noioq8fXQyIdIzCjtwJdSIi+4ZosIkSkdtZE3XWg5eVcJSmWuU2oZNj/+F2lnSd0FmQKblOUsfenRQtbjn+ubyF0akF5nDTQmcxZuHTwAKvStBYDuMpvPhJcGcMxq1nob/wqn+EWF5T+Bk4WtRPERKvrpN4XB9/Qczdk0Bjno93OQu/SdGcRYXdBs4+AtevQJwtEb75Jtz6sfAFRG8hwU9eVJAyzqaF11b+s0JZVuVmCC8+K/zPwlVl93kCXFMFRqsWTlcs+NkHFKmbw3/7fX6bzwmXgjhZFSRKjj8x4W1ZONwhk0dS88ZdMF5/9HFoYXyX35eeVBsQzVtEFMsCo1DSfWgSv0Ub+P4GMRXRcBUyecUjYurU0DsXhcsUwzzQcN6Nr5iSwqr5fQ9858bCAUTwCjf6WKYIEyWZU/w5lbz9I3plBEw+KVfq5CwtkMrqGTFH9Atvj5q4C1DKTX9ZuEkTeXXRMkYHEHn8OSjjHW3oI9jm8kUQcri3pO5HqMcdncP8tYhemRCeTO3E8xyOHYWcjT/Y9aw8I/tZn8XMz3ow0dmIvjicYfn49LKreHbecnN+Ns6w9LN2Z2HhIuYT/CwnJj5O+0BbsqPHz6a4LvKzSRjrrN21WL4iu4Air59dlw/47BqqlX52U/9uyO98djmIy4XbFnKW4GftYLuwj2KQeAF1fD+7je/1rPQo/OxReGThP45c/OwxWmx+toyWi58tKfFDlD/87EGQ+AULmJ+tYvvYz+Lo1s+ewv5kT7hE87NnQVaVIPVm8HaNn/hsS+mhEeLLA7SfvQpbBibbCBtKXBeFHjAl62fv0Y3xs4xl+3i0k4WfvRNblL2VO/lshw1NViuNnx1gS9zPPssFfbYb5NqXU/hsT4gTFko/O8YRnp+dcizqZz8DxHcV7I2zJD/7wVzkZ0cq4J/2S9mvAO+X0wAP28d79vETjtpVvP6FfZh6sGzWP1AmxKIxce8B3hvHoPVp8PIJh+fVJzoOrY+lYVrv61i43mNH52H52S629S4HQF59iLafh9FoIgdSUsd6s+Uc6u3gcSe11voTVgcxBW25ifqDjD3W7wP8hpiWOubUvHqNE/b6FdZv6i2V7UbnsPUz8RL1imzh1E8x8+TVq1iU8uqX4nfqF1irqZ/LMk/9AJXUeonz3/pxgF2WDn9dqrX1fyweXn1HDtLrebgoLHjH7WNLJ+V12bnwMAHO2yqH7vVl+KB6LvhpE0suXn1DtgTq60p2Lfike4D6Aof92Cy3TFY9K86vvqhjc4yfgxeXemx9RsnGVN8oCu9efR7bxx6G3kGfVW1wjH6zxcF2PcKJx82OlFCx2U52BokKr+4inXLzLYQfzjNufgWZEpzCu22ImzeUkW7e7XC+kc2/mw946htcJt5wfH3ziT7gDXY3bmxTh256loPp2g4cuoGLucEf380ripw3Q4p7cxv2bbJ39JibNlS86SjPeyagmwfSfQyPQzdP0rUqvFlurvsSzoW6HaYZpzBlyDobI1u37oOdIrr1sGc5sQaCMd0rMWw1W/nujR3T3RZM2jWvTVi/boXCnXEGXUBhvnuCpnL3FFaiWw3bBM5l0JAzMPvjBYcR3UOOT7r7KAV3y3T7rt0HnYTczU+xcnnmC6fwHY7VQ90dZFC6lrf+CnW3+dAtYP6oWxRzuItRke6enE5W+yj0V2UcxFSf5dey2pVqaBUbePYx0HFt9UXGjqsyzGiqbazDmOotvmtM9Qlt3OojlkhM9UECNNVfXDVX
fwQDv9PVLwR7TPUP9s9UP1DFrI6Qlaq+Y0G4+hbesXCKDFX1k/2hqU4YPaY61rFnFdvepoqlFVM9hi031TKnn9Uj6bdWLUOYs48dGYmu5vVpG91GU90KR//sIzihrRakj1rd0zlotSEbDFVpkpvqlfRMqzdSQa02RZ9rmWCuVpATMNUTNkimWuWU21Qvg/pecKhZPcdtdzWuc87qjP6Nof9pqlHRaF6n3dU5Gcmu2j0beSTCUCfCXYep+kFVPeyDmyq+okzVlXBWdSWoynLQPLngbTN4bMirdnXdsrqmmsa+Q3WB8+dq4MG1mg2Izl2IqWb4xyn8Mc6dwg4ahoU8S65znsFVKweJDL0terNTKGLDslDQXUxhT2qBhV1dNxQOsHjsFGgSp7CvC5PCsZ3C7rPcQBTKQQL/5Hi1cBY8KsHQGctHe6HKgfLoHFedZnRp+Tv7qAZvMi5rRqfBWwWreWZ0pqPp0TXWM82oqYYf3TAUzAi/BGYUNOCoht7vqB783Ageu2hDj/bsmDSjgmBRhqxHW9JuHuWDx47Q/ukYfXTE4DSjMrO0GR0H+PtcSoxKdmoyo4Og0IdBFmMpGo8mQeSnrJKPsHJkRm/Izo3eA7yRsv8Lx0jVboJ8+/jWofnoJ8D4DZK4Z89jRg/B2yNXI6MnusXoVvcSoztuf82oHZCio3RfNHZHgyClV4nIjYZyxzrqorhoRs9BvXrBo49CbKFBC5lRBo7FjJbYYphREhG3UUoDf7SAN/VROkBYC/t2xI/Wg7cN5bwZZJkL/l0JHiaI9AJEmasyI0fubEcJ2ckYzQWY82Gfb9Hgh5gadIaJ0Yziup0q1OyMeRxeiVngNEYWuqkLC78dYiMWeDs3xC5eA+0kYWNmd4En34I/FhazIRT5+Wf2DnAL6OhvguagKXhl4WJXwToZHnhK9IGYswPQF1vEfE2UzaqFnwSdr6QiMhQqD3ZDBT5csTBeAFCkuXuw9n3eK3zPTYlIkqbZH1iYrBJMvJJJL6Pwmz6S+TIlc14ppL9YBKX9AqyeEf9esnAJbPedHE13R+jnChOfXqPWbX5fI2137kUYxJhhTuEnC2cou7d0DDz+VHgfGFmwMLthwTZlcAYQ2B2TgTP3C0aMbLYh3TbNsUulU0PK1QeQ3RLVXPrgn38ulUqTSJyIwz3SWCZf8/gp+oAxUJ2S/BfbU17GwtwmyP46yJG4hfP/QNsjwtsjJ++C8N07QB0n17DwmfZ/ptGfaX+nA8WfaZjnR0CNSJ+fbmhTZ57ud6du1dm2sE+je+2p8qVqz1FIBnA21BN7ixaW1At+6XEbNJs3gECJHr2A7uRtXgqSuzMqqi5btrs3w7Fv+5B5s6OYfFwUHMsjuEczUt0ueHZEYh/oI9S6wRIO5oEmodY1LCQGhaKhFtsDDAldhlqXYF/YObelg7LWGax7q8KIbHFbgY2i3VDrGB66VbaLQeuIS/3Wgd2KtUqcF7WKyCe2CjCCrb3wVai1C/+GISKbXp6NTeuPDVCLxb8lodHWF2bHWlMiPuG/Wkiftsbht1Drwy7qsi7UerOTamtop7bWq8pmtyGh1gtniK2+ZRZaPS5xWs8If7S64ehcqPVkp6YWVsBbD5gka93brUergyhhq83k2Lrj0KfFhVdrQ/muI8uNoZ6DUGuVHZB7FIfxay1b3rKVQ2CwtQSHiekfi5+BZ2ylkYlsLcAetVK2JVo4K2zNcanbmg1Hbc4JltlW3C4crRmlF8NMeiuqpHwV2eNKp8WK3nKF4lgusxURkxvFq2JhlgvdgRxUDHYRbh7IfqEZFILHFq1sBjswdmaQlwmMATaszECi2IOyLqUHpQB9P3gcygLR4ACzHwPZxzWDc/1U1ZX3AOt7ZnCqzE/0QZ4rBpUgtabcZwyuw5ZXGbT0Rf4OzOBKLw3ZkRrU9fJAJc3gXuvP4EnmngaBMPXgLvirwymgGbSFP1B+LwoPBXWHPZCDikFfsCc4EQ82mEpwe/DJ
eaMZvHM1PXjj7nzwAV84GHHdPfjTL9+CX7Bhg9/gB/nUjr3iYiD2IjhgcXRiPfxDxvqK6mKL04k9iwuKPcphTuwJIZXCAq6uYg9ImSBealmhmC6Indgdthdif+GoHbWxIJtvOc6Mfdrh6sSm8myDrOm7fUzgj2IjZEhjH/h2ick+QuydxXZg1FYuVo/MwJfQ+8ALe2R9yPmaE9vX1fzAkXbBQIZtBgnxA4O5AD+mDhSVcP5AtwhmMKO4RfEBAwnRmEFWa/5gSZ9SAZGTSEWZQTpAXFDq62L5B2tSQhhswugONoIWXcY+oRnkwlG606qW68FKmNOdJIbY3KNZvM0UsvLccFAOeys2bk79wHZOyxOE3beiwrZtw6u2ocJrtmHDxk7nJlxM8ulsAhoUCfsr/FLU33Wii5b7C3tftkjhlW9iu+8EP/njmKA3uwHamODBP4I/ytRTxLXwSHMrD9gi69sYOWUOwMjxi/dK2HzPg33ySzYnlrUOjxXc7wsO9ese4UfC6Yz+TyitLNGtHv+348DljKClWrhApsuUKvcB8jzFKYwAf3yBPAXL4IXnRagBNV6m4M7zDZ+elfYZv7L5CBuV1veyygGqeX9/+pM8l1MExy+CFMjM+CDm7UAKD0Tp/Fhwqj8htflbtXATcj4aQg+AFqAJqIJ/YbdwiE4SpKAJcktA/A/9vaYvkMN0gJvU/WOZkCL/KECOBs+RZ6JLCTf57sevQPDI0Hk7VM3biqdxNqHHwiap71MEP0bpnEaBmMU//TlQDF/NDoReiKsDVZUUuXu3kH6BHDYW9e2c329KhLHOZpOFWN6iEn8DZ4EyeH2o4syVgeUvSm831+Ev8vXXKFr8GFDWOxWLn6r/qP1/VIITEQEixWsU8UbDIqpu9cowKFHEOE0Wp5hxWukL8KC0yrtK5Y7Wpec9REll9kj/XwgjJjgLnNzoKzntMWy8JTqQ9/kgeKekzgSDwtHCxlE3WiZhNwWV3DplcncpxQappyjEBnXfiOiLUnog1Q2XJAqi3IRW3vAVrItyGo6pN8J/VNmz7KdFf1xS2BXmocoA9R5fha7uO6FB3RdaJ6Whda8UJ8v6v6lSUrdDjZAJ42EpqXqM9O1DYaaKDci28cjkQ9U3aLeCqPWXFlwQaoewJwxG/gxd1I9AlF3IPEOV3RWqP0NJZhrgX9BBZkjxxiEDZogsacxAsV3GmtNjeGfnlQlFmaesfdIwMQazc0N+7h4V3oV4u9Bll/SSK4QA7pHa+sJRqcjGvaUO608AymY2aIN1qnFA5uuQ0WQpm3mmu6zT5Ou8u8+0hduCdOvb/KDu+EUCa+oM6xR0ndlkHRq5l/fKsaifNVPeqtOnNUmukek71TyYU6Y0sreiTvS8KDwG6bIm/CtmDeeTrj9H4f0ZOkOSX3x0yMLuFt+movcWBHKrjARvH1pfU5zkm/5T3ld8S2q03mueWCLaJGjUGMPOrZCc26CMMTDeIH2MRnhjcnZSdI0dGnqHmjm7ir2H0t4Gc8QOCTiX+nivafpSo6f1pZhzxRAfIw/l71buFUuRzcOqwoyF/ySdV1sqMRCG7/0VkGeLWEBA0RUrduyo4EHFBWlWsKAg/viT99ubSTabMpn0ZEqCvpFgZPaZMv0cCKX21f6izh3r3Arr14rG0V1b/yh7ZQqMWhpvNfpEilofTCmaekQOhOYo+QC8V0SVBq28whBaoUunaBXzwLfJ0Fplmtp8MNsevCnvrpJB+RX1zzlNRIe0TcBg/WtrxlXzVigtQV0m/+kbyngFGn2BCk/y/Qdx3BTdqcC4NKGm6BKTlZ/XwAs177iakjManFFnc9UYJTX1566WPqEeMso9l0Za0CheVc/jvjPmNskyDzGU+T657kOhPOTPQ363Cx39hTktJETdVyJ6dT6rCLSrm9OaP/2tyJq6u9p+XLOj6GnpmWa53mdiMJWS5kXK8zyWWN8XuVtn6i//FIcWTtLCSfp8HiqZI4ppqAotzZOX
i0rNiExCpuS9vmmgJO2T1LQXMHSSNLP7re4SCFZUqDqN+QQNM6Mm/CSlk2WiqVOgm6FJ5slkU91ohtbcpLtuQvd5RsoKJNk8EF0pdFP9/URtp6l6cyCvdhc97YVeF+128CjmzE60fU5t7WluF9uzcP2Z0jqPJaa0Bqt4aVUafUt5mFhMaZEzrkEnsN1MlxYUA35FU8rC1FzKsN8spTkAmdIcR2FTSmEOwZSQUjClWd1MlmakKwxtxWXrTMmYWGlSpoxKPveiJY9tfclFs3nJkc60Upxjkyndi0W7dItmV1OS6rVSFXVjpX9IJJiSjO+UKuyRTekq5tl9e0m6SU3pAu1ipRKcs6Z0LrNKpbPY5Jp1TsWsjzZnS5zSsaw1lY5Uq0PdgJWKqugBFkgMOqBXrLOHTrTSLnwGprSNmrTSlu5ZUTBN3TYi+v3JZFDpl+u+0ihG0FDZ/cD0jTbrU+sMZIiy9GUP6ab0KfsepY+I8j2ZuCzpBFWSEEpJiiJN6TmK9ySJuNKjTGaWutwMlzrcTpuS1LqVWrybmlJTGD/o2rnUUAvWOX2YUijJwDhs3g5693at44tNNR6H9yFu4PN24jNwz8ZnYaNA6961dVKoeEbDXsM6YrCIT8HHge69tnXE0xpfkFxgPCfOiPiiIsxFYWnlmeGR2ImvyOJxfFWq0+NrUfR1bgSc+FJs6tw6yzDtxvNwpn5ho9OJN2FribdVUAdW1ngIw268YbeX1nkRj25cPNpO/E0cMfGe0nQxMIOSunXrPGNKL/6lbDgsOvFv6f+N/3DKcuJ9WQ2Mv0ufZPwDHpj4J8ZZ4n8o048PlXIUK1r4CxNJfCzaibHUie+Iqzm+K2Xv8T2Vj3lgJ74pv+Q241uwlcSPRNRjIXGq8H0xzKABEKcYEeYwqtkltxNO/Eq8KPEy9y1OXBat4+dw2jvxktC5sLOydWQGO34neM+DsxO/jihVjbkgf8OXGWUQ4jT+tuyfjWo8SYzu9fowukWWYoSmbTOqciliRjdYah1dcwQd/eOca0YVmOHMqESHHF1IFePojEE7QqjIjE6U/pSLKjM60uAaFemWo0NugkIM24RxXoBCTzZWR/vcuIeO/tqDwEQ4FducCKe5LggnJaAxOrBVDZOI/4Yp21PCGXgLwllupcxolye9cM7SOMT0TZjj6SdcpEuYEZyR4TKCuWE+tj4RLvGqEa7ZrhSuKHRVOihHMmcz2oL2YYNhGz7YGoahPbOH7dj3RNhhAIVNptSwRSjXKeFzbH4i7JJF+MhlVPgGlcJX+KbCD97Fwk9LnFCKicN3O5LCbxL/2G4ZfnElGA54TQx/JbwywlhnOIy5nYlwxAQ7grsrLAjTLbi8ww35NwV3Ze5uRB8Nt3kuD3diyxNh0Xa08BAi7kcRuMAJT3jVC0/tdBEeITQWHseaE2FJ6i1H33YiC89i9YmwTEBY4Wk0vMSWz+iHnhhWebcKb2znD69ZY8J/dlYLa3aomNEXEhDhLWXfcZNiRgOp+Rx96HefoW5G75GjS0kz6klJ5+gl5toVaPSquWH2QF3oSebqRs88tppRN0r2iMoHM2qrL3YkmDRq6qPFpD5q6FVsJHGpUcjMParbLbddGC8YyLUFjLw4tXkeNZ3aYuTkEGlzamnEJmpzUVhWVuFrGWaR2qpMddZWol/r0oNfW5MhploQBWJQ0qm5ysKxM7NT8+k/Ts2L/sclS14zWJKqzYpfrjYjee9aCgSdmoTcawnlMBklmobprzbFE5NTu4zCyvDu1c6jjzOpfq1dRF8lMcbV7mQutnaLeEKtJlz+RRGupba2dhN9VaXivLbDVFTbVrl7EtGu7TKn1DY1E9U2YLqrFWJ2B+jUjmGqqx1FGZxGzknk7EeObAHUitEHurmd2hcMx07tXVNarS8JhtqnVPrX/phHa+OIwL8I2Tg1KWWvNUX6DrODU2vD/1erK7uQJQYOA3J7ldADfAgb1nmLUH6kczm1Lsa7atx2m2IJ
uZz2j5g6AiPWkyDOmu6her1jHR894oGD0vDAFVNKMIXciBckZG40wE6LF6RQcx7onc0LZuFXCTIyqBRkYbAJ5ni494K0LEAFOSZhL+Cl2AuWaR8vyOtjiWXVQ3M3xa7LUmcgs1xesMpdqV02D1MWOqeTFr4U8K62LfSW7/hZrwMHhDQ/LZgc8O/6lXjDJLC8bqF5JZ4zWyT2hfyryvAe734NuHRGUtMjul17bcQZD3/lysK9rHJpWugv7yuvKaIfbuF35y3cNRb8XRPvcdPCrzcAmJipC2J9P1lY75Csv0pwEehs7eCv9lXWrTIlJ7f3xd8fcDMLP4QXqLLnigizZFkfEvGK6M7VucgEEW6PiLcqUgw3gAugvHxMjJDi/Cmjnyr0grq5pV9CilXi3E8qXIRPzilmR8g8WrgYElB+IaAxQ2bzh2TwXZf/VH6gN3emkDL+TkvxMwqhlm64BExcAvevBKmT+76uOBT1fKJsugSsCNr1wWLWht5ObgHYAoXgG1o/qyBa5Ad03WNawPPVi7rKuykSLC0qRE1lD402pHpA5oY4PaG7oWpc0UDePj+977aivCjRP2A+T9LUiPKuFIVW6l2KvnX1lwbB8SX1EVF5irx+iOdm7xWg1q9UBW9UaIJE99DUOQ5ENbqau9sTpJJf70pKJ3UMVXcP6FveFxQxwZgQU1CW4OHl2gpXh/okzhfVcMv0EOfiRC1GiJegbRfpMosU4a3R+7xL9ac16OUckokp07Kv9PJD8HHfqJZT2FMxjJoDepPZTqsZaIA0zfel8eML18GaoJK3ZvHXPkUzdfIBabwxBHEOloF5sDIfSwo/F/p0FO+TJnXHosgXvWORTu0sq7LXpxpn9Lpn0HKK0MZPq39fUd6jRvrVphr3S346kHuniUVj+CPCpSXYEVVpOi+pNvZV6bu8OrMacEctEld/CdTUVyvEb6hT+muasV5VA+YjP67hoBo7w4ZCrgXVqT5Tyo0et+PiXSZgBWo4k221tEboFXl5rbEw69r5s4GJoqCO6aGgjSpsL+iIqy9oSogiaHE+8YIntlhe8CzmwoDTlRe8SZQ/eOGpxwteY7ZiXvDJWdEL+vCgesF7NPl/aT0YcCL0gl841rxgzFruBUOJWQR/GEQKClj+CbbgBgx2Een1gm0VtSNrF0FRBj6CA2V3gilGLziW4b6ghGBJcMFjsxecad0oY+8nqGjhueKt2AuqMioUXCOR6QX/JBUf1GSXKLiX0EZwZ2lh+psclPsbiH73t9CMb/oF5BX6OzzE9SWJb/p7HM/7u/pxgE4X09+HScv0dVbuH+vU0D9CnKJ/ghnf/rkin2mr1i9JKL1/pbBLzv19KWbv/xNLU9/iaXfz/Ru2lf1qFPlO+ub7t1HW9+xiTb9md9ymX8eknOmHnB76D1GEhkptCeum0JVd5H43+v3E8bv/yrGm/wLvl+n32FOMfclLjl0kMsZxNJePEUh0xin4cZ3xDMcNZzyro9h4OnJkUsYZJ6K0OVnjGi9GMedlH2aMZnxnnJH46DjLlms8J5hmBzdew/7meF0hK1hXcMarMh02XtafPJsFZ7wkWYlxoLBd7c7Ge9p3jbdhUhjvyAToeAulO+MNu91wxptwsznjE+V9Gv0/jvAuRr/2I+dAJmzGsvHgjCuRcxk5V0pdgitifBEFnaHZaMxhzhnXoqB7blac8W30pUPmuMquenyjjef4OvoTCXOM23RhZ9xBsswZNyWjO25F/xqR8yCzZeNQOo7GddQJOOM39pvjnqzHjV+4/HHGr7KfNn6Ce9YZP+sWY9xVkz5G1ftGqHcsC7LjLwm2jD9kV3X8iQW8cZ+zojN+hwXSGf9hvdAZ/0pGdjyO4g/ZLo5HsMQdxJZnLXAWihbO7lqQerDA27oT3BS8ENy28GARb7eqgH35L+U/stBf7uN/uVXIQPBb8MdCd7Gu+H8WmsyxwqOinoH9JcI3yN/UgcsdkGnyzx43LQyJvWqIsNAA
W0epA7wpQFLfKxbutflOA+Yo+pCKOXYZPohNU9EG8d3eOfBvRLIlKrNPLaaviXs2beEGxWwI7Y0s5VYJ9d6oQpb67YDN9AfZlKi7U5y3sOVZAM1mwSU/xY8GJHA2ehbOE3N6gUQHlGraIskR0F0GG6dVszD9j+DtNwtz4OuUKN3UqGb6SJFpJCf5JaqTq7nfsrAL0WdmCK28AlVRU76hjZKgYPYiKJwvZuX/EvwRHAmOBUHLi4OLmxURmlDFzEHHBI2fXiffGeI5h5TvntFVEtDcfXikxsRw8g1BWtFPQ6JUy4JP2no+bsHUmv4cUt6degPUnYJQn1QjRa+bqRCd2psdsJwn+lCfNIO3Q8J54f9Ar5lXFzuuKFO7+BSyEpgrZFhWCmlZgivM6WPRzmAeGj2a1uHGzyvksftaWIZrvRDAq19Ykt09VFnYwxGaL+zSU1iJ+Q/WMcogrsx8QY+VruDK7whOC05htq/AxZ9XmFSIVOEUkvLPol3PK8ywJKL7j7BzdOgUzmRJsFDhIsUrlFl0C1eohfAKlzDVF25iIwursrBb+Keg6+jjXpFryK94hVv92cL4YEFmK73ChpZWtF0SbzdKtMMFpVfYjoo9lGhEoRh9iZXEK5yy2hdYHL3CsQQFCvbQbM+UhU/4cL3ChywAFt4luVnoR4E/UeA3i6FXGAidr6hMaXDwClw1e4WRFvVCJBxZ+IsSP6DMptCQfEehLsKFUitUaGtTUmjJqFWhyV2LV3hW7k8IZxYeUfBT6Moqb6EXZfeq/y9w0/RpGNOfibYHqWiVT+omvJ/WUjun1TfLVGz6Ge0y5nUp2F/Un1z0ZwmOYNPPw/DTX4Fpp7+O6IHpr9mD2sSfhDjbNzLPsiLFKYvz8HjX69j2Ce0exgJ7Fq8/cHVVbwi2BJuws+XqdrdRRy1HvWuP6XWufeuv7IdyDdtnLchN1Hux+Yn6G1dz9T67sfon/txDzD8GOuFEnYvi+k9saqL+zZpXHylGE4ZbC0k5JuffmD2B1LnByLViq4CVifomss71LXT41AvcfOfE8lTnCrK+i0aE+gGn/Po+i35O+jlzkiWqH6MIpH7EpVv9hD1f/RzWoVwXczAWem8T9Qv2sfVSzK9P1K+4+69fciGae6R/1P+xT63fIPBQr+L/G3PbknuiLjB01+Mxcz6Re2ZZrbtUwrHbuLovGnq8ftQTbIXq03Sd+hRyAfUZNs31lN0e1NOqfZYFuM6+fqK+ELPb+twbnNwW2vW5Lr2S9cD2qTpKtOp5u67meorQg2+svgJZ6uuWZPU1UaivJ57TbdHiA6bl3CdBuS/MnH2J+gM2xrnv2DLA2Br/4P2Rd8gmKTdiHw3sTOR+7Yi3wJ4PcmOM0vxFwJ/Ibdjmt8C5mcht4t2Ut2C7S27LksICi8M2uw/g/kRuh3tdC+24Di05Xix0tzYsNGse/sM1C2fP+WlPW2GsOYf3bNNCP/NJPEtDG9Ln5y0JvdUvfi6n+XloCFkYKysSOQs+IctJYHhqoT0u2+CDJaKUpoAH/PTnCWmRZv+J0MKiQr/kX7XwJ7Qgj885axE6uiTTjX0Ll4jmfATgsHmmrO+V9S0h9RJ/f1WVrDI82Mbf3uXvPtWa61HCP74TwvVkgQpSmvcXVwCUcZe/8Xsk2b3D2ywTnFctB6ASEMO3S7OFOejrfIKWs9WWf55wTwRuvhJyBHLu1Ah/+g2/mRRMCE4re2J6NRVYgwzG9qowNuirqAVF2cPvH+EPSJQgoE2dna8CCT8rJJwh2HsAMdOmnZzsB2muaEv34YqQZZB0Z/B7O/9E8IIK3dHfPcFdwUNBSl2pk+gYr3fngEy6aeEYpLfV2jsQ/fmPnKYyE7ldpsFd9cc9FpWcRP3/WurC+7zjWTjpTeQOUPOZKxK9KOm4zp/G1iHhXmfI7OR1RjAWe53fyBljetnrfDGdeJ1BFPgdOXom8zp9XZ923hFX8TofzPBe5zOK8hI5PGJ6nTetQp1e
FCYNSV7nkUNv50nLeOeZSd7rSL9apx3F60ROiCa4Tj36aMjqYeeB5zqvcxd91bgu9joyVu11rnlh9jr/ogRVDmFe50bii53LqJgrXj28TjmKUon+nelQ3zlnCvI6F5Ki6xxJy13nJIp5yoOx19lnS9JB/t3rFKM/h1H0bQnddXaQ5/I6u4pRQESvsyJBxM46C2xnKebZHUgn4Ia4k4+Szkt6r7OgO4VOLnLmUP/QSUfky0hNRCcrU8udGcTDvM6s1PV1kjyJeZ2UFCl0JrGq53USbJQ6U1GYgxplrwNXjHU83Ud3/OgrHrObo44hj9wN4pg5VAfnbi3mFmBg8o6XaQsdO8nWeErL3fN4YKEzmDDPLQ7gzzyOtY/tcmueaVzzjLiZmW9Ly9kzE6953oy5CxNu8UW6q4qvYrEtvkFzt9iTDFtRRzi3+IQxOrfYFId0sSWbh0X00bposR5ZJ+Q+3i3WkcsqNsRvXXwQk23xD5W+bnFo6ecWR9HHb+SAh1v80p9BrG7ht3ixiz8oeXCLfRXxHuHC269b/FTQEY+lbvEYlgq3eIJsVfEUNXVucV9swcUDybUVi4p+GOW6jZRecZfXVLe4oew2MXVbLCjWFjzJxVv5a5IzK94rkvgc3OJN5FwiE1W8glXeLZYlUVisKNEZK7hbPMfkvVuURKxbvJDx3eIM4n7FWXRvF5MSriqmlGgyomaCfYZbnIL7vOja3ZBb9CQAV/QxfFk0UqxelBirW1yVGeLiGhJ1xXXls4TgXTGQ+GJxWUH52LaF87C9F3MRdRZVoTlYyIvp2CR0zSBI7RazeoxbOcZKrHlGlMKsHNG/zbMPC8PKCbJK5jllNyy2M31a4Oy3JmipAwvN8MjC2ZoF3vIv3oEFmTjAAGb4M7qxcKEOCAkkK9cn0Fs4x3+xjn+I33SHCj8h/GaB8lzy9C74661eUsgaP/uUmv8htD1N3g2FfgPX98hqfxF/9ZZ4S0ptwU7ZguMRYbP4wNoeq+3fDWq3dEyaXkIwKxjws7kJvN8AoRtq4fsg/epTUAtUHHsG/okdNImXOyPlyaEgOW45xFiCMF4tbeEcod6X6DPoEm83gz9FyVs7eP9E4cqyCtvFX34klyZ5fYPDG+i4V6r41R/xAvL248TwjsEqS36zfQs+XRIvUw83/2XhGLr62TsLpyh7TAu5LTI2fgX4eU1IG/+Yyk3RpuUHFXGm4q6ACYpzM6JKgirOP9qeIzU5Jr2pPnWod+LndSYfs1KEgWll2+5DzcqdxAhWbpnAVu4VuaZVFFYNs/JPVihXrhFabN3J4GrrVjKQrfuYbzt0S7eYbuuaMdS6gYnJbaG0zG1pfLbKDPbWOboBW2fowm9doKiwdRTz7WhqnUo+tnUgOZLWvmaA1qHSFwW3pau/taePXewXtDaFzYZCtuA9cFvsZU26iUFck26hSNJtYe7RbQ0Z/S1Neq1fCai0tHd2W18MyJbEJ1vfQv+TxdhtvSLm0XqRed9WD4ECt/WGYe7WoyrTRfi39RRFbvHG6rbgzHFbHf1vS01hq66c0dnkthqYWL2NM5/8HcYmSzZoRbK9rTWVHHDX6baWlH5ZFoVbC1i4dFvz3GO7rUXNoK0chxm3lUbDkNuai2qC9VG3lRGcidFGmuZayQh3mQJvTQpOR403pSo4ku5seVETcgFsUN70Zp0lEQ8TgRO3ZU6wK2XEtm5L9JdbtKffntl2vD1HgNysnMMzcFuTos3fASfr21t2Hreo272tSuR85Yxry9sbbA1f2/nxFvUGt7t2AbjdQ7rErIht53ab/dHtDreQt6giuN3iecKslDjm327CeHErCwS3p5wQb4+5vbzd53Rye0D8b1ru9ou9zu0nZzCz0tOy+HvD8fmWd4gGL9sND10NjbhdyhrYtZpozJBvYzZ2MdGY5L6mkYAVyv09t/k2FtghNXJQoLEoOGcn6UYaJp1GBp60RpZcV2L+4kRjVTHsOnE+0QggQCMfjbVH
iIdY+23TjorbFup9bhvi01vpIojTaKJbttGyZG+0lbLDqtQIeRVv1BXS4NKl8cDteeOF41mDG4rGGzRq9JCaanQZTI1HmCsaTzE3MdF4BjtO841vdEA1fuyob7zz/SEzFr861jX+bNs3hjR9YyT4y7auMebVo7Gtau0I7ipkgwNpQ7Z6G1u0WkNWOxrYa2+cwoPX2BcVDhRcFDwEW7RXNiQ11ihz99w4t1RplFiVGxfwXjRuhfcd/aJRi02eTDSu6VyNf0qkftW40axVZwarolHSVFFMZari3KzOo9O2mpU4ejWjH2k9i1TnYp7dqlXXuViRwgpTlYbbal7SUNVlxqSpBpgzMNUlWaSt+vBdVj3dyVRdRXfUxarGVtdU47BXVFOCSSal6qw6f3VGcafEt1OdFCfZyp/dKxgUjVgodcnVT5rVVD9k/7X6HmHwp1/aP1ZHzN3VIXwUptpRUJs3pWpLhTUV8sBdUrURI7FEx6qhwntRsrfIecFycfXZbntM9QkNpKb6KFS6iizb0qZ6xUxjqhewV1alDhl9I/w5ixwtGtW76ONWHzfRR1UFaFdT3RXciR1buC3/FsPcVAtwApnqphprQ/CUUWqqJ/ArVY9VlyMlOWTsm2pR+R7AnFbdJ9LKiOcMszJEOVawymWoE6xheDeQPtJgSTCIefPWsfu0qnWklTRYkCKrIAdjYrAIM2Iwx+2mE6DizIGZJWedGWUwyyOhE6Rsl3KChIKmsEYdTLOBcwJHjJeBK0YmGGlerePH9i2Mo47BCWD0M+vHYoFdP+HlcR3JY+tsIN4ZmxwDRhb4edfCxKEFZssAh1ULndM1C6dfLVjg38IOCdr8SSbw/rNgL6PvBwu9ERH28ha4yTe8cf55Dt4UCTbJe4Eip4oWZEoWfF3wm5RufUUwS6p98s0IpdU0AdMVfm6BktOfAV8SLgjdzXmhvgy65LRxQqyPS1L0qKsjXM3mPTFqBOfIwuwnCXgGdAE9C/a/LMhv8fmuODkL5yjMrZLe/ecBD/7I93oXGAAn+em8D8hChHsiWgHoT5Hx9BzZ/R2DOzVNECuh2t4olu8TYS8rSKne3gbEgUzpT+KlwcjdBZng2oLvRX69WFD4IDMB8Hd/tol/BBUT/PdOKCLRIe81mtTNk4E3oHW8BNBtg2WCIt15yDG1asE21TI7S3zTyut8T0HDeYXdWLAJMVMhJV6R69Q+CWnnFORIgfsUjZ/qWzADtWb2AIR5jSFZlAF1C974dF/oQG6GCrnpOUHq+vxDITtUbqtmu3IB3ujgj7G+jroLJxiJFTn41ZAR33HwzRrlBD/6kL7m4F2PksGHHj6Dz2jEvOpRNHjjXTrowvEWSDdg8MRbcvCs0dWCHy4QX3TQ0YgMuXV1grqdKpzggZ2UE9xyVe8ENY11eqx1rpXmRvASXsLgSulljj6oiAFQrAZOcK4fJV4jnOBCQUdiOg6O9XEiuC9DNcGBVAEGRaU51FSzrXJ39IId7CnyhuAmT/pOIF0p6zUuk836PbcWZkWCu+sX4pxfvxQv/PoVBuvXy2IbWK+wHq13NUmvP0bOk2bW9Rdtm9ZfI+ctcsQZaNZD5VEXx+h6Q2z36w8ya7/elKH29ZakF2aOtB1cb9M0nRq3bGa9g2Tx+kiz1y87ELM+hrlg/S+K3I+KekcJ0fqHEPwSX/76gEV6/Udyw/n12NjCNV1i5Fe51HfzKxyu84G0pueXpP88v4giYzefY9p28wtKNS+zafksO0o3n2Fid/NpGSTLz0WBySj1LDcH+RnYXtz8NJvt/FT0J8GWPT8Jp4qbR1O9m5f297wT8+1iuLciRT55ozRxdDqZvXXtGfbqMHPsNbg+dPN/vFC4+TGP+G7+F6VD+REiA25eT1Ju/js2affZ+UGEmHSOufn3WNFCtGG6+TdV6kUqgPLPUh2ff0KtvZt/lOqVfBcGXDffsUPLzbeldyffYhlx801l9CDYkP6VfN0eyq0T6lSVv9eN
Tb4WUfVOZd1Knjt/w6bezVcjR0rh8+Uo2yvdkeQvZdctX5I6lvw5bKpu3i79kPPUtq2bP4kSH3NiyB9JxX3+EG1K+SIXH/l9SXrnJT/i5ne4UHXz2xGGW9FXAQ5UN283AZa666vSy16eFZ9sOYUSGaectJt0p5wQ5C3MKU/LPyXowlPvlB1p6Cz77APKHqPBKcfR01k2sckd66xy6HLKK7HJlHXWYpN2QigHXA055SUkHMp5ppXycswHhQWULTnliK2lvIiZM6ecU4lpre7lObv/cMrZKF5Gf17h1C2/SO9uuce8VX4T83P5EYmJchcWHKcsVXJO+YlZodyCX6PM8dEpt2G8KNcR7CiH4l0pPyhSAykDp/zHWHLKI0x9lYfan5THMH+Uf2W0qjxQ9TXNln8k21L+ZlA7ZbFqOeU+ohflzwipD3Eel49RA1o+EjwVPBEbTvlAH4eCRUFd7zrlPbXCroI2eId0yls8pjplCXiU77iBcMq32mGV7xWvJvhP8FrwRrAqml3JfylYESwLnsc8CHOmj5KFP4soNfLiy1IcF8/rijceSItdHIWmHqI0DeusoGHMi69yvPLiWd6343O6i46npaoOSZ28dRZpLy8+z/M8QjyL1plCZb8Xn0bRoRefRH1gnFnDQyRoyTop6fOLz6DPT8JCXtxAiYcPu/d+eNdbQ9zjJTruK53DyPXiLlPAwyGd9qFoT8EPErh62AexB7QOTTyc0Esejm3nfThSwBY3sg8F8kSbrIV7TNgPu3bKfNixZ8UH9tUPN7ZtH6qcCB+uuYd4uEfz9QPGRx7u7BHlwS4mEw9s5B/OeSh5OKOICjfQD2UuUx6uuMV9uLSjwYv/6q08Pkayy4uP0NT/kII/8CHJJP4wg9UNL/5hj11evM/a4sW/4UKI/8BkEP+STsL4AIXF8WfBrn4/svx78TdUEsZfouxfo/QNu6R78VBsAvG6dCLH27xdevEObybxFodXL15VRlgz9OLXMBjEa1Eb3kfZ3cLBHi9JjWP8AoYrL34WOefSzIwAEV8VqaiOX0ZfV8jIefEi+w4PwaNT69gDLIEHUb84iaKcRs5x1J0KeqWIb0UtvCHNhvFN+OfjuzA+IB1l4Q6rfMjhwoRZVs1wgUNXOM9BJlyCFyDMoygtXEYNmQlXubY34QpLZ7iudGux34n2O4wJJsRcs3V0FA0x+WdCH8U8JkzQ/CacZAEOp8RDGM4q2gxrrglT+kjCKmDCcxnLCM9gHwwvJDkXlnQQC69YiUx4qegVnXbDsmT9Qh3uwhvVoAq7hgnvYnULb6OPmrYiYYHL7HBHldume5lwl+vHULIt4b42EeGhPorahoRHnFvDUyF/ovNk+C5pllDdzISfEVYDiTSFcJ+a8EeCO+FIGQ1FsbHgL9YrTcjNhgnr2MQwYcgbTfjAm4JBvImvFo/4JmwqkY7XYVvwMSpbR+PwWfBJUGKK4Qu7PhP2olhvcnIXdD2TK0XOeeSccQdochUO57ky66jJXYFW7pIDba6q6wYsz/oTvr8A347vz7Pt8P1FVlLfz9novq9tju9nWFZ9fxVdrX5A/X1/iVss38+zD/P95VgibR1XZip8B7L5vmcHme/HJU+Zu8NMhY8JaOugXdP3U1HahNSp+pPsTHx/OvqakjjjZ2Tk43OdS0Tnc5VNpPO5LOXYn3lZUfxcknDLZ46JxvlcZK38zEid9Sc3Zc7nHBcpzmcSfhfnM8Wc4nzazZpdZz+nZJzxc1IRE1phP32WgE+HdxXn0xVL5aeJ8ojb3uX7A/T1+l9wYfh+pJbf/2bj4/vvtr19H8kz3/+QBOFnGZsvvv9HPOfzUivh5xVPir4/pCa+r1sv3+fo4vvogPJ9Thi+30aMzPfrqNT1Q5HzgX267zekotd/ReWu/4JmYL9nV37ff+Qq1/e7yAz7/jNMZr5/hQpiv8IQ8P2ycjunT/v+mcq8QNDY90tM6b5/F+V9G9XsPvqqKeI/WHF9/9ou
N75/IzpUpYLY3+Ge1Pf3VBJcz76/IWMk/paCjrkx8f1TZSPF+r5/gHxuLish6Fxacsi5OVtLk1vU+MvluBXOLagfB1zS5Jb0e11cTTlMHpvcamxy3TpMXnZWqpcFrwXfga4n/9hCJ5zG3/220H20XTU2PbRg79OClX18tg7YePRj3tst3gvit33BBcFNwVNB/non8u8uA8/6Cn9UuG2O2G4KcEXofVOhgaByfB0rfABmthNbWJhUyCH+l6TgjEJUn9eKIP5X2/diwaW+QdR8DZQvNXaehMXrlPJ19Dekxt0PxTkj/NC2YGx1XwEngsdgOqt/4OI+QKX1FwI61MIct/AP4gQfybuCl8jreVL82d4TmyEf94WKzFwqH8i5TipzA6GdNfIxj0Rey8v7CwKZO37uHysiVM6o1MdzvMK5C7oZVfcRAmaog5mlthkQGkGDDGT3QhXtQQIz7gJP/gQh0DQVniZfZ5LusqSec0ZtpoXbicp4o8GnP+WlLXZy+ieUzx7kXyd4WwEvgmrRE1dZC/2zgvxq7zf1x7NjhdBfnC2R/vQHUlD8Gs3qfHUUOiKAMp3XsgJU5l9bIVQ6Hcqr3qw+6p3Ok4ZmXvvUt3rbqcpqFgWh4dqLgo1gShBK+YkMuRJ7jS64Rs920kRYs9NRbBH6JvkzBUmnwGcKUm33LOhCYOf7WVC/qNzzEt/L9NwpUFxR7Y/JzgxpP+8CarmLx/L78m8R53FDo/RW4XMKOdNfmnbtQQEtBdB91iDLKiU6laGSdAV7yoQGdu7XVChtZg6v9HdfuXj6u6C8aN1iUj+rwG6W6Nfg5WyoFVWIuXlSCHiZbwqc08TwTdPNKcY1tVujKnNN/aKwBKXn1N/KoOMu03FNU0iVqVniXcFk7E2qg5/SzFuia4v23KJmiZLS/QhqWLWohjf+Z2GKpk7tWfCoqY0/K4zAVEXNDK1TFJZiEKwklJDuZp7VMHf8O2AIezuaPx5d/ZzR0GbWm4I4bo3iU6sA+mmK3rdC/zigNbyzliAk+VN3PKM1FjblZdRMgqzj0uuCukI1e2c0/6SWFOIKkv2fUewntR/tpLki/4pvrPkNurr795oDqa5zzXw2RRPuawxcj5QF7dHTaGr9E9TE+qIp/EWN0xrKT0dIUj9T0yTyotZ6oYqep95wZPRX4S2F1NRoLTXgMWPCfd6THxK4gYbyMRi4d/STvBaLY5A2n/fyq+Mdk43To+3n6ZqbswrVgjSjvn1Mh9rUCD+mhM1VeZlHNmnGTdDcvFQgbek2Gb/jR/UpWs7dJeP5b1FG4zSr1WaRtnEzdvOdM0h0mFycRzuT8zHWbXIe57biNGeo4hSnfq+YQBV8cVJBKRkAKiZj3xbOcvoqzuiHeF28YtyOqomtWy43LPT+2SBf/O9FT9Fc/ntFRx95wWXBQHBJh6DiuowDFdfYtnrFVf1awQxqMSt/WnAOwyxecRE2rWJOQQuC83oSyU3ytG5yqchJxqbsQSc3gzi3V5RCEK/4BHNcsSv18MWePt6iP6+cP4svCnqQvyEDsEUpAvFgnvq0Tod9l1dsKoa4g73irxKN2LR6xSGn9+Jf9OdTfz4E35Wkz0WkV/xRrG/BgdgCi1/6OEQJs1eEl8ErHkSR93XQLJ6KC654wv7OKx7HJu2BuLiFqXuvuKlzdXFDh9finlT2F3eVyQ6HyeK22vQGaSKv+I/ts1e8xi5r8V42eIs1uLG9ovg7vGJJKJ9z/VE8i3oFyrE9WKrsObV4yc3V+2WsZ2FZsAJ2zvuZPs4FSxZOx2a3Leh8WeB4exZ6dtLGP5I/KXgpSMQ9Bb+vC6Yt3NixwC2tTGBg4x24P2Xhbw3vsqPgO6KM8vKX+DmjeNeCV5R29iR/mRKIt1vk2yOhY7evqPrpEvxN8CsBwS7/5lwC7kuKPbTQT30S+7EJ3KA+r2TqHD4I9hRMke5bXREp2JmcJHPw9gYJAq5+BY/J97gr/42o8mbhzBaAf+7gH6HT
R8RbN8BZMHL2Kd/Zq5DjaFNQcWYJz5xYMLrl+4S83VnRw+4VLXybJ2SoWgz/LFyizt43mC+d4s0pp7+uYAs4N03sXbVSTtVqTwrOgkWVkMM2P3s3iq6IPTJzmmry3oEgRD/ckLcj+EL0hMi10rBwCvTNAxSdIoa5ok84x3SH7ZBQMtr+wEetp9QtfBFpm4r5SZBfXFKwanBblX+s3NRdhvQL94ZCvNVVkobqbt1HMsiuyN9QZyKtuyYcZhcEA+A9qDiPHwpR/1yjUznpReV5qDi0c9Ej4IKmcA7U7Q/A0snRiO4BlJ+j9uab8uboFD90vsSaBQ+UYrb7grSUtxDIr/bZvlQ/IOsHGiNBR/PWsnjJLCEKTz5Tpxw9x3uh/z0OSPyRAt6pb+bVFWq+hakf4jVUrwx4eIui4QcU826WCa9ASScJ9SbpHJP3AFq2oHQuNHY/+W82RDNXQzWD3zO0jNfXkHGpx9+ksqMjmVXQXEgpBkjdviiaRtYm6DhLDI/8OSVUoW+ect0uqV0HUvgLKxqkpHT3oaJT0N8u9XY38e+nFQCxzXSUAbRynsjcBMwJXkJzR0tzgPpcUrNLADm9L4o2L1Tcn/mnkC3BvvAA9WRdKUHgVSk/IZvbzBDcu1cI9XIzIlcA4c0OY8hnAvTTZOuWf+QHr00Nhx06/Zjk85ppgmU7zV5osr2FG9d5v2M2d95rXEE77/fSHvadQZfJuwSDtvosouY7LcUf7ztKuyeox8X3TfkLvFr+GIUgeu28H8PF7Lzr0fL9FE1T7wfcZzrvRaytvh+KJ/U7x8Wr8/6Fxqj3bzSEOO8/PE2892Gpcd7fZUnt/YOLmfdPvUa8//E08T6MYo9g1A4SsWsAf39j8xM/cV3avPN25cydSSj2vSVU29QxmMTUgYXmyQZ1uIVw3kP9r1sY2L4N8Oz3W1RMT0wX712+ginM7VhI/Edktr7FAfL+RHHvzyqBq1ALjiecboqXjG5SMrbdGelO6U5L00oX6+JONxGbzFtnUoq+uvbgZKGnm64u2iWt4+i9t6vbf6cbxyxvdx2R5C6GY53uqt6Yuit6lu7mea/uLmM3xOkGirDEmwzW9Cy5u7ko74WovHnJ+XYzEWJpGWfrznFF62Coha+TCPfjyDlSjofRRzFyDiSv3dW7cxf7FA5mWurWkX4np1uQzHd3k92k093g4bp7j7Emp1tTojve0ru3vDRhi+bVOtUIpX+wymCZhoIqkVPmGsnpXsGh6HQvo8CLyClFzjnaf7qyB9iNXsi6L7yAdZ/FG9B9ipxHrg27XcGObgi7bT0tdVtCsilbNd0HIdlQUCj/H8xUThfTiU53BDtQV5rLugPBryivTwZT90PZwwtnvg2Mc49zlkaBQXWOhX5tAraeBcG7CTNzz/OS+fbtJhC2n4+JSFVObx4bkr0FiXj0cuzFe4s8v3o9Xgq9XlpWlXoZ2XHqZfWy1FuBd9/rrfLA1FtTPkGUaJlNZC+vTF2eyL2eh/RFj9PKROBrW9qLx+oWGvQbej1t53uzMojZS/Ju7/VSCKf2JmWEqpcQIlPa3vamua3zepeyWNm7YoPYK0eJzvQ+1hPbpde74Imnd6fMa0is9q4VuaqQbe1sezuxBNnsciXs9fYkpNkrRPlsISbbO4oiHvOc7fVOohj7bGx7h/DGeb0vJj6vN5AESO8b8dPej8RQen2dRHrvkr7tffDq1vtUbf4QMukNI8RHPAn1fnkm9Xpj2f3sNdUULekf6rWl76IXCvU60p69Bm9TvQeFvLK37kXniF4vittVq0SvVr0n0eKZJ45giR6whKQvHGU/gp8TwbKtrQV20luNzU40k7rLHq5zNGjOIJrUnLVb8uYUYNpOrsGK7aUW2EZp+tz9Nx1BV9DwojtcJf9m3MZu0kmaUpfeXOY7L+8SvbUZIPPUzBHiDPMxtzDRnEemoLkAtZsZkmY5TDnDZWa34SIiJs4wR1dwhguIoDbfQKxH1Bf0Qgzn
eWZoPtmx0ny2Z4Um2gaaj7SsmRF7zDDLk2izyfNFs8W7QlNvKs0H3jiaoR7ohxk7Cpt/MS890RRTqTNMU3gTLu3mCFmP5k/Mi080teAM5xiizQ/bvZt9Om/zXfCE62hnmAKbI9X8mENUU8JdzUMpr5u54EDblHiYs7Rrp4Xmtn5LgLGpd9Sm3lGbm4K12KStxtw+0qjNKjy9TRS7Nf/BMtAs8wbbvOSZp3kF13CzpFQXsPM0zyJiT8W8KVtyCXVZzjBBb3WGPjohhp6MFhSc2OSTdVyERAqeTEQVfDg3CkY2apDnzltnlpdWt5CMYqQQi3YLk7GEY52E2DkKUxjJcAscZ6wj8Sm3kBNfeGEuSpdGoNctZKQ0v5Dl2dRFDp1/axJaKaxjo8NFYr1hnWVZWCjk7XbALWzbfZd1dljZ3MIu8j6FvShsAxN5bmEz+ipIpqlgj65EOWIFdhHzXrXOidhrCvtRzIOomGL0dRg5l6idcQtXYlYplOFEdQtn0b9S5FwgPlC4FWMK4unEv4+qeg2nhluoylpYoQkfuotst2udtuw5FTo8kbqFujJpRBEfVMUXbIMVpJjQLbzBde0WenbVdgtdLIgVHrFbUfiSP7IRUfhG2M4t/EROH6GhQsQ9XfiM8hmKO6cwgqOo8MvsaWb0zjmzz3L5z85v2xaa2WkLvcwB/sMLwVvBO8F7xVkhTt8VdIDPUxY6938KMfi9gfwzgvOCiyrlA/j4rJC84LLgtuCe4K7gpmBB8FTwRPBAcF/wULAoeCl4JnghCObe6Vj+f4LXgjfCEHwW0hYMhwp9ItTuIKjJrEJ+gXPg4c6An3O2JKg6XFO2n1oljtcn5B+0KLQg0f6GMvAVhTLcTVdJq/J7+AtxFfUpJD8Uc1OQKu8C/Jyw3lm3MEEas7Sk4EdymYdwbnuBXJoQ2iQfCNnIkMieUTFaApL9JgFf0M35HAp+qqAVwUBQGafygmRs/nLyZwXJcp3mdR+eVQOqWiPNK83t3v1QxAONYlKq41VSfpqgPZKXXB+n5aWe5tOXf4cC0uf41ydBLvNNXusvQJ+/zt2s7aebMiLd95DP7MdZ+fqzWg37Yqrz+glYGfrTDD2vP8W07fUXEKLs5xCS89CDkLaO3aTaXUw/qyzWdUvXDxjgXn8HPkevv4XZU69/LA6evnRwev0KNlK8/rm4Lfol9hn9O/bjXv8eVj+vX0PsxOtXtX+S5iOv35RMZ78t/YX9BzQye/1X8Y/0X7jv7D+KJ6jfjfJ91lVof2D3n17/SwxC/Z8ow2+py+i/SzK232c/0f+IPv60L+mPImfIY7bXH/PlDC+ZIpzhBcefYQmBEGd4LrU7wzNp9h2eiP9seBw5R9o+D6UjcrgfxZAkszNkwXKGO1HYNiIEznCLrfdQDPPOcNOiPZWA9BaaSaBv2y6owS9ognsmThPcSt4AlVht69xIu1RwbbcsJvjHm21QRqjWBJfwmZngihdwE5QQZQguWANNcK5FPjjBRE1wKvmE4FgKTYN9zicm0LnRBHuIYJpgGw3LJtjBRpEJtiQ2EWzESLYp3bTBn1QxB780vQmG7AhN8B1h8MMkbAKZsAns3o6i+1LZHbzDJRH0UN0VvEQlv0YVepL5wKArFB+lOjtoiyMj6MDxZoKW8muICzV44OU6CKU3OqhzAWrszmsSnFbFQWv3cCgEttu6ooUBmzITLKJ+K5iP+U3rLIidJsjCP2AC1OyaIM3YMEFKzCzBDNwl9ggKSpMRSg6DwqBNc8k6Bo4VlG6mJ35ydifTt6exRwu96z8LnX7JwtkCAe8b/HysWTgzaYExlwQv7+A/GhK7TpSFXwI27oidW1ZEZXX/hj+zS6LuHiH7FGcWQvynJ/i3SLV8TtIUifJ9C6aPSVIeES1J8TuHFvR8C3wH1Kb1K5chh7k48KxJyNm7cACpuVuCT5RmPq6f5OQcgI7Z3FSiG0KulfSoQUgtQcmJCvAUfMzeDDmkthTnk+y/
/+H/urcwrcz2qJVdrfqxRTLpf/D/pkweu6v4L64tTBolo+675OytFQn1yKG5wL9W3UKVZAbT/ANhd/5IMVpKTTO42TzJ2xRoXkTjv4BcySpBxZy5J0IfIKM7VuusUiXnmNZx7yh+/QpvTfgtpwgAnfUdhSYV+QGYoT84Keji/r1Sx3UCvmn+KahbgwZ+krJW6B9mh4bNQ9oD4u/SdubK7lP3CnZi/1lAMKyzgMBUXDugzrxduSZMFvnM5ha8HOaUOc+YE3FxmGN1Y3MkBi1zyN2AMUVpyzcHzNzG7PN8YcweDDrG7KIvxZgd2O+N2VaWW4wIU8CKrDGbkbMBEuaeOcLcCt6wLTXmn3TOm2uNSlOJTdqBY8rSkG2uOEkbAyO+MWeauEwPnTTGvGkImlfZ0DIvGtrmWVOXeRQvvenq7cxg5M2YthQzmxYbN2OaCntAR4IxDTGwGbh8jAl16DfjyPmVqn4zlFSf+UGWy3wrnmYv8ynuevMh1fXmXfVKinvOzEaEmZHOfzNNFbJHEbaTMMkZXxxkxpOyP2PgpTFxwXWpHTJr8N2Y1ajaK5GTh53QLGM5zpgAdnljlhRvUTDHNGUW4M8zkio1JhshkmGzbswcX61XGHZbL0jSt3oQufXGFN3JcDhoPXOQaz3ZQ02rCeiwSiDyujHRqtMXWiGMaa0H7kmQe61OvE5R69bIruMtu2BOtLiRaf1yhdQacAxp/UDK1jdYtLiDafUtNVtY72t9xBI27ZGtQMse+2ysA7sMtA5pqlYRkrV2WHBa27YPtWRxtbVL5C2scb8ucPBt3XEOaN3Soq1azCtMtP6xCWldc9fVusGyYatq26d1xUrTuuSw3qrY7toSR2zrzJ5aWyXWvNYMCh9aKRU/TQO3sHvWwqzORIsroJahs7Z0Vm+h1KmFCFprDfHdVp7vZb4XhMa8ar/IGbKV4yDXSts2amVjMxMtdG2brN0Y7U046Uu6oZMuo5MwXUHFlpM+Q1d0+jyWSFqnhMllJ10T63z6H2qUnfS2BN7SO1IDnd5FzYmT3uS+K13gEOWkdVOYPpaxwrT076f3GUNOuojuwPQXV51p3Zelv9GBmP7R/iT9gaxM+lNBf/Q4Jz0UPiPFlaRPeqzfTflbusNNdyTjk65zqZB+EF93+pV73HQPqaF0VzI6aYn4pJ8E51lrnfQCc4WTntOVcDodVSaDSKyTXo2irEvhY1qnWCedZxuWdrjiSGOjxzqeyvCVbTw2mbaOQXuLk57hqOykZ6W6MT0pfsQ021wnPaUL5vR0bGzbQ1cPU1OxmT/gyw1wfW3CzdrJyZ5Ds2WEyrNXsuKcvZSpOzuh2vN69pytpZutIV3oZm91bM/avvdtHdk3zP5D7jx7zdbWze4hTpHdRbbZze4o1+3Y5Ll1tnRQzkYmGrObst2bRY+Tm5WlRttnsDTMhGIPp1mJYmSL3AO52QP0aLnZfdmIzkr2Pfstq9DZAdpT3SxGw93sJ2Ip2Q/9f49Rql1ALPyL4o4ZMm72Vwfe7Ejn8uyQud/NdpSoHVWzhbo2N6vTdrYh2fVsPUoVRs6b1IRkX3U/kn1BIsPNPosSXZlgOJUNkdOxdlynfxKdPO1L3dppxBF8+sk1qDmVCO3pgLnu9Fv+rviBTx8ZZub0SavZ6Yt+vSraW5RPTxPxaV1/GjBRnz5o/3ja1CR/2pLa1NO2EklJmZtdRMdHNseE4GbnYysWYsHRzUqwwM3OccmTFXusm11lgnSzK9Iokl2WBFI24I3dzUoNq5vFZpV1HPFxnB7xfu9mjajJGczNamfpZpORMyvjytmZ6GuaRc7NYiXCfG9p2fwuMEaTOwyD5DaX4EkxEjjJXenJT27arbST3FKsYz0CJE9jk6fWOZHcW5Kp1knui2E3eSibGski7xRJXYE5yUux+SaZKp3kubI700NIsgS3sZO8w8ykk5QdGydpDyXkZGcnolxz8Z+8EXrVKKglLKXqPtmx2xAn2UbWNlmX
VtNkQ5K9yVdeV5JY9HSSb/I/MoMkB5rnkj+Swk2+y+BHss8BxEl+amAnP3TnnxwxaJzkkANVEtk1J/mLjc7puDLFNpV1fM0jSU/Cvcm4HnqSMyowxbbdSSa5l01OSqomOc3B00ku0HBOEg0wTnIRdhknmZOe1WRa8sPJLJNqclW1XBfR1mC+d5IBLyzJJf1YZvqfNnay9hIl3WEnLnQ4TZzF/CfrII7oJcpR2KUU5ieuGDFeoqpDckLCXV7in/QzJ2qQxUvcR6lvuZv1EnfSipQoRLlscdBNbEQfm/CHJHalUzmxJ0GXxDaiNYlDqXdK7OvjgLN94kRqGROnnGu9xBGrkJc4llxF4gPZjMQ7XD4JhqWX+EFBg4e50rF1Bjq3J37tHOElxrHEgXVGivfHpX2igeiZl3iIEO+ITynRZP/mJVq8jCSemJW8RFfKJhPRKT/xRhf1Ej16lpewB8a6dV55LUhkUEfgJbLcYiTmIvKkYZ5J5KQQO2GbDZQWIkosc3mQyLOAeYkl5RBIVCWxJv1RifUo3grPRF5i1XZZLxHHLIaX8FQROA6tMwXrTmJaSq8TkyJcgj2Nl0hKciWR0rNQYkbtMAu/krFzv/tv4hWbIp1A4oTJDeru2uFrO42bLMBG7tphjMShHee2E7p20MvZZR61o37SzjZ2ND9aWIz+HHKdmzyKPo6lcYpRb6eZpBYRxrGd8O2g7lp4gWoRO9rdvnWuuIN3k2XN2HYUg4T0xLh2GFPejdQ02UHPPXWypnk+KeO+yVD2fBnPt9Z5wDiEm2zqV0uChnbAe0TXPsBNPurXMyuQHfZ2L+facc+Pvn68syjZQe1/Wmegwr9pJtdOApN2vtUId+1wtyulHeHCW9oUuo8Mr64sSHUlztF9kv9Vpqy6L3CZmW6PLTdvk3av3K2zve6iB886D+gg6DYw5dJtsmXvIhVgum2ZrOmOlfAXHWGGd8iudSTL2/0U/BAcMPpMV+tW90e6wOw0Y+zCnzQ6T3WlAyfp8Cbh2mnJhrl2PpLjy/5xUgbF3GRCGriSU/Rf105F0GUGRTvJWXS2u3aqsrsPO28pp4yyzepm385ULEZJWO9cO1NB4yWt6MlA0Zb1UpDMR222wmuFnbzUVdbIobbMXqaWt/sjP3XMq4SfOordWXgSfRxILCK1j0JVP3XIWuCnigxPP4U4lJ/ai+Us3IUCfmpDcgypO2QtUrexRMI697bv+qka+1Q/9Q8JhxTPj37qRqIuqSqyHanLqIQKcgypc4lIpM44r/ipC8lcpEpc1PmpV9t6fqqneKwgfuqRZwE/9RwleqIv+qlWlHkzcjgr+qk21PJTdaUN4eTzUw9RhAbCG6k/5GdSI4lUpIastH5qjCIPPzWIcv9CJtFP/cCT4ae+o3/v0b8+t3t+6oPrv8NvZjLn8Eev3YcD5MGcQ7qOc8izp3PYjyVOJ75feIh0Dv+0hT38jZyxog2jjxHm35zDNpOgc9gRr8gh3dY5bLHHPWzE7CJ5GEry/vCNp/LDF1amw1f9fmIVPnzWAnf4SB/yU7MRwjMiZSqGPyGiTFrU/NQ0U56fmuIg76dc/XEQRfRTviJ7UQeIs+YdXiO37KfW1cxr9ujhpxaURKygfmoR+ZrUnD01tNNszNoZTnrteZQIf39zk9em57YX7WzUXsIX8EjcXrbe7x+UrbV5YG2vsiS21/Cu82to27RtYpOLE22Hktoed5nfI9t925OKm2DSa9tqTLy+ce5qM6q+fxGObidlVzBFSd9/jJLvDTgb2h/IzrcZ6e2v2PVEewAfbPubW+3vTbv4t4es022byUR7jGR6+4/15Fsv5W26VrvBZGU3cvaA1G6zXHZWYm534nsbhJ/tgeJ7B98ZB9H2uZ3M2iUqdQG4pGa7XM63y3xXANcw3LT/CVYFb2UP8Y69Qrtmc2zfM/W0N+w0972PhGGBdFscR9rbELq9g1xSG7WO7X0AKiDbRTt+2hwy2kcA7onbp2h+
zOjFP1OQqYnMJk+GXmYDEW8vY5ekjnV2Obp7mW0sxXmZQ1lsyBS578/sayuTOZFwZQYtMF7mSLuVzIUYITLiac2cc2HsZc64ifUyZY7SXuaKxdDLILHoZW5Y/rxMVXoxM/9kZSJzHfPtJiFzjwLpTIPFxsvU9eyQCTmmeJl2hEALcxUZ7oS8zLMeDjJPsDx5mUf09HuZ6DUh8xY5CH97mZfo4zMq0nYHu8PIvEeB/cj5EWtv5pulPjOgbb3MF8cVLzPW9iTzG0UUN0Pmjwfu7yrLrJeRDvFMnO2ul/GFmifoSmN2xpEm7My0wqak1zOT0Mdk9CelDJKyqpGZ1Z8s269MRvSYs3tXL7Mo7agZ7V29zAIzmZeZh0MikxePRmZZur4zS2yQvcy6qL0W/VqNqLdCq9cm7YCuJewU8+2zQazN2Am8NssZo2k74ddELWULrMWZXn6fufJ6/cVKQeixUQ5deqATcrnihHFGqROmOLSESf2fiU3aEzzirvZsE07CY4ahuzfr5BRtASk5J5yXPoAwC3+GE2Z0JAnTHNGdUIrUnHBNikIwgmePJmEePXJOaHfiZBxwpnckFOqEbQ4nYYs9Pubv7FkofBB+DczVO2Ed3RFOqDuN8I3ZM3zlEtUJeXJ2wufYpN3kh0/iggu7TLJhdG4Jv3XPEQ5k/Cv8ilJ9KvMPqTcI3zkhOmEfNe1O+MeBLLQbjpR1fnWyC0eKPoxy2o1Q2o7qvcX7kBMWhPqmMDvm2s8Jj7gYCg/Fzxce6EYl3JeZDIR+ybCsoq5gogsvZScTU3n2wBiey4JFeCaTj+G9BCkR/sW5iyXA+laLWXgjRq9QRlqc8J8Y3cJrKutVZsSgVElGTkqqXSuT4qCvTHF+qExHvxzxJFXQ2O1VvJj3aB34LLwKBzivYph/KiuMxcoqRPMqa9zkeZV1+Aq9ypI25JUgRsplMexX8jBJVcRs51UWIJNXyaFl3KssKtc56uNVMLFlnSyMQYvaG1rILr4C95pXedLZoNKMeXbUV1rSFFDpaIxUQmm9r9RV7gNizV7lTy+hlZH2/pUxB2ev8qXTWmUgQYrKj95FK33moso7212v8oH4e+VTmB3BAFY5ETyNOT3r7EfUK0rSvXIoxfqVbS6FvcqOaroruKeX2sqGXmUrm5KuqBREua0op9uoPneaMSs1zTqVKps0r2K3+naGq5Q1f1UqUU3OonSlyLlA00cNhQO/2Ixc/LPdbBFB7JqUty/uMptbaAldu2JVX9ywvbJWph0WuW2wgH4UM/WKhT0H75cdXTFn6RnY3xbcJPxiD7/3Y+FaWl6izP5ZkFK0VbzL9oAe85cXFMEOkdjeQFmsCPoWrtbxnlKgmbWTQGyjTJL5GwI27dCNJdsKsLOMxRy/2beDyE4jCYW7+EdfFu6eE2Vg547YziXx7t/J+2jEP5IExAoO+PwgrVfD+0sgtdkFFRMnw4UjUl4Jt5e86PAl/4byjVPQSYsQVIvYnGYUpyEIIdwpT/H7wMOCcqN26xDM6ShKukQAmKyDqPsAbdYbilvECw3Wq4rKr5l5vvcBO4q0hHeaP9TcHVyBSjDk38IpKCZpA5OaU8i+ILV31sDCn64JktbckMr1lMN0RXGa8hflf1AcNcHhp/y3ii8CL7QUU009rf5zI/zWTgVPCHlMknYhynldqaYFjaCntFHIinJOCtqpLib6f9Cgzq8I6Qr+0qL+ygX5lmlDcxISngGzvQ5pIL55WwN+DBVD3WJIY7lGvfSRzN1ZaOMbep63caRUNLQ7TCn8iZ4sMp2pi23QL7xvKr+2TC7VSeUIfcxAwfMKoJnTKue1JgiGySVFuyagQU9y2yCUbioGndVrjuX/UyYqeC5H+Oks/iQ4pxlTQzBeE8IpijWJssYmxXgpCLFNPbbvqEYcUpsH9a9lkTdxT3iSv+5YPRhdPLEaGG9T9e1pRaO3bGs2qI2UBzltQ9dtGsu7A6HtQL+o+PYiIMvoprOs0hje
KpRag3LuI11iVX3hsQesfCmYVlz9R2TNG6tbCiVGnyZ4PyMQQqxCwlWmD7OxqkiaYR7Vge5FKiU2Q1XgkSFd1HSR1WSSfVKw+sHivDDQYM7RY90ZNc0B9fGnyN7ZUMYbWf3V1IElVusHb/NOT5zbBezoH7HNtXLceFAuGvU5jaJ3qjanxj5guGU0Glr0Y+dG02wJvLYYEl5N9WrRsbYofQtktyhvi4HpTdK8W7RXgiptkcuW+kCZGcM0X/lF6HtWWW+qsscaEuTtjTWAxocKmRJMCELdFDOUuXux8IDmM88MPT+hWa2iefEOkplsNK1oAHxAoAMC3LhG4IfG5QJNv0KPc2ualj5IefCizktCZwfqufWepthHFaFxnSTOM13fOyOln6dXLVBBV3PbJCPgT1N25UDRoKAzJJqH8e9YQo1cZ6GaV7e51yyTetJ8An7mcEkLHSNvX9WenlXAp+CHoMbYdE5+kNqnIztFcHUdUuaZ4/dZTPZVGQd6+wuuUmjSXWBcOU8r8tMmec2V15oBF9aVKJoH8WfV3ppd9jV8p98Vj9nErdIY+2qFaRW5pn/0ux7E7kE9b25dA1d5Tp3Jr6HeolyT18Jb2VY45PFn4gqB0FsMAeerqehk5lyotSuaQy40sU3RdNP76mxUx7vU9FYTTWvgsgtNzAspu+r2XxofNUiTHOufGrvY0mSjkBojw8neqanoj05vV1Bl9oT6varU08SaZRw4bai6qRWlR8efp6U2qdUmPvNZEGRIz7Os7kAI75j8vQf8Y0b++EwZxAVpFveZKrnN6sTiJWd4CyftWUt6vxdrPIwtwqdfO+BUvXgd6wLsAbeGwqnaoXTlVVd0p1pdhaXCra5xBVhdjz6WUNjuVoPIWY4i5qN/83q9rC5gZ8GtognErS7yRlWd4wjrVtNwKbhVDJdaJxulmuGNrDrLk4hbTfJo7FZTCpvkfbA6Jf80kgtu1ZEmu6qLfYCqVLm5Vd/uH2tH3OS51ThHBrdq4GJxq3fcu7jVmnC41kNk9Z/UcFerUeE3UQVkQd6tXoktuVrRW2tV3CNu9TyKeSGWaCmedasnXJK4qKTl177uy6sHUV5FbsWrh9GvbbFEV3eiX7tc3LjVPXFlVzeiKJvwBLjVLWX8x4HNrQ7R4uRWR9JnWP3lFd+tjiO07Nmvbp1BjBp+8x5c/Ylw63OzW32P8vtki+9WX8SRXH3lArf6pibpwsqMUl8KFJOcW32Wgr5qU0hEViWqbVRtutWOoof6Uxds8Hi2iDkrC44naqfQfjFvM19chm+/tomOvhr8RJ1lexiv7XD/bE5hyul8cxmB5ZTURAfxmg5PQB1YvyY6XMJ3xpyxOr+cSTpok+xwG9fhgN1pc2XXafF20+E5sCNN1x2pUe/UecTuhLzHdXp2m2ULtCNgosM9fodr/A7PBthZsWe84aoskQ1XYvbEOFyLjS0MuGEaLkmn1zCP+JA3XOalbQg7lDfkMdcbLipWWhrQhlnZTR1mZDRsOCtr1cOUzorDBHI7w0nY373htEqYUpBsug0dDntDn9uqoadT2TDONUynSJ0PuFPzhibmH1rnTq9bw1u6jze8561pWEMGaHgtttbhTVRyNfq6QpOVN6xEX2V0JHnDC2xGeMNjvU0NTyUAPjxRVgeC+xKTGR6KfXcoBi5vuKOHvuEm/CbesBAR7U9Mt8MRF3nDIe94w3EU/VdG34ZfDD9v+KNKf3JqHH7EfOj2Cs+NN3yRor5hDyGb4RvavL3ho7Do6uJu+BxV9UmMxcMWb2fDpmK3ZVBnGErGfdiQuq/TfUsMc3rAS75tuMNHC53+lYV+Buh4d4TbUcffhoUjl595h4CLNHDUlz+n8DUSZUg0GVrgrqwSsLUPLFSAlUMLzeEvsLikpG38Gfx+9h8hCzcqr0X84Rvhyyv464sW9ijJTdUs7C8q33MLXxyhPg+8p4hboRUSbCbLJLmZI+R5TMhsWeVTr9ciPx1V9ALo9q+B
/7rktSNEr1XQr5BLfYPQCuHe9zQxS5+ETIkaS6Br8q8gyk9vY4qftjWsf+qIn2tQ1s1t4r8kY5NaVIEp4rxeKLNjCtzoqCgq7O2Tg7n4svDpgIDLdxLNBwTX9wg5FU4ntJDnQVSvNgm8F973Cjk9k78n+EwOV5DZTIGmMyWST1Hsa1rZk4F5hdhO61XUpi29NZH7aZu/L5DL+6RuzqZQ2wU6R3kVmMT/R9oQLM022Ljf6kG1J3WPOCHX6h4vQ/w76o6fWxY+TMlLI/nxrIXbhPqJXXUj8FWG3niDgDkwcoOGGkq99pUuYZ7B2ixCQzdNL/fTdNV1yOw+XAK/qLU9IFkkvpYtfKRDOscNNRPd+NvD+7YscpHQfNJiB6ekHpCHnzwh+IFMvOS30oPtp0bBND3SLNML3fgOUVb/8P+Jonfk6+Q1Iha/VOe1CVP04eU8LUppYdGB/cMUDYyHqBY5tb8OGcHoIfmzThJey+Ks3kGL05hALibEQlqctHOAKS6S8vRYSRZQO4xukG3rZMXNWUyzCJgirDamuB6bdKyzJpslxTxsW6a4LM3/xV2WOFPc0VNscZtHQVMsyHjIYBZdaO5gRruAQSrm26VvENkLGiRkamkwqUfswRQr/sCHz8AdeFL4OzB6tx2sRIHrcI65g7XoK5DW3MFS9JWPHCyFuoMFMQkN5qOwRaQV3EEOZcWDtB6qByy77iATRTiSRNfglBc0d3AC69HggAnSHRzGwLAorcGDnagu29wbuoMN8aEN7iQ/NriFzO7gXlZQBtcRbjeRU8XcmTuowOTjDs5ZeN3BGSru3EGJJ/7Bi9B+E1LP4oAaPGHX2x202DYOmuI6G3RkTGRQF6/a4AFrJIMGa6g7+BP/cPFS1q4Gko8aDLnLdgdjffxq8zj4QkGJO/jmjtwdvEcx+jGo/KkN5eBDPFLFc3FvFc9iU33r3CPY5uxhCNqc7vCmXrwjyBRvpXqxeAPrcVG6js3prgSziteSLSi2Y1Mb1mnp4n9vl4t/gy2tC+tEEgLFkBfXvT3MgZliDzWZqJp5t84r+zCDTa8d60Tat7Hi9WadrnrmN+wdpvglHrbiJ/wRX8dsxg06Z+imv2I3e8dilSkOsY1zV4k1J+7OeLA27S6ye87eCa17V4uVJkox92zNQrN6BrTrAGomN/APryz0DkdA29tLsW6d4HFG0dvAmwI/9xTxeltwT3FSFhbrClgSnAfefFr40MH7d0xBffJqHuBdvbFw+CAvxXvd/yxd2VoyOxC89ykwXybDjqzqqAgiAiLqjwsoKrig7KIioPLwJ1V9bromnU5nYchk6XR2ULjYjEUMMs0unn/yyKEZYs5FyPsboJVNSz/neMytW+ru9knfSTsQL3eRtHwD6iGp00Su6nUKmkdlRtvUe8f8/CjuHh+jZF+CnoKTkfzRaM6pSxFDzgmfWenSKZ8hroNsndIxORukCdJN0gxoBo3svIKjDv9Z2nql+geKDEjHiGyjNOrEI4etvOpSUhT/kf6Sw3ZffZLOSMvg50SbFJZFW6ElzIsU6h/pBSnqvCm/2S3oPYrjrgdJA6jbzj8+o/7q65XPDil+BlPE76UP0BbOM35qE0MrOPEO6ZOlR01LvDbSuyxGFEX9+oBAAlm7ATuLLZ3hUH/pBIaQ9wc+93tN2zEt/CSU6jStK2G6cP+JvajSOQyp71dwhhukcX7wFcZkKsh7bVXwBTsNKvgszCeY5AQf4WRYBTu+iKUw6lBBWpWq4Jge84O0+AwOYTCpggP0AsEeNid06R9d1Qfn6CBVcIbzPsFPbDmrIH0BBKdwgxlciZ4/3JUSpB2RCv7AgW1wye9QsASbeRU8guF/sIjzVcFDDI1VsEA1B6Q5HncInvH8QPBU4ESAtuy6dM38yrAlCjawWaSC18zoChcRqeAlT0sF6/jKBv8x5hz7eyr4QH/AwXv0kHp5zwx5NU7wlg7Cgjc81BRs0pop6DLKoHsKanTWKrjO9o7h
mj8VjPJKpiAdBgfDUpcAfdcG/TzoFORdxsE0aYo+QYNJnjMLxtkMG5iAqOA+PkwqmOWp7OCeNAEv2gnusBYe9hhVcAvWB/cTeNpQxSDvvSuGfMY2ddEvoQDMLVQxSle9xTCvVSxGYCGtigqTYFU06NzvM+xuiw6NNO2USdksi1n6MC3uw4m7Ku7CZqu4h/mvKibgtUwVk6J4gwfDinGecSvCrEQVN3luo8gtZFVM29fzd4as+vZ7WQBtvlvq9M8sVYWkpdrxwLl4wHP5CjS2b6mbToN6fqbagHwiTJmUpR+nloxccCMlyCU2SLfB+e7zOUoasnQBQWeJLJwyC9DLQPDVQERB0LH/GtATZJEd4vn9m5wmJEf7fM6QbrJ4LT7v8PkRhdxAgZXXgYY85PUMOs0LChgIQvxoBMboB5HdqaV9B48nZbKRn2tQAmf+Bc51FxrbEVAXlV+yfi8TiqMd3PDS0s4FtNxekjFDZBuNav7uQPcVn68hs3PFZ+ThBvLIaVjHcxR1Gn7y0SNlxaLIzzxTzXPb0gF+ODP2s1yHlj7cgHtsySOY2qsxHVI4tSmfq6Dbf5Z+obDOw9zSd1TQWcc74cbRZM715drvN67CaJzjatDGP7z6jQv2WY1LHkNqXJPXhL2xatyj88M+67uFHG7ZaOTRzzUOfHa00yjw39cossdpHPF8Y6OE0XKjTDUVXirZOGagypQnHAQ3apLmFAcIVOOMAlMMWFXjEx66G1/MeoaTjthztv1EY4khpGpwlUY1eI9U45livGuq0RVlb3DU3eizn2wMON5qDAVGkusY9pUN3KPXk5NTxWOMMnu8vZ3+V3pfuASsN7XDyd4HurHeAqs+cNRiI79JB+g/ekOYXPR6drDUw4C0944kTwhi9ar3ilEUtuuRb5wtkIB7PdVIsswpFjbN7qSxycps8cBZY5udaGOHPsob7KUaezCAUY0s5WDk0jv2+aFgHYdrexVeAN/QlHXQrcAiAVpo7KIafpiKNgLSGEGeAW2EKBfhHSuNKJIWj3CRqSqWYOijSobixSIOiRZb+CgUm+yRSy5/sOIDbUuLbbqjKd7h21e84Lm34jncgKjHZ/yMxX8wHlPFaxFvCFyymsUrnF3QRm5oMG04gTIPuKBUmxuch9amya+JgZMXba5gVWIuceZWm2uJ+cfrbMw5o3AFiDYn6E21OaN7cHNqS6FNBTcma1OG1Ykp4qCQOYThnzmixYjJiTBv+tMG7uK1WcHFvDZLDJC1oT2j+RXxGW+gMB+YZWrDJUVtvuiEw4wl9I5bY7Tpw5mVgTcHbQZ06WTe8AnV5hW9vTaPmENo04FZjnmWSj3B1Dz3xER7cJVk9mHzYrI8tm3osMls8QyB2aFxpvFwnkubNP28mJTP/2shg/MFJo6Jg0lieVabBJsoQndeJowvjTYxzL20iQrTT+0h3glkgrQCMg6b1/7yaB2YuOQe8e7lXnEYC1St3S14E2MpDqOS3Bsmv5Y6timWV/gHlBKW5l4wbrLUjr5yXazkWerYAUWJHuZzAyzfWuq3708pbefeuaEQO4/I9bCYnePll3gvTi3c8gySaXHmbHD5m2PuafZt3yOsM9sXg6F/jKrzrl5zgcmefY/g4sQ00NaOfTMYqmAs4JhjmDY5pipwgl/KMTXqOOWNMfbt8kMw50PiPK/ItG8PdRxSrghbVsccMcDbQ80PbaTNLwN/ElhhzLOq8g6E6Rv+4tNXLqvbtwqncsynwJdkMaN1uZnT/NkseKGPfbvszNi+aRSEqaxjhhIYCUwE3plxBwv59hVjAz5jIcIxL6K9y60P8yrib7APVqsjlm2VZwe8ykmoIHAAcPAqouLbuGrUvokM7NgexjG7GLc49iVW+BWy6God+zLDH4rZwJvi2DfSjoQdvJ9IlpKfKC2SGWHa0Q9KGcKmgn1pbdc13ebzOmukcNLGMRrXstpXFRGG09lVC1+V1S2Gg6srWFir1T+MoVfncNCvVjy8
u6qj81qN4eNJrUb4TKw4il9NuFqz6kvMAGtBqxd6g13hIiRdOMOVPYUT/HcLWE3XhSpeZ12osJMqHOHl0oUS3K0VDilwgH9yoUAOLh3UhQcYCRba/BMW7uCTrHBPI7jCLWVbZDVhLV24oQ6MyHWhwWxxM7UuXNEpQ+Gc5myFf+i/Cvgy6cI77wEqjGhgWBgwryFT9njmqdBnB1LAhowudNFjFNjzFHibgC50eMdY4ZFpfkn/YK1dWPJipAK/n7owl3xmNOEufLP7LvDssS58sZ5T/BV14YO/yZ80I0/wqdU3z2uvZjwvWojyZqICLp3WhTBPPxYizDhI93WFEPX58RHRhQAraxjP+9R1QbMW9JSmC0pKlEWnX9jHAoYu7FIB/J3pgiesHSrYgt17YZu+7QoZ3rdU2MSCoC6k2Jxp0gSLloS5mS5s8NRnIY6R0hTe3IzXgpWqd0uHaN6NQBN+rYzXxrXqxrugvw+vDuNaD9eOGq/BDRPvGs7NvCvux3iXuCXOeFX7UhuvwpgyHYJ5Zzw25MF/ovFqjD+BQzKvQGtC7wAnk7w89pC8nG9paYnHnbwjZnZIXX+wyTTeEmMo433RfM77pOmf9wFjQm+KDSlvQW9wHu3IjTfE2QLjDXjYyuuzBO/UN6bJsPfMU0zeI30Jex0JvYnaV15o43Wt/B1uxDDeDrdoPM/nohDbGMIZb4tnx7ysz48i7WGhy3i76K2MlxTJhEBcYEPAjqZQxIyULQ0Ph8ZL0deyF6JbOC8IG2MvQOonpQW98SIMKO4Leeu00/Zc8gxsHT1Hqq8ta5a0nU3/Bwua/SUcjPUxPuj/Ytu7/4GZW3+Kf1wf48r+DLr6OO3an6Nj6PfR0v0eNhn7Qwwp+ui2+2Mca+3TeLv/Top91P4T5pb9Lnyt9N8wI+03fa6dCbRwT0n/HgvF/TtS3DW51oe7xD6+eP1zdAv9Kyyq9huY3/avWbYKNov7ZTR8v8rnYz7XMHbpn/D5DMsQ/VMMNft5+xb0cz47BznAUbd+EauW/UPKlfDy9I/4vO3Tdg6Gpfi1vodV1D7uhenv4g3oZ2H62o9jM/cFowdL7fwqiQXNfgLLjP00ZvH9FAvEWz76vOy4H+BUN2j/Wv2Incn0wyAx/AJROw1c6+N4al/hLFAfhitrfdf+v/uGN80krn2gDbSem7jEQZ8E1pUsXPg+LP2HMruJBxybSbRJ7zD1cBO3OOGTaOFUTaJJ2RvYZ7uJI/uhcxMlHkBJHDJQJD0gLZDmSPOkp/iiuokzbHy6iRPshLuJmh2ZuoljrC24iaooK+PQUaKCK1IScntMYoaldDfBVSEXRvxhCyv7+XETfzyPlPjBbqSLAwMo45vAC2YXbqLLq1cSdIXnJp4lroN1YDfxyMq+C28kgmMcGXATAywRu4k+z5wuV7zoZvmHj6Ze/rLfX/7w1OnSvv0ILcRT0BxGx3o5o8+5D3rum6KvXb7TMcByTEPu5Ygeh4bihWgAW/tlXwI9Ht9dvknoFdb5etmV0At+W718ltCjQAfnY9xEhu28SZriOa5EGrfmuYkEeUnfr6VxvN9uIoslcjexz5hdWGS4iT07/bbgkbdDamx/6ibgEN+CxqGghMLfQS+PsVGglxVU2sVBCPxQMRy8dRNhbOu7OPaA9vfDF5+b4D1Gpn2I/QDTLtKZVfsInXO7xCMR7Txsq9sHEihw87p9gvOn7RqcGZr2qcAZNrnbZfqrale4/d0+xgTKtC/x+5j2Ne6Ba/+jlXi7zg3/9gW+laZ9h49C+55nb9ttnFEw7SZmtKZ9i4VP027B3YVpv6A7b3eZf4enLNqPmNCY9hMN09vP/DC1Rxg8mPaYZ13bE5+/YKFHW+52Hw7q2wMc2zPtGS2029/0z9+esydtT/GJaX/QlLv9CYMc0/6SkuJqatNe4u9t2j88l9L+46ejrfm1ajvY2zNtgzN3pu1iKmPa6/Q636bbVNMOSzv7pZWCLE8IDj3bKTZ9
WrLMCMgNfO0N3t7WjvP3SLC1kzyR096VjPd4JKidFeX7bO0tUbHNC/vaO1wTXAwxAV78w4RWLRowO1ILrosuCjgRohYVfJDU4ownG+spjubraY7t6xmMvp36Jg9U1jfoMKEex5H+egI/vlNPYk7r1PfQ8zr1LCZnTn1fdG3RJqe+TZ+BdQ9nJZw6rpZ06jjH4tQdHn2t43Jop77Oiz/rYdheOPWIKIxJcfy4frQ+gw1QnZZA9blELOhNof7BW0rrn5wj1VfcLKsvKfjDrcP6H06zO/UXOORy6l309079lfOP+huH+/UnfDed+jPPENdH+Nc79TGm9k59Yv+FTv1dsulLhQbc66tf0hqpfsUaXWOXsN6AB4T6OSdp9X+4ZtOp16VQFwJ3rNC96HvAu+3Um5hU1m9oBFW/9fntHKvekuwPJVURO6P1I8436yWBHGzD6nkc/3DqBzwUXS/gsI5TP8Fp13pN0p4KlKW0FQkdw92nU69i7rGI8PK70tLnLi382lfv7hVHpDppDCc7KV4S0dnkG9fJkBcn3cD72dlDC5vOLg5oms4+eVnSbRzS72zBWYnBeaJbCx5913UcWMF0DPpK01nHKNt0FK1vOhFY/HTCzCHGk/2dKM/SdwIYCHW4G206IQoEOa7rzHhKvrNgtnPGTPn86bOdQeeHnCXz/KWz306XtkadN5wa6bzCuKfzSNpBt9B5wj6A6ciQsjPCZMTggjvAhCPVTh+dRKdH1UP62+sMsKRkcGWgHeV2Goy65pWIHTkW0sGcz3TqjLnH6K5zx+c2b6Lo3PDexU6TvBa97HVuJWVRynDIsWWnxNM4nSMaInXy+JebDi/JNJ2aJOBagelUcELedMr0Q9w5xvz1zmCHpVSBxZRTc3jVbk0LuAJGYB133NYUvZnWIhgxOrUwXuFajPf41qJYYHNqARH38yh9LYjplVNLCWwKZLAl7NTivqDtWWobwkxKERI0GKyJIWZtV+L4nXRq2+jdnRrM7pwanBGou136Sal14SLAqb2J+CsvSq7ZD4VnYYxmcmrvPpSuj2P8tR6OWzq1IQaCTm3ALqP2jX9sbcYJf+2LVzPXYJ7n1H5gZVBbinL4bHJqv7QkqBW5eFI7JK9EsTwtDmoF2pXWDthCZ/jMOLUKj/TXyjzSX6vywHrtGIfOnBr7jtql8Bos67UE/lH5OXyUOzVepejU6tLkdyxym3nAVaqFJprl7Aq92mDXfmgGe+g+BlkkGOxjgWKwRQ4W7wY78Nc8SGFmPUjbkckgg3WSwabNcgCzi0GcsjgmP+B5wUEYM6BBBAOxQRRLeYOYHb0NOAce4NzzAK7+BiHMoAcaM/sBzjwP4P9hgL22gYI8fNkMfmAcO/i1/eoArTr49lXXBvCMMVjYCdlgil998OGzeYywZzUYQ3LiM7aE71gdHvTwCp9d410fDOG7bPBiO9BBF9OzwSt6lMEbVlsGHayZDx7ZCE9oycEzzskO7pjo3o7gBw+2zQdtzGkHTYz9Bjc4AT24xXs8wP7hgIeyBg3bmQzO4Rdz8A/d2aAO08/BBbqiAdxIDeyfrrs2OMXptAF2IgZV7GwODu0/eVBEvzE4wr9oULJTkkHO5xyvDfK2kx0csK2xk7Z21+EG4lnDtpV63cDKwN0jL8R8xaWXgwKXK+zoG8sVCUz5VOIXW5h2DI6rxxMcBqvEHD7/E9/YBrDjdxO18MWtCQzjaxam6CCUHXubRwsT+g+yY2+situRuB1DKxz2tWMEOeyrEvAwo3DWFxJv9AhpB/wMsR9VdsBP+Wc6vLQDfiZ7hD8dleC2iZ3cwA+enenAAMXOa7CqhfPICwtNzJQVpkkxC1c416/sbAn7mAmukiVwvlthtgTWOboEleAWToKuOe2sBu43MZGprWE79svSjW08FvyWpluWmD0DRixFiZWlrr4H/+PW0kgD5Bpx1XNL5x7CiI/cWaLTF3i8AWmCQGOECV+hww6KUr7EFohk+wG69wh6
hTi9iEIuHQKtIjOV2Lc0CmJ+XDBaSUudx3VLvQ5LmQUj9g80sgstsxieCxDPIGWaEqykiu2RGta1bmnhHY+vYH+iTqFDCJzOkWRZBg2jQiq0Ablr4aO2TifBLDX5VdJj0hLpEWMDfD4gzbOgU9APBaofKINW1OfQvIFa7OxYssUWe/0iZZLdTdQH5XSDFaT4RN0yqLHylpaWg+BWoFM/ofGcIMqjagU8Z+/4jDI8PPARBcwgL7NKQ6L4BgbbynsG26DiWuGnVOMq+WghZ961dJuvxQg/3ja5KbS6yaLV9S3SqyjqoC/QTGrrEr9jiYywpUsU/jvHn1hRAMUxp3iDCsz9occMoiwrG3DfAb1iYVcoZ/CXkVLLKmmF9ISUr3mtTnpBioKq5ytLd8PMGk3vDPHupfBovlDkUNuSTaSOIEEqDtEhW/R+RIoEJjbg2zEEvX5CzfC+zzOI20R1g/wv7UcojN/Jjf6x7GyvCPgV6gjYSUaijImoSpTwcVaJIp00Jux8FF1NAUuXiQOfW7eQx8qkSuToVDeR5T98D52/SnA/MgHLVgvbGEErO+WmwpQdsyo7y7b9s7JTbmw82tk2OqQN2sDZ2THFQ9LdBNmdBXBhusIMG6UwPqR12APb6TZ7k3X2ba9b3KTpbeJT6OA+km8LaZ4r6CUwq+klMTt2ehuclPSyNOTr7fO4RA/Fdnp78KLe81B4p7eDoZzT2+I8gTegOD0jaWFA6vQcOqrDfSp2jtVbl6govNL1Yhj298LCCoq+EDUEhNeQbK/oBL5XZ9Q56T+pgJ152xFS7w5uhXr32L3p3UqaG3i/63HTqEcPfL1DSVKEDz/ckYL4nAjnsfnU49qS0zvjUY+eeFTCRSwPFo55TgY3sYApXvdwEwtCc5x56dEFHz0AOT36KHN6HzTh7K1YFIzYnZ4dMaDFu/TJ1HuiE58ener1Olgkd3qP0uIT+vzrvdN/T29EF0G9sfxeA2YyJO1JHn3sfr0ewKvdzj1GhWqnBQMttZOnhewOVjvUZIduSCfbGAioyT7enVf76to59mQXdwSqCffVJwlSLsRP6BN5smk/5He8IntCL6eTIJ2iTgIM+PkvmOAe8ocUP46TMCaEaqIYjz31OxTbjhn3Epaaj7ylqnAATmNlafTcEm/fkp9dS5zWEaKmCzx/pUg3LI30kdoZkrFlqes98NmBvr0hOReQSfvBN2Qo6H60pNyxJPEBMmUyPn6CLBn+xeMvH19QgMgMSW/JeEcciE6jQMzG3b2xdCsKgZs30CVq5lT2IOdnLUMhclAIJ/xIVc9ImvqDTCxt6U4cgks0kAk2QZPXEPxAK5j5BDmz3a6zlm4w4eMJHlEhVV5RmGX7RpUd74N0BI6C+FGRj2C7YRRXfaP8zlHd0m20oZ4tyC7h16iAHIPUwMwOKJtEEQyqY67q1IeGCKDe0QZVBCG9MYbEA9S5mW3wk5uQQ2vsoFnNfcHS2B2UbiOha4dJVuxqbukI+pVryED99xFnumgn/ar5DKXO1aWlebSCfkNkAr+VfkcRnBmKWkHF3e0CSx0mG8mv+VLNAohcR6QTQDuZP7OmX3lLfBuXFKk2/Ym14T9JtXGXjGpPyOEosP0IK552l8+v2BJS7TdadrR/7B9atXlrkWpPuffZ/vK5mxbozrJdpnreJdE+xnkd1T7BVKBdg8cY1T6FKwXVPsMOXztHsTzpAUfH7SIHpe0jWva0xdKkfUuJFs142nfw9aja95gdt2mkqdr/WIVLDPpV+5q2zu0GE/n5nWkHGQhD7PUc87B2jDXhn7jtwEixbeBUs02jw/YWy7zNZ09KvscA/beqdpyBhMSksNWi2mncCqPaGUZtYpYc0naaF8IhdFJ5vl0LhXx5kCaI6oKiW9tDpxsKIA5XkVmCOFxIFvL7HDs1sKBaEYbCHwD/4VooBvkY5GOUh9+cUBQ7iJaaOOkM1LE0Qn6E/Aj5EfjCCSWhJAklSSpJQkkCzASYCTLx
FQ3FMdW2NAJiu3VL3c+10AYYXC6wVAfXQvByZUkTBIk3kTgDZgbMDJkZMNO+JcgFiFJrIQzFQbXD5xtQc71218N3NrSDMYelah3UCfHZvqD2t38cAVRuyVCryNBjmaGvBGDcBDUfImHnBYCLU5F/IlSpxDSTEjojdDxC+A6Qu6LipqHgPEwI7gO0/5dx3rqEpgBX71AkWZe4A2EGyXxlrrqZY6jM8pUjoNN/ZH3XBRgTYCK1xTxds82o00+q+Dph3Ms9QydZ6mAZ9e+FSLwCot+sS5y1/ppRU5w5utG0AEvvBsZUv8mW0IkbQuhCmMcCVebpSlE3pXWPaoAhU+udgKhk1tpj8+ik/ZU9WEJZ6j/ls317tsnZxgWu22RsYVnR0uARqP+S9GrN9RcxLQ3t813e57u8z3d5H++yKuE8biiLTgD0CzQG4hyshfZwA4WlpgB6uabst9vU10K7uB/IUnfO5ynp8ZrO0q90dob1fJ2dM/BBOsUCvs5+McCr2bIrPouxQ5bXuGVpEpbFbb06+0aHT9lXsp5Ix6Qjpn6niVuWbqWzWD7R2YGwLslqkF6T/oMlRpaXWmUvSOukd/Sel23zVros78TLYryos7yDSGdbZN2SFkkPscGgs0cCecmxgA257AG9UWVrlKMb7myZRT3mc5oeY7Mp7Ejr7CZG1zqbwR6Rzsbpqze7gTUxnU0KJJhsj+6isjT70Vn7s7QsbMMGLrvFau3QO23Ww3aszjqSVmNlVGddCRmBdfq7yiope0SYYYGYQFQgIOAXCAkEfa21uz4Hk3cdvAZPdsT2aamzeWqpf2aJynXBfjmw1N3ZAKf6YKlR1xR8xnN2B/zHlaV7iNSLPUuPR0xZpHSZKZd8TuD5/hDUDg6efB9Q7jDS9VIUiYHOWIBlErSQs9TLg53+sXSEHIwdStosnhykX16AXkzAn0dIHVINfvgXNJKBvB8KnDLqqJuIVafQ4/xU8exHZUzwnvL3fH4H3060LecdJdO5Hvl/4E/6rGYFsfM2OBmUQd9mIfO9IP0jRQ31wS5j3yz9vmTSc9BWCAz8BubhilQiz0hrpCfgv7b4jPZwJnegwSjlnyytslXjQVA7LrJ04wO0i+ZyrvCLOOth0i3Q+DGSxpCJuUIJ9Dt+UWe2Yiwq7iTQmG5gaOkulJlxAyofXiHu/YPICpGqZvACgJjokHSwNlQYMAzXsb8wdPDVG8KjxdoQc41h0A48hgHszQ/p9m4Yw8roMOrrrQ2TWOce4qM7jPuGa8MNPGVghjKEndHQw2RouI1d+CFMfodZGGcOC9jWGNKj4TBnx3fDEgimi0O42R4e+oLO2rCKHnZ4jNX+YZn0jJQbycMTrLUO6ct5WMefa3iOcx7DBmXQBQ2v+HiJ8rfg13t4g/3UYZPsB9J70jvSZ1jDDnm5zfARfteHHT6/knZJX0iHtvceDvjYZ6P1+Pxu+/DhhI8jXJIwhMHY8IOPUzuUG859t2tDLKcOf7HmOUT/62SndGFgu2zQTwl88YY326e79xa+eVzPdu1wcpBd+NyOBUn8I/I89+Zk/yTZihfoLRvYlFteY2KexWaUk33k8oLt2HFJXvaZFprZF1Hxii12x/bqZtdCHyfXHdu7W02O7f2NpWMRnODEiGO/BYw6pycI27djFcN+Bmxv4WTl9jn7OYBfBNuvwzOu7eSRuoXlgOw9nx+w0+dkcwzksaTu2A4dego83Jilp0zHdvwMlbh7ZPt3+3452QpG0o7t55G4yp0U+wHA3XnZU+xLO9kzbHs5toOHo39094CEAAyfHPtd+Gdpms8ZnkPNYlDoZLmUYrt3ROzRwNN+BXAINLtPvxm2W8e5U9utI41mnXg5gmP7fWz02K+ACVjwY67g2L4dckFSLrLYL4FBIArPylkYAaklh/LLLVLcuqCWHtYIlrswuVPLLHYe1JLD+mUCXx61TGNhYZki3SSFr0a19GMbUS2DmIAsIzxKuYwxYRQ3i6ml4rL5
0uHtM0sN0+aly6M6S4NplJ1RwNdt61XgDZbrLb79utWl9XjriRaXrQnuvG+9+wInFkY0tG5huqZbvGu21efVy605hlG6tfAF+hZm2HHTrW/MtHTrk5cot75gIdqa8oaL1gfPtNU0P9KtFS1QW7909Nj6g71qa4mXTLcw3dOtI6YtwQBYt4oMHLAQBSw46lYOI4xWnvdXtE5ZmzMacrZqsEJtHYsycdvYKsNxuW6J+WurQXP21iU8+bau6HGxVYc7Pt26oC1865za4SdGt9qS4Z3PObdwj2FV6xb3KepWC7+5bjV5UWjrBneG6ZYR7S46Sd3S2LXTLQfGlrqlYIDUWoejY92K8kaRVpiFj9DevhXk6KJlZ1to1QBHO60MrZNam6I4hU5bt9L8MZL4K+rWBksah8WKbmVhntuixYVu7dEDZWvLh1bfplFULQ6BWgLDvVqKxqK1DH/7Gs6u6doW3iVd26HnUOy92kLXaFeiz57s6/jjixQtMXYM8WMHKLtgXFiiWiNL9WPT0tKVJQnIJiCb3rfEq4Acg1BJ2JLjBIIFS0JIEWpZ4vxA0MlPkUl5RA6yMvMS9McgXWyTfQda+WfpxgISwReUo1kFY2DJNmQz65BqIrtM1JI4lCmDYuoL6DEvb6CzHdI9iPSRxH7PwIC4vv0GJ4esVDSJ5wnKF/21JItyBZcgP5bsU3SFxMGxJYEGC99BUsY/WBJGScIxSD6gfGE0hP6O49GD5C3CIeTnRm8gNjqyNMWG3rAkfw+BLh5TKEQKDaCTaTx+IcPxBxj3Zs2oL1piqCmsNI3CdqpRcxxnMOob9xmoGXZUjfoTuV8651TPdFarHmmApMT/r3qj0Yd6pbWB4lfUqBf60VRDnwvNfVpOqQmzGdHuV/HWPKPqtMhS/7CMbdQVvXByZmuhRRMudUuHMeqG1yeqJu0nVBsTdKMeJJ970XlHqwZ1wHsXVAle1Y3CqMOoIryeqipuasRVXAicSS6nAjUBXjVolHizURs0ClObTJbhxYZqB6bQypMCy40GuFgK4uvwh6OMpHVEkxYIwbTJqKDEBQT81BuVQIS2y7jramH/XS/wX6NrXYE3gZ5AH1ZIGnYJtsOtjWAvq2u4g1jXprBsU3d3/K/XOEmsffKwPkwW7P+1xrO76TCvNK9986hvFxcrqm6ZJ+m6p1h917U5lqO6J7yksMtjud0CV8e6OXZ8tYWtrOraRn6ycEgBHnNRuC4qY6HNGxa7d2g2XfvBr6i6ddHBw31dziNrv1w+w21SfQtXuGdIdQcU4Apht8fbWGp/PFvanSCkuu8+P4qOaavCVVz2+9Z9gp/j2kr0ycWG3Teq+EEPq7orHtLrfoqKL94k1p1KiOeYu1xA7PLAiupy3l3LwUq/xiZ4HJKDO7R0rchTZN0g3GarbogHcrt+rJ+pLu80Vt0o1yi7YSqNcHegq6iihGX/rsHaZ1dLbg4WBnnrla7Z7xNYWzjYo7rb3Anv7rOuu/ISVHilT5cXpKtuEkODbgYrlbp2zMWoboq58iBArSqvDO9O07Uavg61U1hC6dqZRJ3zE137J9ovBC4l7hoz8VoDPJXmrVC1pgjcsG1uJdASuBM5LaF75vUggTb0ud4EZtmuNxIYYzzlwkwfoZ4AbhByYYuPwItAFyudrodTH66HRWLXe8T96K63wkDX9X5to7reHy1avSUvOfB+YFvnenMRXAjMROE3NvlcHCRA6EvgQ0ROabOM4wu/FuhR3fWqUv4KLjBxvSP7g7leyb50rndIW2mvaH9Q1yvgsgYvB+8YrpfHj+d6bfQDrsd/puvd0iLXa+G/63pNH+gNfFe43rXkDxsf17uE/bRXh6W5651jLOd6vGjU9aJ471wvBpM81wv7mpYGYTLnerxfwDP2D+p6rpRVSwzPmrgeDNFdbx+7Xy4OLZQs7GDU7XpbuDQCpxugNSNlSeBv43pJeqaebuJIrsYNtnaoNsUAVU+xr6WncY6+cJvtm4V9
8rKkOxhuTD20sMbNtkizhUminmocMZoqCqzDsFJPo0wT8flthzUN4RWbBiXvAN/wqR93kpmSGAiXHgSauFnMlG55/0ypRUfSpWtesVP6x+MWpTo9uZdw0ZgpnUlUGWdSShVcXGVKVUxETOmQ7stKR7Q1LmGD05TyPqds4QC9hSmtYJhbWuJ0nSn9+JIxC78Cf4yaSaG+Bea8F6i0kHJPedKk9EF/96VPtL8pfUmBR5JgLDCBUWLpXVL3BPo88VEaMGoovK7AK1YQTelNEncEHiXuSULPMJU3JbERLu3BeteUsrSNLO1jfd6UtlhfXqJjSp7AjpQ8xYuKSxnMnk1pExudprThC9jvaSlOe81SUmoV5r0+pSjM/UwpJhDgBUQl3hRnSrwIypQc7AibksEyuynxIEtpHda3pqTweR+VYOg7OkTfNSriozfi7HGUw8swwu1cI17JMzpDE45O4PtnVMPXbHSMUoyqMKcflUl5AeeIp3JHDSy7jS4xZB5d4Q88qmPoMLrAru/oH2UeSO/gM3J0z7S3OGE44px3dEPOKyxmR28wBB294Ksw6uLfPnpikZ/xe4061PNox9ijiW3fEb7XoxEuXxyNsSc9GuD7P8IHZtTDezeaM/UCY5rRjPQbtlmjTz5/UStuSl0bfZCu0JwjnrUYLTFYGv3ARdPIYA1npLC2MFr3eWujKGzcR/RfMYrgLP0oyOcADMxGGfjKG2HgP+Jh11HCZ7NLUmID1qOjLDzyjPbtDzXaxSGkEc9V6nEOqy96nEfJ9PhAoCBwiHdEj4sSOhIowThfj8sCvPRej4/xzdTjKi5XGZ9I1KnAGc93jM+xxK7H//CN1OMLibuiE4txg1+48Q16kPGtL2AngOMWJjbjO5Raj+85XBo/2s5Rj5+wHq/Hz5R+4R1s4ze823rc4wRo3Of9bGPesDgewohCj3kqZDyGEbQeTzD/G7/DDECPcfWyHn/CZ5cefzHmmxc1jhdMs8RRDz3+5Xxv/IcZ4SJi+7cFbkFehPABWPjROejxulRLscMbayZ3eNHa2DDA7/DYLy0Z4DWP46DPhXTIF7Ajw3FY4iICUYGYwIZAnEoSHC6Ok8JLS84ZCW0KbPFY9nhbfkZPmDsiuUslezCR1+Os8PYBizhONCw2bEMs9mG1s8jiv77Yw726u2jMxY79hi6wLrLYwsFS06v57KC9d0JL8N4ZDZN7pzgO0qsIrwwrWdM75rnCXhEOJHuHTHQkAjmMwnsFn7Gj6t4BHJia3h0OE/baOCfSe6C2G2ydmF6T6Vtwetm7wlGKXsN+iUzvmvx/eFlMj5eSm94FLCxNr05dHLWYnlyb1uPNr6bXwwHG3hBbsaY3oApe12V6L6LijQbhvVcWoUPpZx76w01YqMOvVOubAjPSBcXmLNwHjzT2pmR9YjxgehEshhkY+gCi0gh+2tD3QrxZpRfkBKznsERaBGE/ZHqGLIUpgunt0fK7t0vevijKCmwLbGFJw8A6CSrSvOCll+JtCT04tjW9jETFeX8E7JwQSmB+tnjF4s7iGVtmC9wBtXhHz7SY4L+zwC7UwvaH9vUY4oO44C2UC/gLWVv0sFi5mKN4iy/0MotPLGUtPqhxhann4o8GWY997Mwvfsn6sV3UYglb2EUJ7+ARepgFjbJV/MzWf3GI6ciiAFcRiwPsry1y+LQsztCvL07RLAuepy/ucRm0uEsHd0Ve6OUUt3nuu7gFy+7iDm8kK3o42F5M46QG/OCAxRswixnSOEzPnCL2qp1i0ue2LCRo216MYHvMKYbtr+4UY5g9OXDic2AhQA9zRfr/hbcfyDmYvDtFFzfUwaHPloV1vB9OUfmc0b+1RQOf4gXOWS0u0SctLlBnDoKc4orW8sUfmmoVlzgGX/zjsm7xV+Ab18EVZ1zjLS5Y+g9eWVecisCXwCf95RXHIvjONd/iBAsITrHvg0QPS2ZOcSjN0aXru+IL7NKLrxR4hKWyU+yQ9Uz7r+ITzMaK
97yUr3jHMy/FNhaPiw9Mc4MXySk20bE4xRa6d6d4i8PJThEbvBYaFL+m+D+uCxfP4R+vWGdD1tBJOcUzTLmd4inP7BQrPAZULPM2t2IVpy6d4jHMS5xiEZ6XiyVfABnnhZeDvV5RLpstHuDjoeJ4ZVTkzP6QKo59yq1truba3s5OquNV+k9arPNABE6sHFrAKXKnE/H9WRoi9ZPy1han44oUvwBOB8uGDg7D2Obs7PNUVEfeTJyx+bKwI7ANA3ank6HyTV6V2klRb5q7AbjlB7CBMxOdOC1dvE3c1mfH/p6lMIxQnnh88uJk7eMSF+VlYS9iJw8whMGJ57mFHZpbex49g3ncQVd2RgGLSjsRsS+LsrMSWEpiIoKsFLJXONjcthBF76K8CBbL7YzGWBrCORllZzTwAONxGdTpTPAzdN5JR/jkOTh+M7IwIA/3fjmdHp/7pK+kL6RPpM/0vN3pMPBoJ44xLBRZ4gep2cyOROUK9xlfYhrtdP4wJXE6S7bgD4+mdeYMLEhnpN9wIOZ0Phn4onftzpT5fNCZd+fU54fmM6zUO50aDBg7xzzN0akyUZnGip0KjpR1jngQC+d4bBfQOeT2TedAoEDxnIjn6RSk80C3kJ02c7wjvadYi89NbBR1bvh8jWZ1OpzZO51LxlzRp2WnTk+bnQsW7pwx9Hap0te4nVbpI8y57ZDEDkbHmtTgLbEDFzt+sYMOnV0b82DjGL7L7fDDfivsOMW+guoyQ2886QaNpMI3cGA5TuELOYbhkh2c2CmcHY7gvdHY87eDi6M1O6YwxbUxTr5iHBlcG2NihrFke82OPd/X7FjTvoV24Ol8ro3xF7TDRxirxHFswQ4vdcPmeYljpHZ0qYZrdsxoPysYMp6tjWHBMK7jHR2LryB97Euu2bGj/cPZgeTB2pi+YuzA0c7Fxx282nb4aD8klzgIgwHgbM0OGi13iFfbjhjtf2k88+XX7NjPDs/t2ND01uygUNvW+cSMZvxD/hJzTTsWhJmoPqe7icsknHukj+nxTsOCQl0muH6m6UwqXUFXpfQTY/bRzaRPcXZH6R6XitJndNOk57RO1bzKU/9iryidxypT+ohe5tL4Kqr0IU95povwJK+04Z/XXccBW+VquJ5zHV7z6brsH1w/XwAXN6UpN0i7ajdkf2P7m09PLTWRmaUlMEqeJcp+JO3jOkjUksC5JbkjkBLIMUgFCb8h+HuINK+Q1E+fePYYubVnadIBw9Qonbd0+8MSJziGtCqAHkDENcfkhCASJrtN+oD0420+v4GO5lBwNQG/htj9Gz72ERnyGHlgaRgFDqNsYQaRVzgHcgJyCXKF5AEmLFNJFjSiLT1Yocg/UKJLXUsTqE4CArrcIEVZE/9A6iD3IKzo8IrxcTCqqFwog8cnS0JIGkKxnYjB4yvIEGQA0gNB44RGlhSRp/OBjEJ/ICjUFp620NBbaEvtd0iRmz7fRAUC0GxeUE/1jd9WRVFd/YLSO69Q5xzx93+5sDQLNVm0X7YJ4Q2UxEmhZFHoc5pgH97hZ29ZEuyAPII8g6A2Qfxahy94OgPBG6DnCzBR0iB++mAETA9CTmDJZ9RnjyUs4HU7Rlq1PCDFT3eMDNQxf5xjNF0aBdYR/CBuEg2q23jxzB4K5vTxWzpTCPrHfJxauqGQvIls/D8I41VzrlHsDbxMGyiLE8Y7ZpLfTMYmzaGhA2AHtkDwbuk62jCQAEHbmBEaT22lEbeCdvWC/1QAVQ+gIQP4IQPvlhRQ3gAq4GaQQQDvhorjh3P5H1OfKUtjeJF0Ee0RY20raNsYFJonFEknrknxF7hGC/rJbaAkfrxA7s4+1OPlUWE0vx+/pf8WBH8WPxQ6j/hN/GyLL7wwB2gr7cRAH/Fj+yGgF2FKNEkLpPhxnViH4vgl0mjgMmpQxnt2xJ8rhJ/uHXry/FVDv0ilUShzCbEoUukL/Hei6Eei/G8F8R5G8X/RX0yMbNQ13rZvvBFRNEkUQZ3E
f88Z4nf88+PxBconPVYbv2k+yke0k77nz4DG+MOv5ILkb6gA/4c8fqC8HQu6MUwAlbtBT6tuHOuMysV5feUmcY2FctO4fFa5Ge6/uJtYw1MurQTcbX6KXA8WCsrd4QEUl5dFKneP1tAudneVu09vh26O3w+Xnbx7gN0Ft4CBhYrXMY5U7hGd9LllfofdCnYhXHpAdKuYOij3hO7R3FP6xnPP2P2fs7R1emB2L2DsqFzcJKHcK6nctZ1oKZf+q9w7arvnPoj7wBODlzHmQ7tu95GfHvdJ1DxLri9M2iV9pfAbTr4ol+cJ3QF9YrlDKdRIYCwwEXin+1R3ynM77qeonUnct8CceyXugieM3KUwfwR+BVaYtroZbIi5GW6IuRk2UvyCrFO6wb68Qk0+uKaTicAVUwajVZ0J4BOqM34sOGVC2IDSmSDjucqdcbFHn1mnrCKfayeZfe6sZ7JY88rg1g2d2aJVRWaH8dwUyqTgCTCzSdPFTIaySZGidaQtHHz1ZsoM0DwyQ/PKzCEcvutMicU6Yu555p5jeQp8PoApjs7cc3cvc8dN/kyb6R8wPNeZGyllk7wWrCoytyJ9RVaD/rAy/xg4Zy0umBktTDMj/CY6845lscyEXlEyNDTN9CSmy8ALFxMzvD8084qprs502HpPOEmrMysW+gejc5355QWiGe7UZWasIIbiOjOXDD5Yty8J8Dbxy394py/r/D3pIUHZLFv3AP3YkdAtwN0xALV3ALA9oaUmUhWJCqMuChS031FIHJMXvKPgxTNDkTdAgjx/mqycIutMAjGAx3hVeJA8ggztzgG/1G2/82Cd7zE/RQ32M4+i5LuE5SejMO578QVyIvdCfRush5uRTKKsjjr8YVyGNdYvLkMmT5hdUgk12k8NYzRTbR2S94+0TlY8Qw3JU0DwibwhxYIPErNgAUfUYKIRShTZLsEkeZusY2q0prq+SI60SooNU196yOcl6N4xaBPEsR82C9phRCRCGiJNkSZI96hC83mfFPu0Pjd+Bgjdkt6RvlHfJ3aDfXFqTbqg9usKTgUUhumWkpMqgKpxndFMjb1rX+AXtPrC5xvKjJYCM4oydawGGmSlw0HqY87hAOkmaZoUe8V28sOyBrHbbKcVkGc7GLdPoQfSNinrE2b7hJkq3GKWMT6zpGEWL9wlfSJ9Ju2QPoLuik4WOvxN+kFq351Xn3MhUHgBuMkUQLffAf4tBiKnIrhOaJ0Js09YMLUZrgjpR8JHmpLTAmG3S8nYCeC3SoknJtODC4AKPUnoi0VQVKKfWozLF5mgfE3magew7DBqRZ65bDNZZsbQEUNmdkU4pa7nJCh+RJuqRIV69k2YjykXTQlQsLLLcidcQF4C60NmEmWWznVHJB6o4y3O0L0C/NnmefOZ8z7AaWqAG6oBvBTo8Rfo7BVUR/KUtgNYC8/CO5sBfo5BSz1hJQA7j6D7IBsF0K0KqLIDZqRmjjr4J0Ce86ooMVsCtrOgQeav5y3C2wEhlBDYEGaZpYpeUYedyICZYGZm80FEdgR2yYxlJbnNu2M7zR2AU+gClB3JI1R9J5zfAT7KoDNPYipMlV4xFJsIfJGZnAgw8fsnaCcJGggx4pJpndMAQ9l9QiYiICIZictoASWwTjgS9Zk9gV0BT2BLIEGwk01kNHtinb6Yrdpcknn9xlCMlTK1ICCohPcj8CvQFxgKjATsG/jo88KgqnoJcC4GgB8shfs+cqD67IcCuArEhvoiXjgBmNYVU+WPAaEOJZppSsSbDG0x5MDfgmXiS2fh8BrgpqICjNOVBuFEtJyMADgtZLPJ3FLQtKkLo1grcbDHuMsSVY7uGTrLMO7rVuCGgPtQLLwMmXy+TcnTMtNdicjbHHDI2iu8VDa7uGLonCHnL8lkXdZYdEwiZEV/Kbdp/1F2WOrkACqdARxXQJ39NEA/MqT6lChFJOqVqZpBRsF7tg1NLhl3tc7QtQMoDqmjuQCUUwzcuoStGMVPC4Tg
ElB9Ai2UqfbhmXInInd+C5hGGbgmBN8ZiDAPFT4ChJm7G2BalT0FbEpFEpLTimV3Aj2K/7Gw+/bfDZeqIYDjgppLA/B/MxA5psBqAFB7NYCbpMRPTxLdMdCl+N8TeQUKKHwQEIpTR/6Ioed7Sm6MGSoz5KajAgnGXTckzmMIH02r7LxCkV2qds1M4J2SXpuhsGJovmS6FcunXqpSK0fAT5gJcyW5h+dMd38L2DxgYFtq/HUOCO+Sl2gCKocMzOICKUCeeYWlTiOWLTxlrT3qNrGwtI+SkH1/RrY5bwFo9hHb2z4/lgDG+QIcD4R3BUhcguq9JXkKFMUa+bZSoB4TOtUHQDpE/ekcAwY0V2Nyf5P6N/6HlsAb0/44gPKcgXIe8H5ARfNHQoh5q3EfUE2CuqZLFbEbhtACI1+BKjYLzPHtjDFR1vJQlP9tC4/ls1Ns0HfSESm1hIdMn4wy0+es5LPOKNu2Y9/qF/RY6Bw0oUFN9R4Q+QPVkQJ5ez3AyzcDH0Gm8VNsi2LOG6DwCurcMGnujsJzF7DTZGCrJzwmqu6CblyDlg1THv0A4hegKhAgwHPO2Bdk2fTtJhWMioCjUwkcES7zgEMG4kPQ7RTTHDJpdY8lHIuaE0B0BhqrMnmNTeBs1wTOBG4A6BzGvs0GaJ45hyuMXmfjGZynHIvTAhxCs1S3ywBTqBDwOZnYN/GbUE0AlOMXCDNB45ahVkmY+wKnTL5VFDgUKAkcUVnEZSgYIzwwdzdI1W60RZHrFyrzJPdpivk1O4DtV9AsE+NLbMW2tqkpMxYVUoHMkHGrjoR+BaZUmLqQrFl9tfFI9bcsJHpaG+Vq6gosKFEj0/k7ldBS4uAFwldxKL8ZZ4CNpu4NFe7YYr3bwWuWUC4CTNoT2BGIApz+WEQChEeCwdTx3Xa9b2Q+pQiVJOGiQ2hWKJlsUkvmmhCMC/MXoA57lDwcMvR9A5j+MLBFlc5PG7D9wFQvK0BW6AeTfkpR/XeES03mwRfTXh8Q/j4lxLpNqMm5Ik9dMeMJFZooq6SGlLheSKmlCbZZ6Yn9Kvd8ewtQnT5iYATqPFYBiU3GLDzAwZgx7iXATeYkipDoMHB2DlD+V4b8FFTzKJO99gg/ScY9SaicBgTyFCw3yKvUAe+vkrhJmHxL6BiQZWmyAbICGwz8ghr/DXkPZfJInXPWyjxPGXrJAnKUw+jPSvf9gOAt6P4/lueDz4dM+MSCOKM7SZ8XaLENMBzr+9TjPUDfKoaWOYb6K4BpHhGGKUD8FtR7FVYRsKcpfXYmvDFDixdCvgTYeQJ1lm+SF+UdHCu38hMK7tQk1YA8cwfYOKZ4vkLeiBmrzBwQ2JfihiSqQcgeUm+ONdKN/0Osyu4VEz/MRT5JiSrrVRW91z3y6kVJNWVoxiqrwQZg8gzqhnap4++dgsMvkZfM7jMSZ4s18PmXoPrxCaAwMx7Yf28EkPCTd5yjhHPN0N4+JQoExx0IMLVxtgmYKlrYuAe4wR2mwyKFZb4uJPTOdJURVeeahM8UIZ+mSPyECZL3EpoDli0mS4XJe2kIsGDRf6RXTHTaZ4CZ6NsqYeYACl+gu8xplxntbpDukXqkW6RSGYyabCDKjNGXgDeSkDRWVOoZp6CKLYX5CaiQHq6TFbPtPbQzl22A2rsHOF9pgbhATOD/uBwg8U06J52Sfki0BH5BvaawnhhoMeJdWAOyLhj4YIxTLQM2mKfy+1mseU0kbsmcDyQ0FegDdPNNNPaE2RV4YQKvL6FXQLwOmjGgR0VyfphJtkrhQ1ZTbVG7qp0y6ewAELsDDV9LTjfMNyRFm20IbArsCOwL7AmsCxgBNnjMtvDMDvu7AOdiCjCFqoTuGLc3AajHHuN+DEO5fQmlBHbIvNhlaMgE5nrM5PkDKpseAzLboG7mirwPZvDeolylwlRlybStGUrOCd+U74WZ6nRF3ssDYfTJrA/PmeyrTJH1COHqg/CXFkgJxJnueZ0JEicCpyzX+gsh
xero5LYoyzOHln37v33O5hHh/A1gfnIMtVaExzOAWr4D9KJFEUzdv32JMih+CCuHab+FaZuwSzndpH5VFpEyVTnLP0LfBfwaBjDZsnqv1xm6uJTUBcJA4uYvDP3uCFBlNUB575PZdO8pCIdpsD8+Z1xum/JdSV3/Idyy/GpLavp6QEjVCBPG6fKJxA0lbkyY5Rm388BQQIqyMwe4SVb5ULILHYvEF0u0KSrxL7bMq0MCXI/M2RnPfcfXoLrdABinLKE5Q8MtgNN/InzlCY8bgFCB9ID0HLT8wTTlMUCVKYzFWEtfSF9JV6Cu+mLgl/SbdEYqSkaM0CeXhC5TOhNmpmYTQHRAysyi5BytM6nUSs2Zs9p6o4YD1mfjh7zPbUCM2kysSN3xP4a6DuWehceyuIGVhFi+YIlyNY8BKgrK8w5oiq2osi0ABrM2IYbONrBLKTboo+TM5wrzCttCL/iDLHy7SVBdnwDMX4Nw/g3YhAtyO0AMkef6GTqqAFwD5+N2zrQLQM9oo2IOBbHWZ2FTU32c4pk/il+TnpxSPD4GFLdA0UXYUsQorQ89AsZHgBuBNhPnvyR0z0zSC6Zmqd1wE7CzIYW5ZcwTpednhDbzcs4pZ14CDP28i0LWwaM+LOeDNWKOkVsJiaqXRwHW//tRFOJWITuP3wM41XUAFgqmdqzyC8CigA0kbwFm7xmgFwcUd8sArAtMuQZgWcsrymGKM+VEH4GhwIByIWrI5Sl+w5CZeGRi5GuZ5wFmiU0MG5ow5KgzYWoBhwmw+mmTHx0BsP4w9R1tUXxFjc41q2eutlnLT6baJM+JrwBY4LAC0bCIs4jO+rek6gvzhIIT0nfGuA1mn5CGWrcvxYdPT+8BTuFaoC5wLvB/XEvglgn6b4SFItMNEGIeAPsdH/Yb/A3A/NUGqq4kXhfxEWF5BjDXBDWhYpOsMQ4n9i3z9RnwLvIVFmRjxcBumBDMM9XLmYRYYvNgmDg6Zxkp4cDJ74dtfKqvfoJmO6Q3pMw9yzpmmU/2gUq2KKpeptTSbAMCBeoKnACCVeZZpPjuArTC9tPFO6YZvVPDs/2gfsKexFK1d0mIRMnrgTr7EuiCljKM/yNgZvRpX+Mt4a0T+k1G5UGPO6AJchIU/iAfXYLlvIBqmJxa2CsDTKvAkKTMsTTO7i0g9MoYKajZGjKqQjkn8kiY7rAI5QklK0yNFUvLy6yY7LsGqO5TIHgi4nUmPjoGLIsMnJ6LBNWrUQzwHWRg9cGoF9ZKH8YE2EauljZ4vmNmV37q+qOS2AVokJk4cZZwswUa3ibrhW2isizMF0uhdzR5EzYUlipwQPgQYM6/GNqrAma/oHqRZlSBgj998vY+ybuYAEoFUOcxDvAPGRB9TvUA4F1TGlOvL9+WZuDVAezsUV2uTF7ygYnCOwxtGQD6OUs90gXj70Vd8Zmh9ShDNSnY2ykA2xI81IkYjMas3NUmYHObgY8nRnUFNm1FV77qGHR7BOr84Aownz73GFqGACq7Dcidk5elwPslIIDr23wpslJ7jDg5ZJIyqLm+J29BMWd6AXCDVKrzZ4CtFwo6n0yFPTobGt5SIrQtzCqzepQShSlxUWMIC/2QOGaoaXNb2hcE7j/sVKsO0I8fZK7CAPX6wtB6koBNpaUv4IgEQYUuAbtZJsYoaenDkBTSQ+pNuwBXpQQSFHy7BRz+MqCYsxtQDB22GEpeSAGqVDI+lzxDZG4xA3PFwqnInHEvUo2miIwORcsxdd7HpG43jDu+kQQ3wryVYk4lox+KFIoMfaxTZFYgM8viuus1xrUI18zcjCjoYEpiASsj9Hih6ONB0VOFol8L0DxoMQTqTMuMboGqiyPC3M+EPxSYfRKWLwCDcQEdQSh6grDUDTIz5zUJwD7Vj2/7lvEJRuzWGPBA8b2mnwhLKzekTVJy1AOj9fce4MhlDAuOxU773KDUC4tskudUf80c1YYUdkppsvTXH6sxZluEGfEFGqAaN0wpM2a8
+VZUMGfW+oXM1IJRE9Yz9cZ8/vqArB2fwK7nA6CqUQCW7H9tX3zHqEmLEDkHYFxgozIMxPIMPFQAO0OyrkCXcVAXO4kWsFtvxW5E6/KJ8PzIOJiJ23zhvMDmcXlLZuibIk3JH/PjX1uRoIQUizEDxYfRsswRBZpBye2CoWNmukxRU+yQiagIAzfLCk4AiwDolgtaohpTzEo+ZQo/S0uwCVSNeXunoBtssCyLqPdYboVe0vJIsacFBztTZsNiTDWFD98BAWmNTdtF/vmSG6D+OaiT2wLAd+Ofz9y3yVsFGKquGPrrAX5ToLohUS9dQF6DqlGJUGgCdg4oUHgCxHZB0Vv8+VJ+RvhFzzuz1V6SSfH9QCKWojph1EmYuWOYgSKBbu2wJHEKu8zBdKXkR+9MNItIpfrCnAl8CkwFxgIjgSHLJkrgifzQDh7/ERodQr8toR7ADGMSyhMwBDi0M/9HwF5KJA4E7kQiKaGBwLvAG0BdaKYubBF2Z4DcNZPlL4TH0I6o2joDhBagrk6JwD7gvSIBRblKkdDsMhOYhh76spQzhjyNLUYbGn0JDAXGgPgLJV7WqTGXZuigCtgOk/d6SfFZiepnbJYsC5BhoUsBxl95hD82lPN/xf9WgK9d0NQmdb+xefN7rNX2EyDwzcA6i5c/l3YsU8/Mdk1FjimLMFIGvSFtgyayoM5FC3AwBTVpXCyNwwmI/0f6AKqyCUI/ToBRWtH+tYpMVMB90TjCYGmILH2+QdYxKTNQfqYxkxlgI828yyzOhmLKFcUmZGHcbdXk7imGda6iz015DEV+mAj3XMN0HHSPlPUJbFI2yqz10xczPdonXF4Atk9Bo0wZ5bMqjZh0Qf2vZQbeSd8YP5YM2CAqzpTqio2o748AGMLaQCjDAEsdZPz+jURIW/9RQ7AiKfuA8JGU8JKBAsWeIwIh8thu+SuyXviLOqtH8iRTj+1xbZvvyPZZdYC/A2ow3LQ8fG8txDKA0imobkQI7T1G7T9QvpAFuDtthl73BXbJTBUllAaoZorJf8+EuUH4PhEmlWn/RGDAHJYCQdF5HWNck2VR+JPaHHDaBnEpYf4xlKMWZVgTrSSEfUubwLwytLEDmI5Bn1nY6Tqlsa5s4wMXgD/WXr/fsBz3BYETgbIoYjuZL2pS2Os98u2XQLGJZZ+lsa7YSs5qJPBGGAYJ24a5YPEaej0qvMdt2vZnWADctB9gzj/IdHcIWMKzzCFucrdd6hPguMsAFgVtVDUE+JlQ/EMDVKEB+B0zED2h+O81JcoEfT4jPM3JXDoMfbI8q3+gC9LkOlXA+MlmhRGjlT6ndIDUmYvaiRFgHdT4jPLYwy/hAhBUD+aa4HkCFCywJiqdFPUpqVeGcCW1nBeo+OqbsFLUFagy9Iw71nyPhxRMUkJhOcPC1SNVXrGa7jZF/ipMdX1B+R3bwDnfagKqH48Asy/QyAeoUyCoyIChR0rryDUgNpdE25Qo1AROqWKDUeU/wtk/Qv4EgL2nnB1fuQCT25RQgKFvwxDWrGxmN0rAz9SDM0KFSrZYUKdSIswZMi91SszGhAuWUXf7gOUDJVZD8mBAkfN9U0C9XFBHiuU22EO2EnNWxg1cMnT1ztB2jJIvUp4dijj3UoSUSNZ2qRP9FMp6xzjMCizER2SGYqLzUpQ9UfX6iqHEOZMP44Si/AKrFuHhU7RERPKRIjFJjtXfvG3nKMA0RwJkmupU4JuQjgOc1oKhj0eJOyK0fgGqtSWhuYAoa40F3gUkXetZ4JW5Ryjp7FNZdQfUTRcAcRbBmafIC/4DvLcZwGagVYFVDasiNyT8Sj2SUvLkB6HMBGb+K8wlk2caAkznvDJTcySQZT6muE24PBNgSG+wkM+sqYNJgo3KsHgmdg3YZKb6rU+J7QvyWB51dSBRLEHeUOBDCjCqsDjrM0KX+jZvARj22kSh/wXuqakmlR/bZjywk5Q6QMHq/MDn9E/IHDoAHYkB0gNKXLwxCocwD3w/
Y5G7Z6r9tkT5mQo28FZ+L8u48g/jXqnKTR8yNGcCfX4JeB9SML9OqJwx9fUWoBqm3O02YMkYfVijwPiammZRMjeqkmoFCJSoKfdBCKQoeN8T6BOKFcYlWICHFsu2zpo9SF3QSJYXZy4a5x+s+FVCQq8U2bQtXfCl66AHz6AGKzYFXykEenxCug/q7r5QNgxaZcRGErT8xedd0CJFN4KkVLERJWWqjQjpA+k99bT4XAVN7vB5RvpNbSxdkeUqsljZPVAdbADiOVDHY+mraQZw8KNgv+pjCjBlnCnjr4zATBYKBqzrgweAAajlnbCkJrjJkMSI6h8m3biTHNgMcRYknqXUks+asjdUsrpi4F4yE8VunKH5AQC2YZZugabYIPk/SsW2yWL98yM+23cAPhMfAXpPAcwwCjj+ZCDtCAQpETmkvFMTZhPgLUHdIAUdWERYwRcqdjARs7D7x2S4EeHEF/GDlqnJ3S1RQELv2wzkmbNCn2I1PVHT1jejfoaMit4wdNoVGFBwVpdQS0CKc8rMzMMxIMqo6hw0WxdNm4w3VPHHiqsYq+BcXVCtd8XQAwtgvuwnsWa/p1HCXhuAMVfNDpPPCRdDiVoAYN4A3o1Ak/D6Q4nWLkOFCODngIGWX8ClBMYyNvR+BDCv6wDdZNY7Z5Q4LFLi51BCBBdr1zaE+SXi+kxWaRE+qcTgLJGNm/fInPWEORDmu0CHWrrPEnoReBP4EPikyHeSyS9ZH9ekAfi+1XybzPt7RU6eeY1GFO5OAEw/OSUnxuKqzwzhi/kprEPZjOJsU7O5A6ho0C+mcrdLgGuRjjHTmC3OqZ0PBAD6zAAULKdOfQcvoM7FBaMeC4xajgnYvj+17/Q1Q05eoEddyQPAR4nJcHrTwrLBqFAKANNhq7jFXJyCR6huUcd8l/L5JkPldQAmj1bi5hKASYPNeb5BgVeqV80hYK/OQL4MwNb6KaeNNqn3TjidU+CBdZ6eUeCL+Z34GXPI8mGDAIkc5jQ7JbjMEMewrNxXnxLb9wJDYbKhYt8U/2Pjqas3SgTYeDpxRCjeUOTLL4pxb6f9dF4DsNYNz27/AMoJMpQwlMC5rjM7rXMFGEr0QN3UOWBrizHNFhOHJPSbpASmqoBbgZZAU+CG2WjmpmFSd+YrslTOR4Uw3WUJWoeM6jCQ/KR43gMc1Rm4TRG6B+TVWCMWXsGs12aVeae+oweCl6Gmo3+SeEI4kNJMRCMl9MkeYeMPkGFlTW0iwBZQMdY2xXLCUOWMGxFnvnyMz0NWeJNttkst+ooFzzPCwVTtzPfF+hlY/9iyrh9Trpijhn37uS6znmUfTlSVbddUACjYK1pYjsjEZ8MC+quybbE7AH4lK3H6C1g4ElgA9PGQctdBgIvPqWU+dSniUUQ5czIHvxJS1B/MAdC4Nqr7RSUPASrBXM1KKA1Y+pnqagdQHTBmck44CkpBZoDMuWTyQ95Wh3AYpt7aJ+XjRQC2fMrc8rHiV2ShjW2+4Txgk9k6ATYKmrrMNras0Ypi6/ZNqNBIomJf9BzA/IwA6VPQvSFoIg1aIj3+A418UrTqB+DcWsUXpaje6wJw142l36LXE8gCYORiKbPS51GAi93iiv2TvAJwDsBGjX4ITyVmNHEB5TDo4lxYzNucJgDbE2bhsQzxKmOyBrC7w8JdivA1AwNSVlMF2ALxXca/REWMZYSloRX7IGV9dzdBYeVin2Og4QgTjDcAKaZOkRVkuWGbUqFNS4U2LTaWBYlOQfdZA+09UkeMOipzSrEeMaaLUQesWY59/ndQtSwQDp8AzsUBwGAIemz7+DpDe//HbQNKJQlEJOpIBD8ZcgLCbArz/2QM/FKVjp1JqMOo5ZDyuR2Gdk8JsFexIs1bwucWRSZpwLELWo6JDhYcpzVsolfDWrwUCbMPJsKpQYT+ALiEyQrCCgmgCB4VmocXajxgizgYGdpk0aCEWMbHV/Jwy9Oxb5+s/QHj1/cl5o2l6VLfJjN2nqnB
dCsM/bAYu30GhmxgE/2SEIuhiqzRRGqE47X/sXSdbcnsQPT7+yviPlsA6V1UihRRLCiCKIIVO0UBRfHH3zln7gfnMDWT7LqbZLPZhtw99gD2+SvAarwQXi6p6z0T3k4BTqMBwMyMMJNdwJAGHr4DJzGyXULEoleCFvb+Ju1DdLa7FFp4BQGWTRWWVNhiMLw+KZBaUhge0OQywihf5Nx0S+GEgK9CNAzeNBQmck3DXI6AqV14j5Rbp8nRswqLLAArNYTDki/RzZifs7GvHC3HU8r8rN2sQ2btifDHVsNYF8yLZhBTTttOxsteuooXntsmdAxq15cA3x2ohf8H0TyAxjdBU0NSSlKPoE7DAeyek16T9kl7pBekXQbfvgVs+0FzDdCBzYIuNJJ60Gj3BnTzA3Sdpm6btEPTyYgBf6mPMeC6C5rYBo0ytUQINM0i3Dzr5wVpZbd2AZgYa2NDfPxm6s6Y8a1RFuBnPWOsecwjpbuVZY4xH2iEgQNz0i9SZuXcE9xVFaDlBhkkyIyCLBYvt4nHASnTwduiIo+CJmkzUMsjSk5pw+pnWO9gi75sYayrlgo95ghYZy76T1Z8Q07nDu9PHfOTB7WwCkMAazYE9vcBOco+VhSVzwD2XgzgBbYoPKK5U/oCYPWcMPUsVaUncvjkYse4dTXsrlGXvqNwMWVIPDsX3deCHEYwopv3KBwfUnj6Sr8Dh0K8Z9Xh6sMOFwmKKMa62OsWudUtAbMb4pVvMWLmQeGG0Ge13bD8K2Cfiy4g1QR1MhWAffyqQAu39aBcA4C11pAdkAmDfrxThCvyufk8oSLO2M0gNVjVLcFLP4zz+z/3R/jZoCWW/p6b3zwt9i2AZ7u0uNhmEFxyRFci56RnWibTjp1ThUVD4hZcJxfVFNxn2ic7hJyfYFHnLpj+uE7Dt0+q4knC/SmDrPsAs2sGXluSaVHzFFdI0/zjhgHxMEosbkFLbwzbZqu4ww9q7mntp2pGn9mQOUirNk3oBHReBcW6lqZ0NH8B+4+kTxRh2N2Um0Ee4La+FGiHZScSxqKdN6MGa/YEsKkEOIcQPQfUX8ngWoE987eUa5O7WxDw9KYpHamBwjWh8sACciuAf10ZpuiMWZpdiVP1BlrWUh5oZ8WoceaHgAwrbLeblLWZWyZM86oPsHVLZlQjDFm8ixfOm6Yi/2snZv+c9AfUWi4AeE1SmE4C4Nl7gF1St+wH4BHpCd+MFFFnRXgCxbuO8hsvTQECCi7ACcWUSyskFOJaxhFN0gcq3FeoK1QUymq5rVyRKeY+ALVv0ASzxhsjJ2ZjxHi1L6o1ON7gODFhFhhm6ditQWLlmaY9a9CT+o0UaYaUCdeYveWvKrD4MT0wcjvB5tr8cEleIQqIaGvMeipjTHdUUGBsF4tcYDIHZNis9pMPEGSDe2FGD+gRKawz0oZDrrqjQnKef4OwJsJTudT4AdbLGyC3DooXAESDrV5EsyRn3X4TnG31eiQ4xwC8dHhq5vR1P+K0w2tEYoDNbQR+d6nDKrRTs3kG6tTLhHSG8EPOwuwP4FHhim4P1NmtFYWLZzoEGMUaLambM/1FXg0vqXpP0jA5JxeLAHA4Tg3WuJ+aMLPxtpjAFrOvMHaSue8F6de+J+Dhp8CbzYxmaeUsBUaYMfTWm5rT2V3lyH32CPl3FUaZJea6wYWV21FO4rfkvz4HcN9cwPyWslYesOOBOr0gwP69A0R9pH6KsJRJXBPHhMeRCp8ZAgM7EdbPtJBDcukfABZ0S9zHHu2jLap8VQpxRkP3v0lHvQMKCUD+WkMxufE+KDqBiNQlE2NA654VwvuuwrRDjNc5UThVkEKupE/4CnDQG7jiA/oruaz+UPY1pkVqTuEHvoApjXNOIb5+LsLUgNy5RYfIKeBnpsw1LXo9gLeeoSEe5YoQe20I9xqg5cUaub064dev4FNYVwjRcm+H7ni0jvxYA3eg7laRUKmrMEGTXIWlB9PMfcHylszS/mNda0ma
v8co2/kFVC7ItL8VNK0SK+B0luSwOZS4jWiCBVtSiF+OSV+GiD8A+2UAsHBU+gavJvTNh0ODpgv4WwP9+aAIzx/60hCP9EmNKMRjDOGO9gH1O0btXlCFmbG+SQTIhCu0wz1TLKwXCj0W4ny9kRtYgGSRsiRj2K0EoUKVn2k6WPkAOFaoK5wqNBVOCMWFFsNaJJlI0k/NPcPv0dperxFwJol1dUshB+iwgexvueQMTKoP6uDNJoAFcJvf5MorcqF7gB3aJbf9QO7cJWAmf2B2P0EtrHCWiKQuBnMSY9pUOKYFZt8HxrN8hECQusm3cqrDfIgIayzGr6UEmIEz3iZ3+UzAs0eBgwwAT3sHnBAVu6McYJkGvU+qKyOsmJqzxqguNk8Sbr5PDr0yCYeJUOGwmZUk4x+Ty7AoZ9BR7hyAV/OkRpmyyqT8a/mf2yR08eF147zUATuPoF4iRxk2mro2fxnabVOFF+fATBXo5K3jO+ZykGjhOllCdsAYWzNCsEhLfgBedGUK6z0KcY0WbmkxCv57xaT4BMgzlnV9RxXm5sWwRjc78E2wyDnKue9RcsUfgF8LDTIFG0uRhcMoTkK+NSh8OqEQU1rQMZZVpQ7vH18bDGi6MlToAVw8Gusa53YJwHVQVC/HVGHnHlisUfh3BbBxOgocH1DnjpUrk1vskLv2CPvfhLsUdeNfFe6yuK0AAJPrIvtLsIAFU3CaaQor38rFCF0KPXxBUnQPGxSObXKnX+Ryys3KyrFa7qALwMVOVO8v6j2nRYyFF5iW268whX6LHB63dM0gT/NFAfDl0Cl+SVlbVTlyY9YlyezxEgHMGNXpUO9hbHbBuYgLs38IiqmIC+kj9wEBijAJccFJiAtOQlxw9uFCRrJZMlekPdJL0gvSLihmH+Q3o2KSQdxuKcJcw4X0i5mBE2RJmHkQZm9TVaR4cnfBSRHxiYDa3T8au6AYVFxw6uGC8wsXnF+44PyCeB/7AH5656nGKXnBiYULziJccBbhgrMIUnZ/BxCkOkhTTB9ccOLgwkReaXrA3790mDP27oAiTbzKFkkyD0wjXBg8TLrgZMIFJxMuOB1xwcmEC04moOgp1etUyIXzkivmL+XGdUWmBRqvkFKBhfOXxp8GtbJ/gFSBpkFQrLG/5FL6SxkGvwGwyP5SOkFjwuMXIPpNg68UmVdQbNgF0S7NHlhwbIcy3Cov8W1HJPZ3r3BLmP0od6lwweTOQbF0/pIr/y+5jP+Sq+AvuW7/kuv2L7k6/5LL8i+5FP+SS/EvuRT/ksvuL7ns/pLL7i8N1lxfcp39Jb7jCO0vKauDlfWXXCPfMw4GrYBHhXeAheljcB8A+/aE3NsPwesD3BQBb+kIPaAI21z2+K6OhGhlFMIArLUWV7wPKLDcB+CVHciYhrdVIViMZOP5Pz7/xoB4WUcMMV6TQt7pjFd3emZBvXucpcH8mpzrMlLwjCGw0KNnsBuQJHN4TIsha4fXecTOf6sylu9WQxTGBgpXgAydnZjGiMjJZ5tcCNTpnQGs7TLBhz3SDRbo2Ca1Q7qkojcAuOgZiP6ZmhHo9jrpNmmAIb0fwO4UNP5H99ERRRN6b/E3vbHeVugb6SvoOjNw6z4ye6B4RVp+dxkcO4wIMwbFEkBRkzpbDdUfUvNEWgD10E0SM4uiNeaTY7lRRlmSxhger1Zg63467o61zh3A9IVW1IertIo2KYqRRkkTpHHSJL3/SrT9cwF+NnaY7RNmy0Qoj7ERsX+H/A6SRmhfAw3QMsMGs9s8ApEeaJJNXGKVkkzZrbKhI4+ge2m2yHueokvqw6zMjJW1/FpZHtrkPmmdhVR5oJIsPckDkmRuHRaL7VVh1Gdch9QmZRNHNB82XoTHIcnzJflASmcsQnBMqAbqYv8UgfKOQhWwc6XMESD3/I8fD/UD7GWXsiLo7iOog9kvCZcD9ewpXTsLwDqNqxbN6jEGwMcwhfthbCd0xuC1IHWvRS13STdGQvM7plYC
3eBvBy98OGZ5CBreoMc8wjDROsP83ZHb/SJ09gENxnSaNyqbAZJroDg2EP0ApvfKEIonLNvRaOcAnCUOzwwpdTVglTFHI03yAhpkRXDsJY+nNmuQJ01RwQT36IANuiTIkMW7Bdq6/SSj8ABFpF1d+edfADzsDOvKsPSYgId2rlxgIwC75ZGLhAHxAOhHW523Cakv9VoC0nT6qVNzmyA47wzkzLWsFOCzRmZrCvgt0a6u5ni/QMzx0o5A6I9QiqizTUj9ssj8Ex0qCjH6OVhMLw4HrzR5OFLdCSFNoYNNJYTDuhIxGd2Se8jQz2JT2JiVc83yRGXnNDymhZPLqjDIVFw2gju/ovCdDs4GI1vZA7WM0hIzSGLpG1G4o/n5XrTwe4WJ1utCdZ+AVZ4hY4dqQc7b0Ka/H1P39gbA9I7LFc3iW+2yFB8DOh2W6dzzGNrrfQVWMaCNUGB4NyPnrieH/g/gfnSVOyGHsSugDsAqKDAlhYLCBu0xUQUupBBWcAD2MXZuN9iw0DMPGhfvtQNu6dzDhugydAzQ/HcJqA+p2mJ463GhgA3nzT2d75kNWkN8WvSpMfd7Br2n//0DKT7uwTlpKfMvT9E36RcpozpfDDu9IeM/pvFZg9x4k8WHsUm7wYp8YQbvVOGUEMPasRq2CXh5VqB5qyUyiNu8AGQYo8DGtidaipckV7Xp1WbF7Jw25Wyp3BZ1WEgpHDba9sxqxEQ+2fROXM6qNb7QtiZHsadwRSjPAal30NA3f7+C7j+omp77d2SciEII4GBuHFxaIckI/zNRhZhCXqGgsKWwrZADePYHQ9YJWVpkM6D1T9BNWmHlzZr5TZHGQbH3jFDGTyRIVc6CE4yQYEoJemFnxzUZBf6ytI4PgK1t1+T/j4AluWKwy4paswCdDkBrbBW8DSeeOSZvW2yIJY2TfxRpHfNa/7MgY7K0GONjx1CpbjqjkNQ82JAJ+mTuQSu7oBg9SZQhPZPrpEy5MtP0WDMvXKbZ6EC5EjnvWbkqLXMqY+zgozIDhS5L4kkQYbvjVT/LRI9B0T20jLX8AiQboH+HFDn7ADwBsmRg71DWmQIw2LfkH5tMbsYwNqjrqNl2jrIIaZg0RhoFXfInTWzMvFi8Mwq9YYwPMpiktNg1FPoJ6m09MZUfpudhsRe++vkBaIFsBuhIlypz89ZeGfKIpWOobvHeb/Heb5l70hgjYSMxiYc5aov9AGFctopVYa3cPAE9G4u9RonvJpnSI9utwYqhWyjRChqAFbbxFrwE8DEp3O0t3t/Fs804uM1bJvJAUZ8a3OYt3uZRDVYyeM5jxDzDDJLcI/0hZb2t+zRNf/8NNvDpMCto3J8UYP+EDAZAQbnAXwM+smRudwA2dgsKmiuPdo0rQm9F1fZCuU8F5X5C1OE15aCcLl1C+VdN7lTYAXgJOtjLC+o+NK0eU0hFqUrlab8a0x7PrZHXgwpfyU0atLxjfrZvoNAnYLcgsSwx8mZa7TVZDJnFYrhOi6ND6rCsOmjqjO/ZTMHBO9hBo5HcAp1+A4DJFxVsPy/JRGMsFm/Mi+PXCzVpVjlsU//MKNh7RAAvlQVN4w30Xqv2x6zcRUEbO6I5JqnDO+pBs0VDu39EixlzdEc8lG51n3B4wDq9avs8sBT7iZbZllZjqQflj9xqG9BR840YADvdCBNn7qsKKxJ+VXhTYFXqcuqGuEQiZEIHoFi6FzL7VVAs4AsZPCbH55z8gN0YKDZXCnEdH76e9QJwsLJNwKM/nvSI8TEo1sqJf9kCeBiEhcx6ibQImnVA65RgW5qQye+z3BQoVvSFzCYL3wwyUpZG3ho9sFAoZKo/pKxHdUX5M3/PSJkoNkIWb5fpWtgjUmSs8wbL8NLfgAwrbZ/S0z4oAGpxVuR/TR+ARX8SLU19mM2I1X4hrvYLmSkTtr9YkwADY/dxfPOJsbD2T9yrpyySLYD1fiGu2wtxvZ+ow9cAbGIFWzZ954oiprI3oQf9sMYvzD1r
wtylJoyvPIGOSB9BreUumSGoi7X3YW5fI7QAamPlQJg71YTlgnWjMpoh6zDfMhZNQwN0KXOxqV4Y3/xCCf41hmvQDq8LSITjngaa0aJMzlq/pdD3zhiob5grwsUNo6swXzEO801osU6wLpMB9XVbI7DcAFOOsvZRBsMWJWJ19MC882TwjaKwQb9HCvMxx+mjmnUYbHGoHIuzdzKAMKn990Wv4y3AkolusLmc5jUzoMKes7LOI9vbvmYTeWs0t9pXhM+4tsCCAbF9fpjvHwvTfwOUPMa4p6YUo56/7f4TAO8fh/H9NtADOoZ5SINNOq7YMlgFLsy7tHBEDkIN4IXzAN83qPUYoqo1IVd8Bcz/KOu9AOI0wFqUCNeiRIybOgPYt4zjoHsgUfGYN2JCDmV4/0/ibIGmNlSvOWA2S2Q+Rjg+ZbzeDl3/t3sn2Ixq/+4TXhN0xmo24favyE2fFW5VGCXcjSncsjXYiwpZIQuvcIhu701jHpLD8wbJIQyaZWlYGyD0ALSeoRYdF7GdbCqnBR+xkvZpGtCIgaZpHmbtXHwyRWAQodm7VqGuOl+dGc2ZkdtoAWpsT7utZc0vCOjMCqylVfhF+yoPgBfbUmDD7a2BJllvp/Osmj1qTpSpKpQJ96QbrLwd12bqS24+Y+G1S5/BoilhsILBJ3f0Q3L4sI/PpE7Ubk9hHxC6A3V6Q8LtOUOskYkM1S4E8DZryiUA9veI8MqIbrRIh+kZIVgFrJ+Cbj7SC6NnEbEsN7tU+CXggwjihM/UCoT2VPhObpJmKaU8hSGmhc3NfSamBT9UGH7xquX3aI7v/QiHLVNF975OrkbL2IQyPGSFqqX2BAy2JGKhTEYNBmxEp8Wk/KyW02ddXW9DIaewqcB8rTct5e2LwsORcqz1XhY0wuZw8aoAYJeBV4xR+gYtMHoho1Ebasbj5WauAQEe8q1zTWkLENRG6P+fygEhzCa3sZesX27gZwpdwkcZYLdcAEYbfk5G+znx7OdULYzpmlNRnCL8x/s55+w32FtGRKk7Om6BZgOgiWtQ7Jzp5wSzlLTnAfyaBC5gfvbw/RxM+DnP7Oecst9YdTq6nU1GnFJTAK1SjxliP0cUfuMEGGSpdVwMGHGXDFbqQnbDmNjE1m8a90zmkjLXXSOHjqCfk8x+TjL7OckssfGeoZ+zzH6Tb4OGGSXM2mNG2c+5ZD/nkv0cQ/g5mennXLIUm/EBMJfsN52xtgTb3MJ/ip8zyyJrF2n2SCYxZ+lvt4A9luL2f6mKq/kT47mFT1qQsatswT02XVIpmybJpJJsE8wPi2mfhxJzzFIM9iX0c5Lcz5lmP2ea/Zxp9nOmWYw22ORJVgBTzQG5avQAPxkyLx8KnwpzhS+FlcIfAFsWBuTuQcZt+gD2bVq5hBrO1KRBDks0AzJkYWRrPUZo0sHyHdBk8gDwthbkpm+ATfXGd70ELoK0T4wYubPNUrsVCrsPqrtQOFE4pcndFR2O51o4c8Dq8YDZeKLInwL8aiMcHdE6rPXF5JBYhAuAxVjNz2gxf6XFm9pj2T64fYUaK4O1K6IraOS1mOpazArXVdFhMTXcb7XUc8IdQ7vYnUwc+lFyqzhLP1ThW44QvwesGmqf0uKkEdbl4lAnYGi0LgPQPQI2WBbA8t91s7tNC7xjsC5DQZdckWD1Dmj4TZ192qX9Ny3mVHnYoUU4fIdSLC5rAMe+ow6f9BZhkwXYdxsKWYVNNblXbkshr5DRHGJqwpifIyaEbbRg0VA4VNAs70oKZYWqQoUZhebKnSq0Fc5YDjZ8kNB+rck7G8XG/LdwZ01CnqGdIzbmxg7tnax6PyrcqpsWjiVxaCPNATvTC4cOtvj92YyVvNMccgRMUoMr0rLKmPZCG7W5UKH8SyWlwa8V2oTwlnJVhV1CQi1xnYXwTbmQwgEg/kjGvlXoKvQUBgRLI9sHCscqLBECHRXmtICVcrbqNIdARWFf/Z4V/hR+Ce4nwMX3dsC9KLwr
3BPwBh04zdOtq/CBfvmOumuCriYYzCt3qFBU0KoEd5TThvCzkeyAtlVQi8WerwK4K6Fxo8qtKWRUGFR3rVFMS49pQWt3CnqIsJMqLP+PMlTOVXBUqA2CeW801v8lyKmekqFdmtB6BoQmoC4+z5QyuwHV4AvcxkYnCJwfULwHteohmkfp5QXIYcVbiuv0xBo7KohvYAqo7tL68ZO+h0VaTLcJHzNAgiVilWCKq/RSZoPGjkVXu8LY2J8yxXVzIupqAe8MXT+idZ+l4pMkKa7oE83Rtdpppv6O2p0rtAjtV82LVbPxqCvFFX0prugTrxhFVqEA2LpRpxgByy9SZsbEnaX8x6blsjdWeFd4ATi9LsAqbwDcUB2AAbtY4Dm9yH7atGhtA7zEHt1wu4PJCXVYIiGWDh0cTPCJbpelOvU36nr7BDSS6CIM5nx0lLsk7DUIeEdawLph6IMvQEzdhrSwwktyB+RclxlZFeZQTjHL4DEt8FUjgNpfasUfflg21omlTVhLyd4x1JwVd940kc0yhTOt8ZpH6LBWATZlhXm7qx2aL4IELCAXIS5QAvdMfM8CnR2qSJt1xUAuttLKGKwNy0i/vgpww1kCxjmABsC+ngJ25jR8ob3bjQG8zYYCvW3nmzq8YQJQDgsaAJaCq7CuEFIIK0QVYgpJhZRCWmEDUNDf/0NJoQLAlxIlkcgNwJkyoP2rvllm7nw8UWi9sTrpb+Uo3KiTCbCm9oOWjOfuYnjAHG28UCJwySpaD03ANEdDvHmZMWUt+u0AgHd5pHn82pz9MSFCO2dOC+teOYyVJS4WS4nJSBPZdBW0aCw+AwQVAgqs4lholKu1otL/2Ac44y5lLcrwOiZkA3L4+lRUuiYzwvEbYLcNitVdUa7rEgU2Mo1yRVfU5PZIGTpHI6zxipr9OCVlhsemARK+1icXZBQ3ysBYhxXlirAoV4RFjYX3TkTfYda274FOP9uA6jUtfHnCuKqqJ4VnAt4IF+86y7LwVg5gqHCnMAZslJVhLb3ktnIM4qaPANj0O2qw35SI8qwDFptJIdi8RGSLPW2vA3IPjGS5jI6FaGJ4ZFE11yrP2bAWtqIF96P2bCnsBxs1mLtGC6zTec5jhNVpUa5Oi5oCkwiMNFfWu8BDF2SSWLAmgbGLf5Tr1qJctxY12P4aHnR0Hljzzv9N88r6b5wAktqU2DM/JidAAOB0PYB1OwD4ZqDZHjW9HwAu7DG5UE8AoSkovgArAb4jgLiGacUp217R+ieukCZgYzkxvCd9YXEti16TEssL0izLkrCRsTCdDwAWjYm1r8jCmaJzwezwRa6YOWSKhyw0e0R1iVl5AWq8LT8gOmTMB4Zxzz6ZKhPBXJE4HRGsbabo1mxA45lmeBwZM/jkTczwZ4GRM9Q62BdUHNtlOuI+CuG9wh3h3QFEmOOK7WdVR9Q0mUiGdQxoi4W7NGMVS3LSxI2NrXfixipOAB6eCMUN1s+LrEeVtz4H7NPADlCG+ey4Cf2qzwjwF6Ueu5LE5X66AfiogOIbk4i2zjBBULfFMAFq8mHQegN0/YIhMZ4Vq+KWQpahizWFQ5ZgM1CUMusorIZUbRRpgLWtAqFn5Rw6n24Cai1Q9ByFJkFjrJCDLUDipqEiBrMWPcIBwZ5QiL2ooTpSJ7ZPeR80whpbt7fMB5/zEg6vfQl0PmneZaVjbGhng/l7/r5GZxbWJxMrfKrsHPC8oohJZGiF90SkBVleUAuYUVHQqvRfKTuje5Bl4pNaomftrDcf/UMU+SnqEyp5+vuHdGFNK1JWQg7aGsDbXAdYvQ+A/bKjuji56CEgHqUMu44lDPY3SBjMC4jTIyPFz1WfI2AbMwRyAPtp0N0nWjs/GpsGnr0AfJ6C1pV26OlrUR9gOLz6mDDRJOk26GaJcmqdphaGzw0CaGBhRz3R/TCcG2DadSaCrXBFk5sCGqwjPkiTMDtMxjpgTSp7ZN612vmW+rDeZco8PGAQpqZOITInpG0m
YrE0J8CmxQZ3IsOnExJmGqGmPWLsw1+qnp4UHqhDbzJhMgu1eGQZh30C3j8UwzibfcZyI3PQPeqdd+blrn6Vq9KpbwMKHujWMw1GbMPSHaNNYrQejLQkacMtYy2bhO1jgI0e9BaX3W+ZnyhpnIrtEMDF2xVbxvEKgA/1WV5TVS4C4mPK8AVP8Y2BesmJFnFAwGsviERz+25OIT6pJXGxa5444INkoiulqVs/J/fARJypR+EjE7HxhooEw3vDWwZffRYVXr8T2SBM7oE5ONi4Rzg84xadr0KuQgcPX1gX4Zwmk1OqOkzIXVFlYf5QuBk5Z42FVpirhV0+JZE2G8F5Z2l2Qu3DGjihuVYptCfrykkm2zJICQPc0DU5zPcI120DbHxkSrifJgD7fgrzlqLhjPZuc4scblrbZvIMapUswO8RNR8+BY/m2GBHLJq/5PDEWyBKe+frQKFM4fshOeypJNzwihwWpAg3z5PDMhNJ8oG5uoNzhsYrVtsyxncBDY1VpMzCBm7b3FMVXjeM8RBWyFD4tFKOlbI3v+mGT8YKt2C98ZoK8mlRht0GBeJLuvm/WDRWweUNHpHl5RIVIGDVZ95g6awweLkrb+zzHgBfXxJmOiFgijMvPdcSoZWkfYpCC/9qArcuIDdQxqHb8f9cUCFENzyazcvF7w9Q/yYTiNICdw2BWoeG0Rl1a8ykekh6QHpMfYJpV5vMCTuQiTE2FM7z4RyivircA7K0dpobgLRmhM0z8qb2AYrJM9EXWY5fo2EfF5Hhviqc/w6QYY4ZNmPSoh7DV5TzzLSq6wrMwSrEACs2o5VnxUYgHdbKisiRKnBxRcFgqrIgvZMmGbwlU5DB5YPCE2D/GtTCmimRYf8gGN4BUmRsLL8SGVYZikVtRXu8GSSALfFF14sQ0PJigiXVwmGBkbjvjynMfgByMVDM7onz+ojJBZYKCwrrDGzfMS0XczYCDzVA2gfaYAg3Ty97+EMO31kU7oElxxgXO00LPQINs3b4TmOBH2gUyrrV2Dz4MGOBH2Ys8JOMBX6SscBPMhb4FcaCidDLnbFhKkw1ycLcgrZc4ZbQZsL4gIjUJcK0rUxTVazSVom1Xauwjd7kWGXlUDwSyrsAvO8mDBYQZeUfo61wqtAE2Lf/c8fkrtUbMxjCbatwLwrw8EGqrPmt0v4jQcOHhHIZGvr+yOEdS7G3swALb3tmTemIDF6cFosJQ7n1Ig3xUoEIsTgXoPV432HId49Cq0Vh5p1CfF9SuBprkH9m5MFU63FA+GIsCy/SiX1gjdzfFqD8Q4u3Q0CBibuHSQ3PdvLWxgpDAJ4xigorYyRE/k7hljGwfZhwBdbXiY8Iczm1NoxVjgDmXdKmipJkKqD24hTgNusKDapelemTuaPd9znhNqchaG7ja4MbxsGqGzF0VZYm3IWpst8I2LtJhL42uYsquf0d1WUZ8uuHXIjlOBhPiBCrm4SbMtjGM5kjjTU8A3jYTFUM0aOTlPFwB6AVWNDSPU7Rr8bU3TNGdjpHgMyElOY2vsIlBodDQjtAGIYIs0/l4nSOXxNarLfTZ3vaGFqDU5O56vySQk7O50uAlXoGuJjrzXEpcs7giadQmrnlKQEb2ObkPmIDUjTAsmQYPFGDFzZyZj/JoNubWkSMdmnaoflyXESc4yJiMfPt0Oz3kwYYGEqgiwF1Lr2wwjjHpb45rioWmiXN07YUJkPHBAtL0A2TahL4rkZvJoj1xDkuJJbg2G8yxxXAYrXDOrnHJQLO0RwXFOe4JjfH73qIwrsi1GYKH/SdTRiIsW2MYEQ1+FX7AgBLhnPG22C7OPhcihiusykcvGEihthcT7g1NiA+gpHjquUcl/+Kvt0FRNhweBd/0/z4QPGe66ac3D3CwgO4qSTBeaEwlCD34wd4m0+ELTo4vbpybYCF3aU3zcRV2RCQHZNJ0Q5bSEjALEtpdMncHRF+IywEmxiLufWlIdYpxCJbQImFhDXV4YnCPqFyw3S+
7lX4TA4PqIRrknNr5wB89HTT4NV/CdVllSz/ALBgvl46Sqc5wR38EGbMwMbXnSRul5bukPW0tQWdwRbdw2GGrLLp3MMMdf4rwK22apuplqa0K7M1nbcNLSZLbq2lbSbH7+zC2Jf/BpiI+GcVP4xPjkVxiu1hi58GO9MvDPZnnxtX2qH4bRxpgOKXwb70PwabvBVXcqsV+DX2yb9Byviwj/29kQ5Q8U76kgIPxpaMik/Gww7xjwabub8YbBr/bPC5n7MuRSMW+s7ixsZBHk0jR6vYogIbpAi0yZzJpVOgY/DJoWKXwXrUXJJeGQ97318bqW9xYLAPTrFETZG0bPChnOKOQUZViioGG2gVd6U3IVAzeMXgrGdsbMTfMHJeFfeZDbfOLR5Ll0bgyLioWsw4aJCotoQM+LDdvTRp7V/F7A+EbH8KcV4uhc5P/+EbMa8Q/ASFup0uJX9CfY9CrMYFfl5Den4G6kTpvqQEyhDcffvgr0dCZbRRMTsrIXEUF6dv6ABetzZCpuBgHcwgmR0ho7CQXURIB2iAsPtZ/LwtCE1FEOIXNfDsJ4jXt/DbQrjNJuKUJsi+GBe6/iKkjkzsyCZ+PsOqiF9jCFPPjJMQ+otU7TiCRXcQd/sDtuv4eWSxPVJCC6hylW11Vwet7yEa3LaQTvQXwk8U7gYgcCZhGnfwu98WuvGGmH6WZ90L9dM6D3EjxPZAWrU6zTLQ+SAO46fl3iItOGfwy8Wfe44c+kJit7TBYbX+fhiL4iHqGUwih4cT/i6j+OgFk9hAEmg1u7UGkyYOlh1I0/ABaaHC/hLKWvAABHhw3qDz1rZxaPNoAVTEWS3w04OBA2Pnj4c6iFZ15mifCtJJwlguXxLtC+dYBaQEodXfRVxm5HRY64jNw47z0l6/Am1DEkCijh+0dEIdDqt7h6hWGTm692g6exuFB1DbElrIieVg8XlJHxRs1efIocAie0JxbyriaaHABv+35MqELzxskVnTK4GDp01FW464wLrBu0HFgMGTrmLQyG20GJbL97+BB4PBJemVcZb/Bn2z+ueksSBJIGB8EQEs+hAIGXdTICL/EQJrBhaWeRPqyOVZwDVeSUCCPglIN3RNQK6ifYFtGuZNWaiMRoTGNE7SwEoqFBbYMzIMddJHxtkXwAbDAhVa1JTZNT5YdKUSTvrCOFWBSy2pJ5c1J33FgrB/jMBAzkMnfWJOhJ4Z+0igbbaEnsvNVOCZoV/kQuWkX417LfBuHHBjaias5I2x7wVuKbojvcf1zEkPTRKhH5VbGmT4a7aFroxTEZianNAPrdYnXeekC5Rmt/6M70PgV67odutLWsduyT3jSGCusDB2X+BTuZnCVGpvtz6UGStMpBJ2610StlsjFb0x6rOxVwIvcukRkFR7AnJz+RUYmlehdwa/7+Xib7ducWbZLbkHgLuS+5Ld6qtPz7SEXrCEE6mE3To0tiewz3IazL6ugfeMCxe5LQwEqjQoYkNpu1XC7c1u5ZmvjDv3BLbkLma3to10He0W+o4CcuIIlbPoWGCD5aUZJ8nfMbl0CkRMTWjA2LbAujkQ6qMVli0KOMY9EbDAfV8aN/hvLCeZ79/4QI7M+FDKGB9LvHFJ7vL/7EwVv6vSt/o3rpnlv3HXOJ//xhcScXxpnPa/cY+0j5v3+Bon7bgJuxNz/298KlUat6TJxmcI05ZO4L9xx/gj/8bnMHrBP+BYzjMRvCPkWHpG/8YT/MuObyG4Qxg5RJN/4x85xuMVjt34w3z8G8+gl5Mi/W/8BdW3nMd25sPgW60W/jfGfjnNx/K//v5vHDSP/8ZrLEh0u//GNl6WG/P/dOwaZ/Rv7JnOv7GM8ySvDbldj7co3Wb18uZLYq/kgI6lg1L8F8GLCRFTHgtxCpbQyC2Ej0Km52CPhOyAdW4PhNqhsFC560YwLx8xlRVUDfjbgajQ3FCI3HuF7/rws4ufsz84pIRY9Rho6gXiIYqQe6+4fQrZ38avYyFXv1Bv
Xwt1txlsu4TfHxtCoxmUWUZpTs+BOLortA67+hWM235IO0y7VIbLDgrNziGYNIV+4qdnt1D5X1Y+AscDCNyvHJRWjLQAeeYJQU4RxM8KraOZ3EQdJoEGqD8p9BdJWeOaUBYTRbWKSN36OhOaLfIn6mV/pvH7pcYkN4WWlvyJRvCjnv4sSB5WlRF+xvHTh/o4sZ5QuXdLEosLUjTBFw5OAx4xtGINJIZMYzMhmzgeqy0EOWAlKmh5+x1RvCAKc4ooPDZkU3SEblRphzjuHHZF1HjjnB74aZ8i6QZa2j8Fn/Dw85vNjQi2harJnVXIHaSjPM+VW4bAWdBAC7g85awCsnO6cGycMiG2SfELxwltWf6A6prh7xEoh6C29O3kXMRxsD5xdlRwvBu3PBeeSBGnQr97nCcZNIi7OmaLnggNIFqAB6aPcyOAVq08wGqIdqsgmPOOM8OPHCp/PAz4R3AjNyyij2C7qFLlmwLUwH7CGWmtowxn7ZlB0HBOHJLgIQjy3kKCvyhvpadtgGclzkcvjDTtokVnhEjiYGUQPrxJIeoYfGayaKrkhPVK8tThadbGmeG2izxE+O9Yxwt16MLbmRe54LgB6UQcCPjNs1CfXH3cQFjUbiBknLpAEHdZN2CZwLnAmlxZ3ICH3oQbcOWy5QYcMxJq03OTUTb4O2v8ewLbVG9plAT1MeqjGj9tzoSmKEoabyXQML6awD5u+W7gWC59buBIrQ9pVybdUVHJXArdlX9WAbk/xQWqBvlVVNZjgEuDmBfovLiBgXEcgStj/wi0MI5zA6ca70Rum26gyYqem4nQDot7k0GkgFztUQe5CXcE5F69LjCixT3uQ27gjiXdsN5y6UfhQ+plhJgU+CGzxOXZDfzhvuwGPnHddgMfzPvbuCj2C9ukuoEFZehkWKN7jFWt0Q16fKNH6ZdYoyfj2gJDjM1Gr7R6I30mnWCkOno3cBthZnz0icHiaCYdBWs0pY0UJMO7kRaxwJBx9GvwavnoT8q2RjtUlEmLpCXSGmmFdN+UhO4ZT4alowOKWqRnLK6NgeZIbtdjgZ7CBfU2Rsojh7/XjYzBRz5m4MeYeBQ2MuoeRTg4HwVpFCKNy73MGiWojlKClRgCaTIyxg8JbOEoWcFjuV4JHKHbZ9cn6BfVR+hy1d/QJ6k/46VUu/4kFxSBe+PrCtwaD3Ajp4Bd/0OvpC4HsCTwo3Zf6LbUF2omN/KGwMzYI4EP40o/qj6VW7Jdb+COa9f3jSt9uPqudFPsek3aw65XWPqOsfcFSuw51YuMei3/FXa9L50Muy5j8AuBSy3hwngIgyG/XT/DdIRdbzHmiZwPdr1pHOls1SMK+Fe26yH0Cuvyn/wt4LEA17jStarb6KrYdYuygpkLzRvpn9a3jVsR2JL/Hbu+iQ5YPWcc6c3VN9Anq2eoSLNZUsZDpaM4BTPSrT/751m4UHh4IepAIGh86wIhdMM8LIFeCaSNsysQlXGdQB7noIeHat8COWkYz8pKQA/zo4cCNTmFPXyq9VGgauygwDH+cT182gfm+9ir3cPG9Y7AnvSBPetMjoBntU1P6KncvgUwePCsa4Mc+gZlX/J3V/pInnVBySMld3KOedYNBkueNcICHc96o/5Fbo4C39KunrVgGZ/o5HrWVE5hz1rJofDw/e9NgR/8L7nHC4nmHk+ljd3jD/6HH8vZIv8IH7+4IBwvSR8w/eIeP5G5Ib01zobAHXrb7vE7ZSMZDrrHcvWJCEzQL3SPX40PBh2NcEK7FumA1+vja1zwj7FDoIBcCUsClzTokdZwPT/eldzd4yKGCO7xDhUH6Pi6x4fGkXvA8RFlx6QyLJDL13GCl6/jbXTh3eM8/k3c4wKuu8ebtHNwOrrHLhm5fcQEZGQLsyBayT32GwewjpP4ew/HXFr0fCrUFbNPfOkKPx9JH4Raqwx/3wnd7lOwScECv4t7Qp23FX4v1fwM2t4P5JEiJaekF5S/8neZFEV9zPgT9Afl
f3yRfyK9JR2RviHglAks3kFLBUqOSJHA5jHEvnMI6tv4fZSixKb5GemAkhx/10h3Qb/4uz+mpCF0AxlsoBnyESFeIg/qQuWmP0EHqMSiDo85UmwgffvykM1yANoZwif4gd/BNtJqo4kCE7iHHYrRrHtXEAwZqgRPe9NinWty1uIr23KocK23vw9xf7e/PzHO+J5S9IVx5fe3sWXcOOzi1mgPL+W0tYc9470JXBlHLsTDa1gPT0nrvBQO5aySkenwkLIjjJ7sYREj12GJojKum8MKxqfDqvzD2cNdXseGS6p/SVekf6QfGFsNJf+8wBw3Cnu4wHVsyIv48Jv0lfSN9J1U/rPkkjm8IXNvnKjAEP9M9vDJONIKWzJml/vIMMvx6jBHw03NeIsMB2D2MG98YYECrlj2MIYJGnsYx0V8mKBdWiNkcMsY+iiSQbBc8YchbY01uQDaQ4s1sal3tAE9udK5zUf+hzVvMVfjNm8U7hVG+MdvvhtP/r+bY4wt3eazqlBbt/mqzFyaxG1+YcbAbU4xeHebcgIIXapspSCX34HALn2lyzUWwFdUBcoytHWb7KY1D8yD0GM5cG7zyLhyEWvuUbGvYdqYmXGb51r4iXGl+9VsGu9LoIVeT/MUA3K3ecVLWhM3Rbc5wBW02UUnq9ljP655qREsTDi7zRD7jU0ZFAoNowfXXGexSQzy3WZGuipuk53QZhT3Zre5zazxrF8gT2bDuNI/a2bFbBLBqT4Jy/VsEpJ4k6DcvScBTLFP/NK0E0e6tRMZl/+brMkldFLAjWYiV8Gzf5NtqLaMXxw2MVswYcAUfZPG8f2bJBA2jkviJIafUTmtJw3po0zkpN77N6lJ+02qctOaVOR0mZTAytVZiu0b++3fRDoEd/8mXRNw/k3OpVUmbekUTXD5n0xwt5yMkdgr3F7QAZ08IczQyIBy8kDBHdplgpN8ssIE0AQ91MkSVfnGv8xkgZvB5MN4g3+TKZ9pdNBhc/pnpCc4U5x+Uxrc6Z/KmFSgL4N2gSsaDIzXF+jSoGp8pwJy2jwJlIy7LnCocEzzPdzdnX4DNzynvy/jQAHOsvW/SCWTsMBMjoXT/8RRd/o/1CwNMllhftTp/+K+6PSH1DzIpcfpP8op5PSlh+cTuDPOpcCI+gkp5w/7b6QpbPLi9DOaWgz9fqefoKrA6dR+3sBgg6Ks6QrVZvFIXePYAmtoQqcfoixIGiENowPk9P1kfKTrmPHsB+R3/wbdPXdxznvlQjp6Mm5ZtNARdhdNzI+6C35HwF1cy0XXXVxxcLGQc0L+pxY93vEXNVyJ3AWul+7iiHbHJi7DmMU+x3OLhrkV+sWB0mJubLlALORoQ/gpnUp3MTPOi8CHka7F4k/tfhWkG9wSeFLugWUMcVV0F3dk7jn4WUwY6F2tR2qdwcSdu5ALoNz0FwnjyJhqIV1BVDnGIdFiizG2pQvsLnJanayCZ1zpoiwcXETdxRrGa4ug8eQSsfAZF9+IOzLe9z9nWZMro7PcxZXVWVYVdjCDvDwyPjl0ywOVHaLZlw0czeWeXB2cZQfT0ctz+p9hgnl5ip6pszxRjz7+C5zlJV16PNeXXYULTOk5yyEmKJ3lPUPfYhLPWcp/Yk5A7iqwwIyhsxxJJ0GAJ97yRa4WzvIbU5fOcs5MF+glOcupcSyBD5wHznKJs8JZ/jA/7JAn4OHUX9qYXnaWa/hXW8oZNhQIURNgJn7cTJxlGnO2zjKDiftlUu7xAil1jWOGe5kgjTKMjKzkDF7mcYY5Szn3ZwLbym0y7w0jN/Sx+B8AnPIjuYgPYLVLgMUWmQNaWKk15Rq0D6wIRxcq3FWoKdDb9UUJ708q3FEoKGwobCqkFJIKGYUE4T7CKL0By9sIqq6oUCb8qcO9lCfH6f4TYB+8KfcDsLoBgFO/A3hpG+B3QDMVan6ocaY7NFifAcKkbuOSqvctRpLOM4RHjxQWnsn11wkPFqFVUc5HeDwhDJiWVVb3towl
3yX0CODkFoDMN5l6E+BdkYlNCe1dyjYeAPaTQ+FHi9zsBLBgwPQHNf4dRseYWAyqjxRGlspdM9S6n1zxk7pJCuCv/rNH8o8aJmSvAVb3FGBvrqnOIZeoEJoBwl6cEN+hSYqcExyRG5Hb+aDF9YIhfRqrcwHw1m/U21PoKVzSxLdHh1mU3NsuIZ+hcKXcoElYFQjtE8LfhUZ5U2Aq9usDdb0S3UuHhLK4S1cq4Ad4NjlnkAR8JijzPwCs7pKwiKlFkW7ru8rVlQsC3Oga4U2G+W9y6CaAaQfU9w1qtQnTMpmUj+CeU7ZLn/spwD28oeyQBvkAwEukAaER6Ts1s306lT7IHdYBf6RObYOBGvR1h7eA7B3o1glF7iftHlmG/c3glsPo7vYBdb0sCw58aZ709tI0ibEKdilJWVgr/ChZP5v1AajvB9Q+eAL4t0BDO1RPKEmAljo0+j0AOH8pwGqNsn6FxiPQwAMpowduGf2C2g/+vqa8xxj3DS32iNxeEGDtp8lN3wHu/R59mZ+vBBqjIsgSw/QMUu6f073dBGSHLCbEUBdJFvPHxPbU+I7urEOAEidOFz9T8hdBazn6zViVlFwzXuT8bSqcATJDUPv3CGD171S1B3AufsnlV4BZgkz5j4ZHajF/BHgBF/D5QtUrZW71/1I2CMUQLTqMVKaoowFTDYC/S2b2f4ZbDJsO0mmbssI+S3z8BoQmlM0fKPs7ZB1yzPPTp3lK7o/G+swB3HqAcKbCukfu8RFw4yezWhLw9PDR2NsjcmOCJcNfAc+tUncaBpT2QZ3BADBrUPPeBARPqTnKEIo0cGt/hMG9BtyizipTOB+SK1Fol+IqHP+zn4y3tgXYOwZ12o+Ajz0ygQHAfaPM24wBKueg/o4a3BOKEcIGOc+9oNfPLsCe31I3/1GHG4CFS7mY1NbJuWuE6DahEKWlv0X3osuYGx90uPfRZHVEHfNx3zdpEXulLHFALvhEbjYFpJmyO3+hrNQEfErK0mFsRwF2YgVwJkeAjUvQW4uap2/C3RSwmQH1NlOAvAtq5UOEOs2tyr5Cm9AkeME4o2+VVNcj7D4A/H9kqu+0WGVU9cYyq19M8SxP4TYtvdiIwkKRwocFITrT4ur0+x4rJwdviA9mC3Vyv4DQHmjxGNTqURS5BnXP7qk5p+btD7D9Qc/2FOAbggauQOMZ0NqStgeYBzG7G6DoZgxNOQdqF2OAEqYHTBWzFnJFZVm4AA0NrvIS/bHIiD0y/gOGfCBXYeLWYBNQoKbCJHZ/mHE6T88bFva+YIA3ZmZ9sZp7ctbKuDJxBXBac4C7/U4O09Ci666Tw2VHoNYEWFlyLvoXYvIQoe6nSihVCN0FTVJlmvxmAbl79Y6p9zMN/b9qQaFT3lbYAvjl3+PGVA5BnXIJYG9SZtWzADd0TK5NLjehxe0OYUctBgFy531y7y+MdXFO4euQbu+kY2rWXgHLIMOWbyg7CimE6fTQUS6mJacZt1amrk9vf19VcYUEYfdAuYxCim71C60My3EfGcRqMJOZxL+VWoQJu48A1yPnu6fs1CVUvgCpT9DoNajT7gEiH6DVEa2aM8J+DRAoqOxX4YdOs6pyD4DwlTJ3CrcKNwotBQZO/u/1whjj/y3aLHlMSk31VRXMz66eKtdUYLZO0FPu/4AlhX3CnhT5Z7yta4ATvARY6WeFCcC+tAFuowuoPZJJeHQLRMhl0ircBNSflMkDfFnG9Z9S8w6af6X+0GUZzhvLaNLA+ZG2/pXjVAW4P3OA87cFSHugfy0VJQH1fTX7omz1ArCvDwG1Parce4U7tbgEfP5Slv7faw2w6ymTp6q6poWEVRgh5C4Ik0MV+piNWuAUFWj8nxuz8hI3yh0Q3uWMWZlCDRQXxZVcjyoA+/wHYM1o4M+AfvZAAy+gztsFzV7OKbuj8atF1WMOUGdQH/XVTdBgAXTzCHT1BRq9ZJDoguo66QlDdb6ZTOoJkCkxiZaKRoDYLujimdFt
xk2AhsVPRuF7DsDbOgW4/SrA9wVab9Pg+pgGax7AQcdN7PJlwCJHmgS1goznpZ8oS5BJZanCmEYAIxz4phXyCtss5fRJHa5oObVYWPKHJsNXCh/HhMirCj/ph54cQKr3Iz3dMSEXAKRToG7PBtgHj1SVdsm9ZgnXc3W7JvhPKKwUyD2prA9ozEA71LvFEsCzHcAmaWNF46+AAoPX15VZA6RohoGoBFgJ82VyYVDX/QVY+AcUrk6VdTAH2NMrgLd2Qi77QJPoOiD2CepM5wpjQvuacNGm4Vue3u4OIPJFWeZADYOExisDb/8QnkoURuj9kaQzThKRhRb0LshF6VuGQCeEtQ3A+hQUo5FvbLMq1Ho5IKTGADeVJ3dwBojcgnrpJSNM6Ot+xQDRU8oaT7T4odOc5vZsh1xkl3afoNMUqF8zieE5DZbZIFqdMiskB24ut84iwHlYI6D6c2PVxoDPCmUYUApTAv27pROGNQLRLK2XVXIP2wAvFQC4YXJulJG8NB3cUYiWL78M/NGlbuNRHQYMVmUJyzvKhh3a351QlTui/VYO4Huj6jNF2N6iBcYhAjheIuzSxF2xUOdLLjwLSX0OcO83AYUp6YR0BBo4BUUXZSHH6YkQ/KDnIgtwDkCrXdD8ksYurXwWIENqfQ5YSloL875o/U0mwjwycc1mquZ9Qqug3AUL+mMqdslPt1BYvVnYllTwUzrXGUDWRxqh6JAa3yfo5xLUqZFxH5oAy28DfgpUzRuEIFX7O2QerwkfAUCcsd3iqaqaCuQsH2PYmJoTk06MuiPKinIbnMmlMwiw/CHAD6lTnwH8BTIvHdq9fwLc6ocK9ymc7pF7ayowouWjznnXWO8WYP8G9PNB7ZaqiTOiW2Yop0shJtGgOwYEG8pUAOgfz7C7r9BImj5Y0IF4EQU5E6ZynQ4DvvdA3ewOAV0IgXCaEFoRqseARQbURhlTgwkkeH2q7E+Bdm4qotyGQkGhrrBL+G4D/AvQrQctOagGHYW2Qk6hqzBQ6CtcKdwoPLJOfx6gHlPZSOGDBQwXyq0UfinEtKO4vcql7EPOqhOAvbcNcMZlCv8SgM9jyuqvgOkd7TBEBDwCrNYnzTN5cvU67Y9qFFb9FHbHdOhWyWUJmGcQJrKi/YZHi+NDgLdZofDnQ4V9wEzKPpKKHgPs2yeAm79XoM7pXRDmPUDojEymS7euRbe/CIXTP0AuQ1l3g9DXGBEKra8RwFt/UJMBhe8BLTtEWJ/T4eOWKeBJuQiLMwox/yDC4h65ozy5cJ3geBolxSwZsraiJr2h+Uv8Y+OOzwH2ZF+5NmDjXZkThUuFC4BTewYUm5S5b4RHH+BvAppLU3R4B/DSz+r7CbC2lwyB+QARrkYs+feL3Bv97O9TcvMO3XGNPjafr/TuOYQqg1iRTdpv7gBuc6CBNYr6FDljmjvJGYV7R4xbThIGtwyf9Kgr9QldJvnB8HM2h/MlJ+eBqXdB3SXo3w+o83BAUf6D+lvKXvIAK/0OiFFj7wSo+rFpPnsiV/SUi9MkMaVbu0DhV4pclkGs1yDhoahRMoQLujsYkR6Y0bvKGMuJ9AjoEwl8bNG7swPI7jJ+KAqI+zWDGO2+csqxsvZ2lsLCJbnjFiDEqq3kfnJo/hagtTNQe3sT4AQHAGt7nxDaAHjpHLn1ArnwDS0vXgAunhQIhzwODSorhpkxQ752qerS0Nm7Y5mvoI1d2rk/DDEqqteE6dzT+o95LJagW00t/oCudU0pS5mTpJmmuSGHfV8uDSWA9TJRrgzY7ypTIXNJ2qPZgk6pM1D3cE4z6xVwe6NxvhUWAPv3mRbJNIWpFSOpV3KTzIUyeQbMN8n11lUYBuy5oFmW4XRrhNkpzUdtGjCZXeZqd/3UZBjIfdPiQ0zNTTwyiyZjuHVWy/Kfk9uWUA3jlIOA8Dqo/XtImTcF+MZkcHFqmFyUBgMah0Hd7iUgQIU76xNSLsBqVwEz2v28giZ7oPceaGNCoxX9
k0eg3wvGioBm/1juI0VhpucWaWWFauT6PgU/hYMn5RjbDhUYg+lklzRYJ+P4pT51U++BWm4S0FgHdRtY1Cn/nlh5afZPSVugPqwANZl30F1KnE4cUJqBxrH40ux8M+LfF2MltmgWpND1ngDBbcb/xppQszcArTCm+8Vs3KMbOsWxNNT4h7Su3jNsnllb0QdafEUAW7TY+aVd7gOwYKalAsNt7zLOiC5JDeCX82PP5IKgn+egTm0KSPtBl8+gVigE+KLCmZwC/AEyEWrcxwdC94Iw/gF4WPIqJsk1wn1LOQrdh2/lHICNETIi08++viWnqXkbQ7W0tYQME6rvkLu36PD3yFzjoNM0zeN/rFGBIlot5hquR5eA1Lgmx6MOcDG0F85+BVilNcDvHVUY+IgKc7ECFz8A+3ifhuE5TVrbFN6uCJF1gBds0STGkJ97NPTlKYuE1aJAYXhdIUCh/0ULuCfnpllqaF8denRHxxTcicIp3Qsr5Z7JDYaEozOtQo5R/MzWi9HEz2o1KtRkWAzuTWiLN2bQlyR3zSoHah88A35vQa2SR9m2C3B7a+R+e+TyNYAXC1KYqxK6CYJvG7C8ZJDQkJDPApzgH71rjOyhRUS3PSbgnBITP/OxH64If1MKe080aT1SmKXObWyRu9NSi9cUtt/pUEwSkmXqnpiRFb2hSeJZ4VXhSUH9OqyJ9f5GztOGqMstoCLdjBIAT18rcr1dA+B+hndViwA8xxRNugvwtqo0mIBGSF1M5eI1XLqG6OqU7gm9GWDPD5o5odnIr2AxzhvpLUWPK5o1yWzfaD49je0jHNHcxXgOwg1mFP4AWPhvrfAKiZefMxqkxSyKDDw9IjP9VnM5l6ty+OMAPJSuGqs2AjgdD7DTBrWzI7ULUfWQJIfFBVWztwG6fkd6S3oD6quDJqmdz+kwGzNYJMco7QsWymJ2g/wdoVmaVtEzMjVauWcqjOwoVCls0NcbkqmmGMJHpn6sdv8XOGFVLI07pZ2fOfydqwGzd2IBtXgnJ0dgx2DubUeuhdcAp5kAWO9TcriKCYdr+Y75PKQMN50dE1+BRuMUrQYKfYAdCSp3zrhvNiCzzUAVysJbypwyhKvWVfpS7xytaaA9cp2xWqRp7pBGQfc6jNYFLTMfNJQ4Xu7Q408d25tUBVlBO8myWy1AsKt2F1rciKqxVn32w8aJa3NMFGhRosZLM5/4K2s5FFmZD/rL0uUYAfCgv8wH/WU+EBfFW5VMQ62eAJUrMvdzwssuAM/1yyaeZsgLUC+RInMNigfnZbxnJxQrCBCrAtgv0WgfNEy1hVMc+nvKGNHptgBBFhXvgEYp2T2lnEHiTMu+u6G6ywKZoo+JLC1GfykAIlMyh5t0cZiKVX9h3sE3Zsl28TO+le2xANYx12Z82rozOUeKBv2QoqmWQBsOqONdAtxIXzka2E2XQgyJhUvsUxc7JufQ2wrvUtjdJGxcU/caI7SPABjOSpCwWvQ+6Vbxk/Nb1M1tCsceoXVLXfKUOuWs0BgwP6GqOWX+bS3sD7CMqmZI68cBwFtnba3uQBNnVs6aZvzDgFZduWmEHJ4UFk1KG6EhnaeS3HcJsTXQfAfUmV4CEgPQUBn02wV1Q+eASIm2PdIberycEu5ngM03ahjEPk4B1u/p/xCj2dgCVBkz9gdqHTHmbh1065suzMs+TQIan3SMNAk/oIEGQw5P6P94wTJJQx5o9IOSK1JWxd7bZyE1OkZDANy2hbmjB1PJs47BL2ZSYfAma5fvg/rZHu67HNKBdCk9QL0AisHCQO5MAcIctLHB35gmGpjUDyhGDgM5kx4BgTZpBxQPngbGtnYBhTQdo0vA5gFF9HBj9IiCYkAxMCsGD1CebJIesaALyllclkVjVCFFM7Mvpty5pWWEXozjeClCmc5UYPwhVZHL+LWx7o8Iu2MAZpqv5R+nAajVQJ2HLMCNUObWdmjhFKgbULfcJXN/DkhGaIeZeYHhJlVHG+p8qdAF
rH7J/D3RYlYBlPtqUAd4CY1R+1Xui9zPo3IUfrJ8e/rKGM1jVqV7QGHumxH/1FwrdiwH5kq6NjmA9VABeBthAHrqV3I17lOFkbgYHnxTiKa6Mq57Q652TSiPaBJZowMGASL01tU9T138hzD/02Bd6g5fCR6jWGcszz6dklsfs6CGTeHOEpC9ZOQHWtjNNxoeuRrLT93jUoVFhU/VhTR3zbZHzkszsSxl7hfTc2esXYmZ284jAI+e+mb5BGq1TgHOagpws4cU7l0T2kmA/Z0B1Mtk5lXAXg7UC4boHJsRyq8ALM4RA2pKDOD0nhnudo8RNlcU1uhkO1pURyHLbDA5LMngn0cM2xs07O5T+EeL/C1Vs13K3t7ofB9gmU0N+AjIyll7KQOCGMAanBEwkr2UrNoA951g7/+R+zgFVJOUTes0tOhtB2YEXK4uzdcWQz3G6TWjyuoVCasG3cqMaOMRhgi36ebYa+RyFUA9RFn8m+Ad0r75rhYPFGJ95qXxkhZ1s18Ki/c0afyQax5Q97vBVDA70jOBX1Cn9gEI9Uj7pOekTdIr0PQm6Po1f2+T5kkLtHkETeYo2QKNboDGs6T8nYyxsFAGEFa3S4raWk6HficU4dFez2yy0NwN1Rf0o/yeBWFms2c26LxxyPq4oCkmVr4H9fxkQl06xEGxrq5n9l5BbTpnlco1smsWDVCrvwTY12fkHh4ALh6wdSW3PACtK7KEA6iS2q8dmuNxjdhtzACpMGgyQFG7S8ZPpvwD8IJ7dPLRwsofaxk1Rm9/kMM9oYtPm8A+VqHqaKXeNNy6oV1nSuj+Ajp9llUE9bNc+/ia+kea7bLARECjMahtZQFTyb+DnRtAs6DOgx+AJYYdY790AYGhMm1qiqDRC1Dr5ZCa+QSQWoBiMUNHRmrbgHKeogNGqdBleUPIlgHrX2R6GfqkqsxgpYnE+4x9/EQTLDUQLhsGBDWTTaY9pnXjmyWlaDxgJhFWbpuFe8E7QMWivsFgH/dkWj6A60kP5lyuBUeExzXAnjKVDMDOrgPWz0DdOmUYAonGSgAwEjrnuOTcYB2CWM1zACfzRLMoOduhDoMeYSIMhyGNmP/0KXuy6YUnJecGq9rPOc455zjnnCMcyQoPLZAcy7MXTQDGWOcc00i01BcNXq7JDd4BqbiWyrxdLAgCDBUeGCKhDKNjaHNmnK1fQOYR1MZSxzO5AtUAvhllpwna3d4BXDzTEG56QN2TBdj8purIT9XkiRwmx8/M5x+ZjENz9D7FonYOiNPXnbuA6TMoHt+emdQNqJVvaJw8YEGZfZzUABnA/QYNqnHKIhfk3lqswQZLty+bgP1NWuzRwlvXuGG1CLFgO0vOikgztMXbA1i+Z4D7sAOYnoFiRXnb7DdAMdUiVnhc2+bsStssf0BTe6qgh4U5jzYXs0us1AOgcweKdX1tzpO0JY9NwH8snVdb8koUhe+/XxHzJBN6R5o0EaTYEUQRFMEKKkgRhB9/9lpzbtabXWbPJCQhISFJ/kGXPebmn+liwyRbJIcMbIIcZYDjsu7Ypz3jANTZhLHwA7Hv0r3OaHkjtOZ+tjtku8Q5BznnHOFnHEnw2/R5ZXldGbbvHLBaX4CjpoC5/KWzeAWo8o/GXIMpqhon1CcbuCM6tmA7r9I1G8RUN89/EJV9AjenIOWO3eJYGbH/4Sf2t0xJ1nVmn87GpU6xdX8bxlbH2rlHa8f+rIHSTjex+9JVXnSVPOf9NgdgvyuhJ70gekmGNo+6NYvYhShx0WbKX0JnenXJJy6BsKyuTTk2qAI5POHKyESggwnUeuoTGTymSlbWd2a/1AE1dgHOwVY78xpZxtp+In5FDOOMJfs6paCL/dHq/GrnJTNxG0nTONYZLxWG8PUsoc0Rx8g8P7u2HmKMOOe0PHgalxHAk7eMuNS54EHIBY8ULngMcmE4eyHADBbou6FeUO+hmwrUtnYAvrYveABywQOQC9kxxABrFGXklr7tD4CjEjFO
r9glDRyWSHLvkTWLWwBHJ+L79RAtF4AjDBhBJhxzHGu6zmvA/Bw6zHHkQzbB4YjEr55ZGr+jXfDg5MKY6hHivxkSeeHYcIwihptzZe5M+trMu5AFcWkk96Gq6QGs9yJgj4nsiKFJAzDPkrSyKSZGjwHcQS7GzRczurQiLGjh13z88fIeiGmfy8/qfYuheIwhh42C0GKJ0zfM2rCHIF1FDiG3YODskM030A1LuT+gs1+OorNi7jNL1aGVOAP+I8C7hlY5F9UdU1lj+8QhfthA4FCPd08vjRy7KujBP+ul8U7gdtqAoXIvQLwOtfcy9JVKAM70xYd/HwWMRhKavGZ8PgTM8KNulAZ+FEN9uswGi75Drb+d9nWZUGaevdvSwm3/AQM3YQSMGgfkeNiHNWiyj5HDSuf03ezRt/QzMTNjiWuvng+Owoq7gcgbtOyBptiThduWJS1Jn3UuneAhTBGNkEZeIwsorL9BWfRhOrtnxEhb+UMNt4ZLwwfgFtmgUd1o1x9R0HkFSzuLLJxMANagAeA4AqFT+jJeVmpzHJ8POjSj75EGznThW2jUAd+5Ticc9f+YnjVGGmONrsa7xhdQbmrjir3omcXdjEHjWHfMYTbqOnCr0dEYMHSqjf+bvurBfGtrygyWU19ujT36+Dmo1v9jf9bgwmj0tcE5bsi26JPvURuwjma0cFO/YK6ddxnCXAAKf5byGTjrFd/hOX3Yi/lkvxOgdd1na6w3sEqM4UjKx5tOxGg5wMLHhH4FcHx1Ym/Dwn8j4OeOGbgfQXy7MS1cz/IZqyyNJhurRJMZ9jXLT9mzk2nROnhjStJPPOu5wLE02m115iUzdyWd8qedspb4DWUfA+ki1Dp5pi/rJvbLGiXAtdBGH7DbB0TziU71ytatJZ1hVlQJLy3PBMiymSpXdSjLUJX1zdMVgfsQ0cFaj+ROWyxphnQ/iZTGGZB4h9ZZ3g7WmYeDQsHJva7Bgase6zemDNns0/oYAO0Rfa4jPQ4PME3qPphg4lzfwxM4j2Flo0ChQtcB1FuGurZQu0lUr6HFWyh+Tke7OXNvaHiOmXzGZNyz7uHNvR5jxoqeB4ZL32zyAa0PWTHA5rhlR7Lq0A1be1nJu6OfNVys4a1Sx1Bcg5Ax/0DxJSI9YIfg4eUUGecvpzlm9wRq5jj0EPsJraDxJgPxAYB/fUnxFz1/V4ysLmjh9NtjlDg2/GMRfXGei+dQH5eljWvFeMPtEaCGS2IzAuzmiujeECMvkFbMzzDfxm3GeAduDsAfU6Qx/pzklf3XKzNwqOGVj7amLcLCtTZxtnps8HzGWGmpK0/p7IdoPbDKEX3maUJXTjM/dslE1zEwnzA9+0TfuK+HxcIqENG9veuKeRaJf7DIe4Yp+H+COFMc3s5m4sehLlJlxqduHfyhhQMMwdmJnh3OuBWVRdnAr3L/LLdh4c5ct2H+NDUyBC6qSyy5Amz8lQGZ98DRIQ0PDXM8Z0beovPknc6zsm59S2S+mfLlISotArfluo0/FrFxE5WgywEdm1ClqgD+cu42sL25+bOpZEVsVr3aEnYYwF8JxHigz56UCOwXMZiExj6dDrt39vSYNpzpv1sdOtGFh+y/SssOc3lYwQmAywfiww+d4psPdYaXGMeJmjRTspOsAFY2RAv/rVLSaYrW5pmW+4FI+enEAZk0+KgRnjcCR6LKMHGxVTBkczu1R+BnI2mOi8eSGTqgFfcxhh/kpcHEpa139vDp0PLMgNwpDfx/UBJxyU8yAleA6wnqZ8QOcnZM+1dXemVeO8E+cY1RGfgTuhh3nBuF303EqrG8SuiBp+ccXOZcz+Ka9dlMbTlga/QD/BSgETaynSAbHdi6rWzajhy2ZgC7uyAu0sBmD+r5ZYL/BHB7odb0EVDDV6KrgHBdV9gCgRaNfgrAnZSOgeMixKcan4ATZyMTT2wStNmtddIDUuxd4U4R5H9ovDDxPcHWGY62/aBHwVZjxrHZO3gJk2howHDJ
YmQl26Qln/Er4FhrQOEKpGB+xhgefGzJmjJjSuqGzts0nfFvItmmczEk8Dd5Sw5pK8TgQVtssKlpI88OmmNai1td+UPjTTufdT8hbXF8Jp5dIRjTaf1y7Ar3wKJmQXfHYuZ8oIudaZzQ+fXLmrtrNv9Tut0RqxSGOuVHN+CglWumU2RlsA3PgrqizqC4QGQb9VOojfuObSPfhuLhGraBP/DZBm4skAYdJo0faZxQ2S74x9QQw5t9uj6guOlMXLUWcDyAYoORafaauOP0Pafd0DC1wZFZOOayZa5oZYJQbD+2gd9WEFgybX2vrSkQzULTDe3aaaw1thocrFkusBITg1w2UXZnYrsyDWzKpnzBfQL+AdTChWlTVsp9oOZAcYu8acSrDLQCwHECWmQubr+X5o9QO+hhFu5YMo0frWMoLiVI+TKb2LiebhoeF6t0WOWQ0/c6641VbAtoP0Pxn0XTwH2PMuLkDHDTFWDYbHBE9gu7VH4O3Myyg7iu5pdd5558c50TqQLg4OfNPTlg9tOJ/wSIhZ/s9+QQcEpUPhi7nRPdBlOGzwAOG6SI9cbQ953uIEKnzwVYOGwQJ/anYo0SuvMJK5/R6cQTujtdLMB2KvkCNNxsFozpIvdMnNq6dVS3PgXw6whmx8vGL69s1tkCG86U6U0x1PMR24wea5CxF86N9cBM++IQqGSZ+PymK55r65fACankZ2/1fOdpmbIi5mQIW0B1NgAuH+eMoxLUwsMbBHg2j+Rl0oB9uyS+b4HKCdT1yAqfWaa36DN7feZtwsAyyAzcviEYlpmBg1XJeLHoXFzRqkZY5O6FFn4JEmt0BsxY2Ioc6wE0aX0sOLiwzu9zXhx3hhb+gyJ9j+iLsbCzJ53mjcQQGm5Dcd05b5hhBqzBElCzPp35S1qdF1onDlDLQLNsaxdcTIjnaUVyLPvHhA5dqSng/2XAhMa8DHiYZaVTROaHTvMBqEx0dzXWdkWB0CvUx4FEUzoeYT+nOuuaYysz9wSKr2wJdDmO2I4Nk1SOsh7jNGtEE9R9apzN8BTDAzky6gDWoAuYuLlGnNtXjReNIZA4ZgZ+v0X+qQ61NJp0ei1tnQCqsKetmkZVQ1dZdbV1pFFic1wKg1Vg8+4aaDBk4vkmCKVpvaRo9b20/DsAT8iQVj3OgMIZkOBZQlkjeA+1Z3FA2XXAxHUPOH/pHHW0tQ8ELqBWyw/gj0d4CeaBxp7OCwDhK+1jnnLYV0BXP2JCeqyzWQ//O5dI9pnFX5nga9OX7gGhMAuc6jZnGkWNQ+b1L4HIjLqgLqnf1Kkezg+NOUtutYuI/GnjhUPrakNWRLyuqwo0FNUDxY/eaSNcoCfO6Ry0/ARVuM02bRw+Qh2rBeCffGkDx7hpOdR12GRIZRP8b1mmX5jELhTuFEwbZiBJXwDq09N+aKzPzl85wsIbw3va+Abws6Tk9qD1PEvlfHR5qQ02Zx/qs0zjlmOwOPYBFH/gl9QlZyDKXjkd5miLXDpVzmtsxsXCDuw8l0towaJd6gk9DDc4pJCUw4utFIC/YGQMM3oD4PKfRIZdQOE/SWJ9fRP4X1nGaIQYKu/owwVHvBTriDWGn3Ri88BrsJLMnLN1pU+fjz7rw6GF/9jhBVhtJj6XaOG+p4yBx4OJL+cGnD1d2M/EvIeh8QN9HlasJvVIVwQe3SAZ+5w/u3+oY18a/8/Tgik4AoX1qyFdJ2WVqwGO5wuwsNKKszWgdZFiDC8ZSfJ+q6TsmRrMKPWZUXxgBp5+lDTwvx4JxZLA8kMbB4Bq6caBMFvhQTLinM3p/AkAfwE9nDwzcIunWLsFUfHRWXeINOubfaZMWVHtewhbJ86XHAHnxbRZQzl1ovDJijhAwkA4uk2JPtzxjhHIvidlmM0+4E5DQxVOH0BxzTBl5CJQ/CybwhvKRUt7UGtaAPBdlDIOmGr224z8PQNlF9QevzP04mVo8URrOCAGG+DPprE8BDxltvrK0Yf/SEqrwjH7rUF9
TKgWdBsOzm4y2z2HOlaePjxAV0b8Bo0wO8QqwYhuyZkIcAlg14lXQ2pMOBxc4RPjm5WdAxYzfy6BGufISrtZYctR63hTz92EAzVHsvfbN9YWVOVGgIkfocR6cWglPICF3x735TQmBbgVM56ZgXVqX+bPRyPDRiFGbNwnJW39biCZ1IUedCcsa3/P6CzeAOUSy+JWL/Hhf3CCDzrdAxrZa7YabQj3KTCvMHTYBpacH4WbJKTeI8cc0G07LKv67FEFvujMyAqYkH0DXjljZHI0cC07YeRCUBPPM0wYBwtql66LMA0GVI8+3HYmrjl1xawzlsR9y5K1yjBypyOHxE5nL2ngSmSCz2GSAcSYl6FR27BA64aRmh6tD3DCrwQeIyJ1ptQKO42wzSxJV1u34eykpUyMt+jHZBO4AxQOvsXCOU5MZv4CML9OaLUtpuwX6DxqEM0WYxcOEGFru8sM3LUS410r0mj0xLqhHIFPP2asf5igmHBdAXAfCjrs0df7ZoJNA4+QQ4VjAHeqiDFgcSs4Z7dn37r3vK7B8dl4Noy0dto6c8xZ5sjUGUtZ+P+P5F+wa9yXEpfV/Rqw8KS8OI+N48Z6RH2GmtVfppUygHNwQme9z0avG8CeJJnS6QDTP+igThceXC0YfTMvwbr29pTAk+DQOKER0+AwzBybW349nBeGcC+91CoHmZFmZ1Ztj048B0CsuzXRiwLbNkPPE2JmsRl+tcJMy44qIjvZU41zjTPiKwr8baEKdx8gdKvRBczGFWPqTTsvNS4YCxS0daPRZuaCJa2llyl2gLFKj86PLODEmKJKzwCeLIKRJDVSGmlmjDkGx3dPJx6TGTHcepQzzpXZf6DVKer8AS1siehNz3hHZw48epRSK2qY40/i5wCwa3Fa7StAdRZ0YmsTdMKAawm1diH6vv20nmgp/7FufceYW9fCDi4qXwJtZuKESGLtIzYI/uliW2Z+sVefHolbVzb1KKs15pcdPa4cS2YGGo90Xmd0SoztXglV1XMw/yVmdJo4sBPg4qxkHk4A/I1Y+v7Wc8re7CmHUHyD4s+t4vqVdTMkp+QdAI+OCxk2Di/FyEDnJ3Rt58B+BOr55nQMqrLMwv+TQ0b9mLWmdUZOB7R+n1jggJapKkDqHFr8ggYs6N8h2xTS7MCk0WkALvZsmz1W+7jg0GZQJ3BJ342H6bkAe0i2WIIjtb/YkfW+oFX8YGKTVrkLxW194tqxoHLpgh8nuvxcj+mXjS0ulOQRtcwxc5jmMMslEGXyLAeEnlmbs+yE2V2MiyEgSzssm9wWsHEcGpb1KQvgz1AS6jwCOC0VA48fChsWTqXD8k0xo7NxTuf3LxsnWcpsjOnEgzylcOdJYwhk8jRwL5SgzSK1ki61T+AGNJTaAEU2sn5MAI/eDPMsFK+l4XD3OU67/8GmOIMMG6Hvf2bRxhtyikG+d7XIt9sUPcbo3+mJfCNC1/+mV4bv3/TCGP+btvAI6WnXuPw37ePtH9MH4/vftGe0/03xtOsp/uD3b1o2Dv5N64jU8CbDKd4qN10wf2m48v+meDnCFA9LN4tj4+Tf9NWY/ZvisefTL2P4bzrBW4OKbaSaxRuZeUGLI7uSyDRixP9No3j9UPESz/Wf4lUf0yzEY/gy/6Yu5t6j4ZRv7Sne4l26wQO8hygYxcuAinxvUHAf7/EJugx1JfAY9pPAZ9iw/HyzURAvBDSDJt83G7QxsGBHloQZvKV2DZdf0Mf7bMzgBX2X1CvqNbVFbVNvoMUzvHcoWDc8H4KG8SZ6wuKnTDrjqM/loxbjiO96CO6Mruic417iPYJmcIUHe5vBNzwy3Qx+60E8cl4HeDOEGRzh/4Fm8Fk+RjP4iryKkV2KON4e9RqOF06eiVrxS1FzcPoPf+U9FLU9NjzbA0Rv+qLyvVgxFs/wlj7RPI/k5iebB9DE+RYNJ0XSVyKJlogni6z2EPrFwtYxpqMuUTlcEUEd7y/G4rlH9Q4GmnxE7gx1
VHVLd4cp55y+wXQshURMqlAa7jkGY6/qoqUEJs++RF3IsL8wJ4djjBBzbB4VkbWHyR46LDkQhbo+Fut24UCrUpjeKDranyO2xMBTbcz6kReTmFU3O0NZu8CBeDCPyrlDC4XF6cbsOPsZTGKQuROEvFzqrgWqXWERmcf4kIIYZh3VzdMj2FVMDrEUPO8i+VeRI3yQHk5dYKqEAFrHMP8e+lDnaC3iw6jtCXr2oR+r/INMjN0e4bPLB+FtYbGaISzoPBaKK47xffLDkwN60WIE8xHAiM1xm80bLM0PbQePuUZU9bCAj7AQj/bh/Tlidv+fue7hNV/re76Mev2ggTetmmu8RsJcNw07/8/JyXFTAjCHBSJ7D9jVR8C60ylfz9p5AriOtO+NGSchwAnfsXXyWJd81HjQme/SXQ0vklpX2XUDbzAw13W86Gt9qo1zDq2IrKpb9nxm1cXttOrFfFR9eNmsWbVlR2biBVd/ghRbVjN4+r1ZzeNNKGY1x71OVb6jHqQe32ZWlYNDNIthn7ze0HVquMqCE+NaXGsunSreqGBWyyy+3uK6vFmVpYniXY7rHu/PMquyMFeCPp4MZlYvDRi4h1HQNFyhf+Z1n328GT3RD1b9xJ6pOqFnYKRFh9zLVEcMv1JfsECqGwM9buVjFPxxsU0xcryATOapqo2lfFuY6x+8xMNc/+K1CnLcciuajmMyG/yHi7cp0eyPiP9DJFP4h0vch0g4nzANbrt7gWn5Hld4dyfc30j05+E4h9zDK+eKyjBdX5h+yqLhxgVP9UD0jzWePumuUE/o2SKFVcz0KQtEoC8z5mwwrY6pVdHUMxqZI+patIKWlRVCGJ+dQAv7KwQt5KkH9MSQckNHEaPfIdHBS7Rkekn/F7vhopr8YvoUHatWhtMD+F+voN4A8luXoj6k2EMTiXiRoZzfR+HZreh3Mx3LXQUw1743OOJ31C61RW1Tx1QupBI+AHOLJajGJ1Quni2WdaTDSXxekRpicXwujjdP9wva5xuo9XcNdWGprTlLX0x3kJ7AvJhne6J5Li8/GiZRNIkBqtUU3o4faW6UbuRQbZSGO4ZPW+3zkxjtU7GIw1hMB6+YemYalsDai8ZcgLLvQc0FtKIr1xGscBKDs/nRm8cYvxlASuMBXQUxK0dYEglUMvP4oNS8wxgXzbCJHlbo/AgfhRPwYPKXk/igrTQW+RFXi85ctM7RdJtccx44jdHMMD8zzKb1vuGW84T66yGm/VgII3xGZolz7+fKG/+mvrJIAu13DHJTyd5xZrBm2zVuJusxp5Fo7nM517ie1jA4qxbnNGY+hBQHL2mRb2JUz+gtCV2HDhnzMZnbXQEDDf3BLd9SMjnh2oGt0dnH522nUcNMYg7NI6xLJUwqbnXKhXHaF6hd4tZwzAzUO0CC48YCM3/8nD4TDXI/EcH6o8pc/hEsxwNsmyqB1vaE69nPCz1YbgnuFi4SDCLRXJ1ymtvMHj5nu4mPxxpgiSsbUTc/jgE/jjjcO352c66SgzNOI8VsYJ/lqtNhie5jJTjkpmlz9uYYZqbGTQXrhupyVrNcxDZGVUXpKgZf7aFFkTvGEGsWscLZVa7MxS49WARVrNFVbN1VfhAdm6El95zYI2SwDVgfP3TjE+Nu0f7mcg9xzShyKyxiBOqae74+ZkCpCT8+7G5si4uxwoXhwfJyfAdUfgILDHy55OQj0tvcMZ1iZTPn2A3F0FMMBe0I9mtWlPuZOXauZhl5DnsuY+s1c2zo4xoyl/OCNV82uh7IUb+5fsULqcw133RteflySG8U7z+yvHkcPFveA7zR0pvB+2ssr4M3TXptOU62vH4DN9u5dVabr4bzXuPVbt4rvB/M8sr3JW7fezBw52CHL3rzlg3cw+4tGQ7uCCzoxkVDyRm095yNG4aSMzHvynDkbNT7y05+5CzH8q75RjbvCwf5xKpD6qMu841XEHonhutU8KnxYaDym+FyC97x4h6/bDIvgDV/AtSk
CfyUoM7BNTMCHcA2f5iYpqUKF3TO3BoRNohmidiUKeoU8M9YZPtItH3M3wyA2Z3Oi9HntGktHpgoWwSs1Q2t8px9L3Rv7ipjoRTRe6ZzFdbjegcyr8wPLv/ZPtkw3YDrl4ac3AjkpBdGzgskzml0e0CNycc2NHMMtQ/SGjnACdwAKloBkp/UoU7YJ9Jhliv/aeyY/vIMbM5odKuA5TFpzVNs5oz+2UFj/Qk1T36I4662HgDrrqlxo9EC7MKZzmwT6RJj6T6g5DBC4MQnOuVS40qjqXGt0dI1dZXjjsatxh1jNfnEA0b9Aaq+rohQDWhktO9M41SDIVWNAMFvaPaa0zkGNueAHcsC1k2BluPQ+rhnStlDZ79Iq3DIWCtM55DtnMAUMHce3WdW4wBwt5hYYmgRpNFus8ZdB6gE/tlu2VVHAdkbiyZnnI5QG9QEtH7O1PwVEGCqXQsCciQCvYN6epy+5TQ1FGdq3yRKMVZ8ZZEcNcwWRah8bYkumeo5gPqOqAXd3YbGCRs8sNAvGxyzAQcY4ZArIah6/mQPHIEZsADrW0bgku9aF2BPb2ll6oCTSQG7IX1ZLzNaVeLsmJiFiGaP+fsDZvpZ0h6vdEpPd/DNlNQXndUMoPp57ZwDZoidu9mdSiaZ2K7p1hxQJELfe4LYspT1wCGoNsdlhcp65B1duKmHMGHhInvxSgj3IYcA1wrqxK7oC6aB7BO0loPGvFQXw3KWKqgzbMf8xC1RLjDtha7IMdGhz0dfNMkCX2HAuuvRRyOaYES+PgX+fRqVD6C9hh7cs1hYPmSPHIz5AHW6JbphwBqMGTu91ilxjYRGBHDkUAUNsPMTp29CjJNsvjaZEpsBqUeo7fYyv9Ei5HwDzr0akGH9TIwleildqayR1cgBx3rE4/8z0hoZ9hX+Zt1kXtet0zqrAL65nhnmbyViy+yuASdWo4WX0dl4dqGo6Y4BEQW1V9DMDdTysI1VTwExH7Mc3f6YyWkX0fmkM9oE3J/axx5yCRqNORP6L6zXWekM9u6PMxS02fZZR76IosyMJdtMCjCXU21lAbvyDni+aAzfgMwPE9L07cY0TpJE9I95Bw/ElIlmKMqY64DWfoOx1g3gOqZv8QusBzTiS8C9D1XZc/qaemjxe1byf7JEo0BcsOCRzJ5j+D6hriXUXvUIOcASJA6hKjQErPcXWqscEYwSnTLzq1Va12xWu6KxGzEUYYYKsL4zoM/ktHb5vqgTunzfNMZUWaZy4Kugdi0FpPPUO2jxCFp2Q/3vTJozyWrFteUFMmFo4JQNxzpwrsEyvjY0FNauQyZfaINt8M0vRhYaWVF/qHO223J6zcy6bkYjvGH4kd12oEE9qoV8Ae0Z1vSFOPkEUkdQE0dNe0aiBrVrDqC+qIU8fcUnjRHbHrWIHq0fJjqKldJ0qbIPKPdoVE6IwDm7Wr0RJVp2m0geQmMLFor5NPyAu62NEOv12XTyDPxJcdOIe6FYg0yjXoI63jVgt7aAwn7fNHZdhg4mQDLGBOuaCbl7IKKN/inzfC3AekgT325W/4BilTFxoQvKalhjJGnOsPXH2lh/YLBMaUWjIB8Lb3gTLQ+g9qIEmKd3RPKeznlCY5/pG21kiVkfwBoHn4+tShMgUIPWmtC0Lj67AbCOwUgDWG9525vN297QjklYrXi/GwfFVKxXaBcGsC5luRYdyM6vCWxjUHX6CxwVoeZ8Qh92rAdGaQHFnlRcLR+Q/oBGbJZ56gG+LBRHigeyu3KAvyDjgz3ACa/Ycgj1s6V5xHEobDeCrwAbsUApRbzLbOK1xIfA4hxqNi7pu10D1h8z1PWNRofAd5ygkCJaTLEi+8RHgHhtAA4OvAQqR2c9TRyF2ZGcDcHC+YrgxkUM8qz5uSPwhS7or3TmklaClptjLshSyckK+gO4FTRGl/WbBJZNxitlYlvUyANO+IzpOaiSU3xBcsSEYQIYMq3xydIDBqaXTMY6JNZelVaUjY45hHiYnf+w
gOm/Yl43wLysrGC4eeoaMLe07NtvjSnRHDHlrwM4PjedlUvt/GW7M6bY0yciTEs9zWlFWFO1FTOPP2hFT3TsFlhesNYgxoxkEfDtdDe61c6n4dXdpIBGjW1nnJb9S1JW91vAbHaApB8a7jHiOwbSORoLGmrWpnVb0NYjLXMIOAe/wMrN0BkNV0r38cA+8KWYNOKXUOs1zbwVLfNlQOtzyfzJCxM5wCUbOakrRi44DCfTJMIyIxlj44G6FlDL1QBUNkprlKGl+oA9y2jkCP8RYNqHrDFkfvlXZ9Q1dMbJI+DEPBpellxtmbJirY0MJy2b5xFgXaWJYpm4CwB2g041OaP1XqMVpKUWYSD4RiPgsNlJFzDDP4CLhc3kHZDb0ZhniLGbBb+uiVZSD8RHq3PKUj9MsX4HRO2WsGdMcc6BmezP44bZSQP+IjT8AHW9Qx0Tal0tGWhB7XCJTfxZArtGybg5pDUuaDDF/Qb17UPVpxcIWTSe84xEdaEn9rSCehJU3aSjkWPLAMfmpJjmRNmB6unuQjccQ11xfFOO1cMRcFbsySdRChJ19s3Zi8f12F5YNCbHgDHcFCGqVJZoxgCzdw9YD0XAfknSMg8J283M7JjoTukcuJjZOifMfWLeAJxUmZmTNJ07lja/dnQu/oBaQ3feYn6mrxN1d7+6n4SsHgk50f8g7AuNOyJYA6yjEa2vX0L1AHOkGBtMADsSpXP5opvrYo2ytt40zgBnf6ZjAe0csPneM4uNKoC7o0vOWTLsZTO3LId9IxeFqrMMYLZqgBXvA/aqCRQ69L0W6MPr2oEjwImNgXWPPhzPCRb0mUeskV+wvMpoxJkROQEqt1A31e4cM2HzyrYjlrDfg8DUZqjySN+erFBhwzOnfkIt/xegEm1argvA7rsBs2TRudpop63hYoPQjvj8P2WlnVsiWtCZirFWgM7TONCY0FhUdIYHWOTYW/JPh5ZshfNhsaol3c0H89+lt5Acm3QAnJiL4UQBc9gD7HZGI6vBmHIFACc2orVIANbhllaAJS3zgVUaO22xmNqwH2vCYk7mSceedekYnSqsU87Y/Pmd7apFOt+XOnbCBtM8Y35dLDykE6fiGIvudiFnP1Fj0YMmPjh9xulvKNYr8VSgVuYRMJufgP3+TCv7Sgu7dUnBFiFWNUSrPCVMWzsZs45c2goQZpZV7Ev2c8KM4Bpwwn8MtUeAGst6GTEyc6jZ7BKrFOCkvLTOxoDlOaSVv6AVSeoG34BaxYjeD4BTUjHibiZmp0zsvxM4phJnp00kQ8TFKxs4n0wpMcXMvQF2J6krmxzR3oROnJ1HjF9Zwp+GGbgjOgeA/xuq8JXwifdcwRiWCFeaCH4BVprWqg+1O48sEXaYcVojGnU6v4rMr/uZaeoiL7rdiCmWK0Tnvqw+E1lOB4B5+kq044Bj3QIHR1D1pUOtJ2DmozFhgt0MEVtHl0oAlaQ2Mkx0nbHG+Fk7U8z35IhvtnbL/H7LbmcIWEdz4KcDtasThvBD4zc/J/FVaNiTLqCug7rVDZ2eLPHCUqon5b9k07sBdjmo6xFqvX8B9vcLoJK/2npj7PeAViNGa/pMTFjK3rCIs8cGx8dsnd0yFGYz664OmGpMZ+eaVqnN2IesMO+Gib3+uwz5HXBp33uIxh0j5hlgW4T69NF5UwJqfoba+zqjTawWgBMO6tiAzeLHAE5D341Unp0c5RgZs5VKJFg3pEdzozvbeljjogokpfGbrAs/gGsKVe0nwO2G2sUHJnhLtDpZZnzqRt/0DSsaOuOgCWCzlrzdOxBgRFW6TBix+JgVWGC+pW6YNHIA3y+N7D1bYIV6kw/tGTC9UmYsC6QB4BBWjOYzoEYtwFS0LBwGCZZpjQxQs6kW8+JTYMNC1l2ZJZ6/dSVp9GEcJKCuNdRbgKr2mEYRarYazPJDMUiJbxn3HFOT1Aw0yHCA0UAbal29ADkXXRMWLNVZpMnmoQ20tqJ+Q+2LGpNnUBx4S5Om
nAE/y+IoAv5LGosyYGUXgHl6T6e5BRS+ZQWfGcZO7ojSGHAO2GCmK2o4+K1SLD/r55MsnK4SfxuWepkww7difqbEimGXLvLGzKifmSVxjmQzugSseBHAOodXT5sM3f4QnRCA1WxkmMEekdsjAh8A1i9pZdXpw+UcKXilW30x30kNAKx74ht3AKx1Ut08ICYBbeU0MsSGY8NXwohrobjcL0QxzE5sWf1eZYQxYD9Oo9jXvhyxugJU555W0CH6CcB66DEWvwDalwy1t0DGhjY4bd0Vacx1PS9QpcaZhfP/F0OVR4A9GdLKPQLWIAg41h9jtTSdO4dWIw7ggpcYkRSbfQ10rR6wqzO0lwXiHigO+qTCR5Fp41tdaEfn4ZcewBO79LGShWudghCdanfBlJmMcWCovAJMXJ0Z4GWJovZ2RB9+okPGPvHiA/CFKJphQrjP9NK9trraetXWUFsDjUftnGiMWRE/zcOaMmXB2I6dLVnRPuCgzOQN8Z7U6T02xgHSwKikmJgK6dBa538C+C3m0TArY0DlPbRwDVis3IZ4TgB2q0/rtE5Uv5kZDhHtCJ2KlsPW5r6bzfB1hEQFWE+HdLYrdGKrlHz8xC4xfMEMZYPNAbi8PDQWDajlvwWS31D8sDDEK1JEzf0F4MRo2cUXQDW6jM3LdE726SxcssRAZ9g0/mi4WgAueUp2cAjge0SMvVOim+Igyhcsm7eA+y0NnG5KCVzIfJKTKhtQ+z2iPSc2JaI3ITp9wE58aXyyHU4B8V70mXbuNH40/ndONX41lrrdirh4IbwT7WTz9Z42NjqD3Vl3Mz1MF3EWpPNhqJ2mrvxNJ77+ZLTRro45jAXvtWUBTpgpJnbcYvlkRDte99vJYUkfwFmIGOd5IJmBFi1o5hCqSm+Asx+hhcv1wKPGQMe8tF5ugWiURoU9RUM67wrApT8xXFud8H/dqIbsOTZ4W4Oowu/SG6PegiYdaLAItf4uCewOxce47Q4D4T3qAlpWUE8D6v2jDtkufQiYL11ah012dmrTwmWfDd69gIxSjaEnaO4Gii+fDV63IIpjzbWhunnAyuYAc35FJy45iYXrUGKF7ply86mtEeDg6rdYCZd2Pmo8AbZ5ClRnNPBzkoQWup+Ih8Dpl8QcdmcG2LnfZOHwgPkvxzrjVY9EDr+3sl4fE+h7Kzuwd4058bWvrSnxPgGsSQUwS8+0sn8Arv7g3d1FjTJDhVtaeyfsYBunNXnWJaNAe0mjmGPFRpaJWfko/2TtugfsjgvAUvuT44kOfYMaLey+JRGnloL5GLBw8eDPSL7Rp1L0pR80HgEnVWeR0TUxka10iQfBiVrmB6D8XcCdo+/kmwlvjASPgcId1B5/EhibhAJMT1Vo4Nrk0oh/6XLvGiPCXgPFnG50DZRrND5v2e0UWmJHppcDMjvsvbHQY4gx7YIRf5XYbgAH19tRda2hO/TLbC9kJ9gHlBMHzPEXYGWqgIuG6vToO/QCAaq9WACeFrVNV9DFtA/FQicHDIVYocquYhd0hakBapBNOuxN4ShFMNojmkVW7bMj2yFUj1Wt95S2ZAn/Gsks1DqMAyrhp5UZASUa6nkPcKL/w9T5EVreZ6a4ehoPGn3iZcxM/AQjCA507H88AXbYB5jDNJ3DW6DiYmWc6K7wCF/R8D0UF89XvOK+4tX5FS8zrXh1fiVfXrcMpKEBJuGKIgIXgGW2aMWhuCi/4iWpFS+yr+RIfAg44SfAz6xjRlRe9lszw57eA+sDqPX6AqjymKFVl85sgNYsCZjvZZ3ZBpzoGWPuIFGoEpNDxrxsbrp9rPl5SfQ/mVKUkUyNXQ+Kn52nxqLL6Qcofpue8rfpqbENQvEj89TwL6DlFD1RqF0tAKadYOsvqFUPs/kKGrpmwSzUwyIeppr+HwA/YUuVYQzI+hnZnjPCkeDH5KkRYy5+Up7LrqMImCUbKB5CMYtz2adugEiY8ZyjfT9sZEEzR4yUv+n6vALs2SmtIdMD
9MUPqDFdYKoxA2pj6EJ71kBdhvFjjILQNHX4TZ1QN1DT9QHYB0PA/8DIFFoZQb0LqHsLdVQewC7pR47R1mwfZ7FggEYgBhRn0JCM51S+dSoapxonGmdEZwrgkjZ810TXCzi4Jg9nE7BTXW290KreA9YrS9rhN2LYYYorozPPCVUgcGIJywZwLfuU92OcyoDjjOAEB63aGhPtlK3yRJb+JZBU0PwnFL9/neA976KJCNTGN4LknvZoNaa65RmQWzPPhJplnYdLk2K9p4Fyh6XZBr9UnOCl8mj4y1x8RUvNvQJwsIEuZJU5N8IX1CbU7t4DwQY0EIHWb+hpM5zqAAo/ZEtkTV1BIyxlfbhZa8zmUWoM6t+yubmnYQLzFtRXo2tvo8E8D4ul4lAz+wYc67T+OX2dO6D0yQ6WHFIpCSTYRrWOOTIX1DkIAHF27WdWVOqf4W31ogonk2c8ATuT49l9wDY3QP2QBu4IOZPTjTem566ZeFOhlQ1pq6xRBSIuqBNledvSfdFnHerGox0rHsvOvoYHGojmQpxeQN/8UMddp2sOrb5DzbNzwO77GFlC3TtOTxlPNgErzZaZnDYuaDANtyjiFaoeQD0/s3SERuuede6oXWqFarPyawxIy7pQlWG0Adt3CAyfaKR/Gcr2gVoMqoIlhmouAAPFS2hp2GYNsJJnuqDG0QPRCDPl1gM4qTHbdd8ZGx4BBxyAeVHQoC+z1YNK0+e/0JZsIA3Dwp2gDeP4AGrivhNBws0QfiFtGEGqq87Isgp4p1A7rY1fGos3tjlKAqEW1NmwDe5ZwmuGj3QXLqL0xHT/DVHuEjbrmrgBGt1LqTovmtaN8DUUX0x1fjHVDXv7DJiDFKDaP0CIaRZ+dcEroO8BfHfV+d1V53dXnd9dEn5lcjnM+l+ssmFpfHXV+UVV5xXVOr/A6rxGWufXGF7i/MLux14A32PiU34gS7VvMyzsYV/rO6Y9QHFVVJRlYhy2T8+X18dcD7u0Y+Isy35sDuAsqiwH2B76unHA/b9hEp1XZkzXgKOawDJMjUCt1ZSY94Ets3NfULPhI/J7RJId4q4GyT4v06e8LJ7XFu6mOZLj3RHh6wIKvzMe4VX0MK5bBH7+BxJMTHpphaa0lifaaeoiA41HorPR2Gr8sR2ONoElndcPutd9XYUjmtfoe/kAkrqbBCta2SaArxtJKKV1oxyt1omG7NeP5aDriBgfEpi3Y8PqVQHzaA2oQhzAL0QSmi40ZsRrnRl+Wmbzi07zQqeMAazUUrj2o7Fk4llWZ6y0U3dTaTKWvqXzlp3aftYyX9JM6d8BS471Z5+RY45/e8L4jGlWlj61zwzHx67x2xP+busD7tdQZ/+avlAfOI7T5zsjcHUbf2qOakQ0Qhr7GjFAnZ7T+krQ8j+zZI7F7KrOrOQAq2UzpfsL4OS3YmCx4K+zW11Rll9RjrTfAHNXA2zrVVt1jWM6q1tm4vcFcZ52ablMxnATC+Cnc3GgrQBxdkln50w7Y7rmkbbSAD4vyQjoxEJfZ5QJHFUVjazSvizRtIm8LKiC7PIvifchgAu38B0D2KwLhoVfhwryfbYC3HloNsLkByafNAHczFcwiqxZCeqGzMIyKciOOMeGITYZHRK4jQa92cTPhK0+6mzlpg8XJguGf0/nJYiBrhF+BdRoXyMFRKVSSWZ6AqhQCbCOfgCck5bwbkgm3AIBDzS+4XQQalf7AE5PxfAHgPIejYiXNQvXQKzLMv0noPHNNmEGLqC7FKfPqOeMstQuCcX1uxLeKIluOZDkmgPNyEgODdyZdGgUilB3ktNVqItRs1kGPCPqIzR8zvAdp+f030K971Db7wJwEnpoFENMPWEgwoD6+mDdwDMQumJo9Epf8gLwnVIbUIeDwK37h0ZND/GJVa4DbP5D1yVb4wbtQyPAMRbXdC3lA+/J9jEEqg9Q5e8AZnbJUPcYwLe6hF5MhrAFSOj2js5hFMAfnGAcAFY9B/zM6XN52Oqn
ytB8xMZzWRj3hmqaGhZg3fwCzsElYN/uaOHPTxJLxwGz0gJWHe3L6mbnDMUDbFaJaecLcbEmOieMtULMPN1jrzPd4FsBPlkUD4bZugKyX1Abl/QeDM+LNjzAogBNnUHV6a1u80MLV3cfjOMkfe89NqqcE5ZOxPnMg2FN6kCoQ8NuAvgcpSc/XYUacUTLHhMKl/GkRD5I5wHLm342VoUnIH3NsrJg+9LxFjD3ifUf1DnYAWpjMWNaAIpeGk6AIf8rE3FG1jdmOq+SZaWvNTMCh3QO4oT7jDF8MfQNq+jQwg+yYr0eE/YX4C7oIpJ/y61INAO13s+Bn1+oXRoCJv46IaHeAPAwwcIWJsDt87dGMc0Kc7pidbZ1+sDxBQ18y0mh3TWt2wCBQdzKYkixWeQPOPJAw3Qd79gIvwffGlnpoiPfg3tEsQ24hlB7Q988By1GGI/4AXMUYkKwSufTGlCLI1r+BDAMQyM1unpMX8W1EQMWSRbynhHY6+Kdi4/ayrNebsFO/DL3XTnFCwJm/1hbJmBhRevyYE98nhtADZsMZb7oxH2yYvX+iIc34iXA2PurLhJiO1wrFutV0cIPXZKyGenuzumMv+jS78CmT5Vt+M6wonfAag+qcj/04WvqTs5977T1P7oaHaC61hnPgDlhyMMa+GlRKu3vmI0rR4Kbui6fBCqvNM6WbOsKagRYcFPVqGtnmJmuGyJOp439slj+CK2p7Ntacka0B6ivNmBhb94y7IsrWpkT4skEXGPmHyeYUbwGtiU2HjPBbhaIWyZaJgvbuzmt8zUzr3Umbt2VWqOJ7k2jmAIcz0RDFtC1nPx9EvkDwPzxAhZuxRLrbADgzmsxjt+IxZTo5jTybF29BnB30TWXNd7+98CE4R/g7HVplSqMeUpEcKTBFBN3MGAg59op6+mNHDlfAfgv6Y3sKqv0NWa02rTMhBdw21C7+M4M+5n4UnTi9qsbYziEWn+suyswQe0DR0sWwoVCycYP6BLKHRAvd8TMBGZPzOjIR9iW4/cIYNolYnBO55mX6NpE4xewh4zZZ3UifQlYqydgcMHW+AFZfPgngmS8sAMrpBOPDnTKPYCVW4ztIetHWcOy57R2FeB4yBr4fxreWbYD1KgK2G2b1ldZowR4Bwxt7wFr9U5r/Ea4UwD+ZHNphNx0dYdsm3xieuibvTyz0CMjqzsA/1dFWoxpv0xb6qa4a/dCQh3AUXPAwsXfC8Pfh9oY34WBO8IlMqCBUYrrBxoxmYUfyiXwRGNPo8E0jEeS2VB9ZVgmtNZ41Xl3Gm2Naw1Z15qy13kA7I5N668GYOE35fjhh77FLZAfQ7FLwxsGn9gI32VNY72ggd+JBCM2ws/zYpTYyMIdPugjBTjhKC0PE1VrSeQfgQabWfV99r/t6WYTZkQbhP9AO1fs5pP5538M4S+OeO8hZ8XMB/T8cYzVILQuu/orXr66wouMRG1rSewYsW8rgDl8BDw1ah2a6EId/MMW75d8AnBx6gqvI4JrzJY4NBHXN9T6oMuO6Kb4jfzKyHWgyXNocA7FIYNkpTxE9xTAHQBXeJqFueQDepY76oq6NOKiY8NzKfg2wqJPDAwNB64BjVfqs+HqCB7w1KDlPV1XVFlr/IJLw1URyNb0Ibhh6BrPG1ryWWjLOrVmOD1BGQ8yWx7QlaWGDDkaNpf7hjsgiDMeMJw7gY8DU4aS5esOGfj7csKQ1dWdNmQf6s4Z2BLctpSx3ZbhlhXO7RiymbuVgX9Quwz8Qdlj4M/PPsOFPzDLPlw+CvcNmxQN/Kn6SIZqu4/ZosI+aobsZN0z2d4EP2y+pG4M3EPqHuCpQbb70cD/up8MWRPcrwb+T41HPczCxum/WQRTcUPd/JslMJmCZGR2ZllIXlbBf7McnMrw/Zu5Daf2bxYwPL5/5uAR7qbx8W+GxTy7hrQhHelk1pVeZz2Y9/IZzh6Mk3+zvszIrGS4pWbZGP+bVSAN5GDZz2ROLv/Npuj4B2WX
Mq+zhayI/2Ybw/Nv9owaI2TiA5+9Q95kCc84P5/G7N/sW6YcT95YiCY5nZLF5njwcTmeqCxox+OWxeZ4LIlahbTUtQopZu7JAnE8fRmT45EvXZdA1vU7QRMPgnM8ssd5Fpxpq8pGZaqcb3YEBfZbZM0XBnbSo1UYGa4LMf4M9ShYGCvRH+qUWTPqp5EVHXP6FR+q4xnReDIwFixu88g0kqJ+SVUJj6EOBY7MoUokDVdXEMPlQ5UI4dkZKnGK52qoxDnT8auNoGjAdWv0RFvYFlTi0vA0Be/GUPRFhq8SQ2lybdQiImlbRHWjotYv1HbCmJ6+Qu1beOZ95GxcaBMQ2WxoIzu9FYmjmt3NstKNqD8lUt8TKSPfSbHYX1m02hRJpkW8D/9wpPKM0NoS/aujzDCIFnEM4q+LWAd5fw5imwuoFRLNaMc51IeR2TX0nDFRvgQpwsQYVChGzaHaD+d3gvmqT5GBns1AS9TtQ6iCNPuFM1NJUdE8xdFOrhBMI9GuuEUbf8woIJh9Fy0N0EvjlsvTD8cSsRZmRY7fro0tB/DJwSxRQvU+OXzUqGHW1HaOYPQbDiwNa/XHbARjGIfdv4bufrjA9uE+5SQ/wGGEGqYe4EPaiRRHkBeR9hs6mXtgo2srgyEWfziJtrEn2GgUG/xzwl5uLmG3hq2RkZ2UE47JHskJn8qm7ISrclgmwJMtnfChLG9BERdUnXDPUDVBRzuxQ3bC19ggw2NstOEN1msnvMY2Ev7FP9ac8NxwloIf41t0igduqs4az4lRnaU0Up2ZbCyqM5dxqM67fJMJeIen6uB7Q3WGBh4S1rlmWpauA9nNqY4Pm7jqBA05RVIdt+zAVMcluxDVcbAHVB1luAoC05CtDBfZjwFnfwNYh3nA/v5jrPpEKx0gRnSaJ3u0GifayebWT44x5aZzr/VP/cjRflWjBmSn2jgEXAuo2Qsw8kC9gypXQkcigJVdaRxrZ5YphTLgxC81HnTdvEYICEb/qblsB5eE3QPwMCgxtjPAPqgCjlWic5zViGiwdaAGtQ5PAPcSao722HiYBOTgR3S/CS1b/9RCTke3gBm4AexxAcCT5yR05qNvdQ3gCWCifqinRW1D8/ucDlHD1AC0sdN10yw1vwdkU4ev3/+nlrJDOyV+c4B90NI4pNPeoxUJEIMrwBx3iPcLovXM2HgJKBdTrGgU+JFmKzzuQOE2joxGArCyFlFfEMlLIuQnWnvMrNq6wbvGDWAGf5mSnv9TuC9lAViuGlC5hzrylSRY+pkgB8ACVxJqtntENgrYew22TT8T2Rbx8UosDtjaOSSSsmmt5cRwBajNQFu/gF1TgBzYI+T/IWQrFUTcNHaPxLhE4CFga+MvyRIva/omWULOEgXJY6h1w07wYLYN/u0nutlBC1fQSgkqZwOidjsMqHiUVvAcuKTrswHkLaivA5WTSVF/AhrhtP0un9GfHElXiU4ESK2hwyXUu4C6t1BVqAAuBbWC7/TtDrX1CoTGNLKy5m/xuxY0AzUbZ4DdsIDMB3R+AQ2socEx1JLjdsHhBJpQ0Nyc7V5qgO+SrtE/tZOTzwlxS6h8FLD8LqD+Qx+eAQm8MRGP79txXdjJnohQhSYgZwYw2hHmpfwaQSaeNbUV15Bmj8ZuAVXlU2K/AVh49NyjbOAn2jkkbGbmc9DlC9RsegDfjtldWRy4IzsD1C+gqQ3U9Uk9gVqLOQNXUPcKao92QDULVVEm2/NXWnFCvoTVE24jFQ3toMEW1DzOAN4hNLygMuxp0P/HpIsVcFxlDYc1Zgy8fAK+lTZ+AIWHyz3hXlA1lI/jCbBqLiJOp3ufRmbBjE4LcB3TaJoaAcI/IfpD5hdlO8K/CyzAbN4C9u0pYMWPAbX51Vhq/GmsNXZsMCzS6i7YTs7H2TzJmm3ZOJ8l5RVw3FeAFa0BavYBmO9HtMYpZuJhkJKymDEWnzP2yRTrcAy4ZXN4MdT1BLArt4AT2KdzvgKWDzTyr8xo
Rmmd/QHWn0kU+nQ+d9nam6bzYwmY6Slxynb26oCZM8GrkfqEJvaglV+oWS8C9qYFRDPQhky/yaFTATBPvgGFRymK86hHa1MD8BG9GU6U+Y4coQg2t1C7/07MZZbfZXPfJ4oeQGmrcgo15RQQvusQYM+qdLqitCIz4PgX6uy7ifgL83dnTBy9MbHoAMUp1C2D+sBdDKLZMtTG7urD8D/TiEwBKyvdjw27dAOYrj+gsoQG36CqOyPw4FJJCLUAx9cDrEWX1t45rQcfS23kw5vgTY+idicP+BQ0+AG1lhagyg1a6TCx+GNGhkbyUKMIuJ6gx33qHdTsMs0+oM/3w4hkfcoeLA64plAzewTYfg8QXEOtxQ8QKEHdY6aVdfYgoiskNaIaYZYoxv6pL+NvAq3ccXoDNd1RwlUEEjWoNegD6RG03KPSY5bWAGbuC0+MEVWfc8AtBybfhr24IlJVIvEGYC/8LStfGHDizLCCPTpbj0zE00a/5dsxQeBATzCW5Xkhs9sH3Aqq5s/A0RXUif0A9uSEeXiKqmR8EvamS6dfjisujWoLaudp2Dj6Eky8wN85VOUO6YtEaEXrgOsI6sRrxN4jYK5YSp2VAWv+TmdSFtSVYd0pwDw+B3I07NIbfeF34oIh947pN0n62g0gtma6HO0Du2NmHMrHKifmwT5geoO0rl8Au/sNWPh0JdY9JZK3TMGTPJt8FKYY8SMmZt90kQpbe1jLvmC+/TXWzj0gkWfjgCxpOWWL54njFKCuv4j5DnCiR4DdShPY214b6zgzSoTlsoAfJpjeFkOfEzZ2B5jxK1bLwJFMC29Zha6gdvodwJbT4rYnalFtJnWgqg3Fk0BbeKWqaOiFOWcscRah0aSaUPcI+hODmk22C1OxBco0+wm7oF75WNocWJsDa3N30OaI2ni/K9Si2ox2oBhLGy95hZ4yGqKeQY970NgVPU2qCcWwpC8v1KugYdbB3Mg0+8KobmTQ94A9v6GF5Bsj9U6jnGEIB0c3so/tAf5X+r6CGmFAjZa6RhcI3NEY5gCsATdGm/VUs8Gy7iLrpeTj7eDeRlHbxJmhrNVzAPtvOaltaSNNPdDxJ0aCeVrJL8D6kF3oLW4LFbVTRKFFvYc2DqD1NfUPmulAEzdQtcoyiRoeQ/1baLGsw4eArwb1WNBUDBpYQEsvnF4yNcp22FfeyYp6DmwdqJ1+AlQgQTSOADNHy0rnmBJhvlleEu0tY700LXtAK1tiZq0O+CSjK7vzAqCyMcJfBuZJqPX7yIzWgphEiHyaseiYSGaA1zBD5RGRPNU4A34SNNIb4kIOMe9lv7skjhZAJAu1XzzAWidct4FiC+rTLnwz3xteGxp4hLZZBY9B7hnWRQw4KkN3a2hjH+q2oXb6jvj+Ag43UHPrJfpjQD0XmbG5IXZPjLVyLB6RPULfSG2hOCnuG/E6tJGElj30vENrD9DcE9R0xQFrQmvbouG/ZCgzZ5tzRmQlezDUSw7AbwIPhjsMtcOHgJlZMgG7FnF+TwBH1WjhG0lSApdAwoFak4HOZwKe8v1grKPa6BAJLztpQWOyLAty8HRG/H0D9mIJeEc0zDqQ2oOqRp9IvjI9uE+k83SepmjZl0CGPvs2TMwrunWWGVnd+vCCqHsBZ58dHFGtnazhRTkWmQGePI3xDeAaQq3OAX3FLwAPay8aajsBzNYnEGTTYo5ptSkbtVjI2bvVYD373QS2stIeSr9hwB4+EeEaoLqPjNXvgIifvv4l0XYzsZlmxtU3YLZSjH2uAAcrhsQ6jNl4OLhYPz4iHgFwzF4ywpfQ4AkUX4klwxwvNKZA5gXq+KHWogX4h4w3mBb80cYBoPwKiPwyu/zE7EeovYgwwXkA2s86e18jAQRGVEbsRBRwy3Z1JMflQ8DCA9NhbYhKHPBc0RjWmfHRAsxtk07nV2MCZH1QHPdIwjgGODg2E8uvWyWfARvP1C4buSRUJa8B++wXaOSheLS6RE7PABN7M7Fc
WSDVoQ8/zZf55PKygZ8Eyob1PgOwS8GtuDsgUtXGBnAyh7o/WQ8qPHGsyBdcHbAWl7QuLOKsS3SPABtP9hfgA6vIodsdrQMXU7YnhH9Np5kkGjPWjLMYznSO5dAnSbzfAOFfqCrc6tAcKDahZvUIOD7VkScg09aGDLEqB2IxQOHso2qsZlCzlAeObWjqkYEHaCZOZYvRgM3DESLGLJ/0XJMZLANqdkSr8kFrvwtYrkPAHL7RsrNMGRZoHTkEvjxreEu8aD7JBKfJiP9Ul2INFSjq2DErlhdMwSmnOP2CusyIH1DXz4CTCgFWdEOYbZ1CC78I1OWDXdPXXzAjE2XrniyUhhw1VgA7n6G1s4lQFbCOPgDTtWXKdkznQ4k49GnnLRFrAYMA1N2ja8fCqtWl1R+wVU/2NCeGPVJA9Qaq8hHATVfsAGo9bAAzZDMhd6qtAIDlLr5r+myPn5hds8QA6qRknT41fppQey8GKPsbsPxeABuq+HItAj+rCHq/QOCIeTc2gO30lNspmt6zao5VscGcyn5tw6b5B8Ccy/fCmZx+fgJWrwxgVy3GlQU41hVwNIVi5yyRi18g9ECj5TANv8OfGYF7aIxlrCMb0KXLX9QP9rbY6TIVWju2UUETMD8udOwEqMnKcm54fqANH9TsWESgDNjfH0B4C1XZLyB2S6PSIp4bRL8GOJkpW9XadCbjGvvEeES8XLH+QPYBITn+uQPs1prW5IAWfleDlSNOR4C6LgAWvjXEwps6QnJa46Z1WmQsGSPqWbZTWxbDibakvPQA7P0lA7+PCAqy5MLyoTQBHL+EjeQG2khB8VtBmD/sh/E3VKRO5zT81AA1CMUv/mH+4h826hVo5oSeEJWFYuecZj6OQaStj13+Mrql55bRbg1YsVldPraIHHOmAOfgjFZyCdi4KBLh6iRGgxk2dsOScbJjvlUAKjq9nydirGhXj5lYbwAeHRqNAax4yOvpnitA+Y2+WofAL7hSXQYTleV3qXGtcaNxp1HUKGksNeZAZaGNF40fwE5HtZXRyNJZkq/fmJzPpwELJywx2eNeEGqkrRug+sCMjxAR3dH3wsb4OogZKw90raA2XsYjaS8nTPhYEGNWt0JzohfXjY9pRdhX1Mse5zEWqcnSwcMF9wCF48i4kXmEur6h2XsGnj1AwIHGfEzSDZmqqg0A5+px/JVWNCJj3JevyjVx+wxY0zKgHAfwPlEfobUsNJaChvagwS2nmZlgkVAQanbeAYcFzZ82y88/WR5XCRKyjvUB6/0NwLFuQjbXNX2FHPERBGwcf4mFFxYl5BsZl/CNx28aqzZDr8zH4Q4e78dWCu9UScrq6gDqqw8k36EOvoTEp+4AEy8jEvQ6xFGVseQnW7fuCbzOSZzXZcDazZmJF8qIs+2jhWNgPHw0RZwdANgNSKghtVIGLoXiWap3xOIasBYnxHmauHJpKwU41ojWU574vtDOscYEMHt0hn9YsrvURRzihCF75mPi1yGgNkk6ccAqwDUbwegcCKxYOCXDS8shxSlRszT2tfMKsCO0bJwvifNvqa09oFiH4lcmuM40jpjX+T9voRHSFTO0fP+xdF5ryTRLFD7nKnCemSFnUEQQFFQEs4L4KfqbcwQx4MXvetfsA1ZVV1dX5zg9g3W8hfDsLJiKge52BBJhbFiwgj+GOH4O4k1+RKoNaa78ibydS/PmVa7UnlynJyJPlxJ+VGXlfFGu0ZOsnF5BYlYPVdsyDCG5bbB1C3Y74FoZXL0Avf0WxPE/JXuVrP4h5f9Ad/s5MNaHJLcDx4EUDgLtgEQ+Aq+AvMQD4knxMFBsBbJUQBLyspZcs7G7DGFiqtniqglZSIH5JEjrN7wA3fyuvBWiuQLGHXClL1O1EcQ7qkOym2C5LRTvn75Cpv/A9SvZTYPFe0X+ciNybXW5GHZmv0W6U4g33ZYrvQPh38BM1ktA/qTuvTekMLmHuI1rCZMpCMuOxXCEx4jICoFLXv7pnwhP
nes2Ri5DYkk5bnyIN38Lab+Bkdn/IE42B3G9lBTHhwH5BylL3f+RDZe1jrn2VyD9B8m+pOGxM56xFdt2QDYD0glINyDtgKxDVtcCx0lAhgH5F5BBQI4C0g/IYUAOArIHibA/wLUTkN2A3ATkKiCXAfkvIOcQp5SD/LqSNRyIN1AO+k+SXd9D3N+8yJKVtWM96wniXycg6apk/DkeH2k/DFxr0vjxIN8LkvU/JLs5l+v9BxKZOQ1c04C8B+QjIOOATALyAFlSsFIkEFlbci3OKwjVa456E5LPgZHEs2SXpxD3swjxr8uQpWU5xkeQBR9M74GlqlC61SHo7MvbvbdC8MKxeWEBvPPAdAx0eSbrhR3+8M2zEawK8S8PIYk7yc5XIT9DqWcWRQ7XIGsy4dwvQKoyVHdAFgBYe4Dkb8FI6UuxCv0nS5sfLsdAp/UfxOvditw3If7BRUCupHL/Lb+ZxUBzHPg9yPV6HbieRR7nJBzcKBz/JOdbQ5MVd7kVaCqiSEE2Oa/yw30FPklIbViFsFGN2AY7AonMlANShEzrgSMDicVBN5OS+uUN5KMD+t679Kgh81o8DEKlZeJUGtdvEB4TRm39swv56srx1Ahk75C1GnhSkSg7EenOQTy2ZlHbKS0GpB6Q+YCUIJH5p8A1FxBrAzGzvydSW4TULsBIug3xel3JBlJ4+YasPoGtkfydiMjTlhSepffnB9rvIpEDSPUEdE5bEC7PxK2h5wJSDEhFpJGGuE4LwtyD13NAngIygnRPAsckIFMIMxGO74AoMP9wx7/KLASy3YAcQtbeA0dP9rrCLeFm4KEoujLQ3REqXLcfeK8HpB0Qm18TtmmoB6QakCKEQ56EzRuXEDf+CeE4JaEjGBOVrqRwfBS4JhA/vQ9ZEnISY+b6R4FVG7ST1jQHEC7PJG3FsAvxu2vyKl/IxV2iZHjJB0dZiRbbEOemIFlGstOyyOOSvHheYAY5tzNLnvSXDsF2oPf8GIR6C1y3kC/rWynbnF8E5AXiJA8g37VAdgeJPgaOT8h4HfSuFgLZNCDLIqu5gGQgH7dyXEvmpeYDxSDK1SjEbcUC4f9tTSCRwkiu9opI91tkbK0ozcfsDGfnwHIHpATT4fUZsOiCfjYL8Z668pG/1/+BTP8DI5cgj1vNiJRK8+I3FHxyoRA8KjKfqHTlnzY+Y4NVR6T9B/G5IpaxVd6bXDxERGUiculLeLoildQqxIkmIJHYsVwvT/J7OJNm70Hhmp78ePSC3yPEZp7HsPf2CTa3Dd3zL3gbLh6tEWTB/Qvkqbbh1a1UqlJpS+U/Q8e7QpKZ4Lt9Aka3hBvgegLNPmGd6B+aS8+GhTfEk1VUGij6jWNUfJmsLBlmSYz7ILtRkuStNxGfSzCHXvpMOFTIPcMYsXmDkgQYzHXEnhKDfy6eJHrxV7A4J1tkxh1fi18DW+v4OqfiSaMzTWJhi6T70X3wlCx5O0PJT2QZa/6f0uTP42vrLZNc/hrGyU0kjclIrClsGM7h96KS9XYMlyjeyMwUGxni6WPO+xsoRD3k9e75w5GeucZG/gvzKlvvMpwYGvkNxyJGJrwi0BuFI19GNsKvhlu8SNDrhCeGq+Ebw14Qsh/mzbM93mLopXh1oJdRgLjQ5RKs17O1pWEpPDAshqPrRmbDl6EPLvF+nISXQx/D8EPo4zQcMTwLD0MfR/ishOOhj7Xwv9DHBtwWHu3wbujj06L7+EDlJ7wR+riG+w+Pc/QuwtHT0Mcl4bjs/3HHtfjhfTgZ+iiFC6GPHKF5b+ODW/0flsbQRyRcCn3MkBIHEzZOhT5SWIiZ8neM/398W+dTTG+tMB+zWiGrb0uS/FAeb6Nw/NHIOwX19hrOPxn5z1bTIff3nLc33ha5DOi9lSmqt9nwu+GcsCTMh/kkVk62kpLEJYnxOsebL4kricM/VDoPs+FZwwr/c/l7aTHMdy2H87wmYbAZmj/iPYCHb9z8k+b8rrkNTGffcmcwG5rf
s7zP9wDezZjvG+c9rvNKyWObZvLY4pq1w/dQDN3ODyR6L4ct4I14rxeBawqxjYdh1lWYFzC9BMZnhXNg7A50FgMr/1qSncnWvdRKQq+ZsPh7luiIzQ3lI0hkIQ1ZP5ZjtgVxl3YhXvUGkrgCHf9BGoUIZPohvcahvO7/k6uTNuu7vGLz+KBc3yjX18Ir4Z/wi5p+fLdWEfmyRC1AXBtrcOV8SNLSNuHDMYZkZMJ3YQxLJeEsmJM8eS+UhGKYqCwnfLUl8mmzQxLi3N9AUuuS5Q5F1k9F7hvS2HyDRGL/QdxyD+K/W6TjcOVJ+Cx8AX3/DxIbypG+gbjLlYAUILZ5BDdDkZHNoecQN/UAcUZ5EVuS49fNiez/k8p5XOTiBxK5Bv3uQqAo4l+b+Q/+LtLQ3X6FLEXAoniv24REfkB/GoM4X9ci/rZIfxYSvZH6zlLImfuyQTAU4R/Q/4PE/uS4n4esF0FnpwD5sUL6DTvnB5BmSo61W4jfv4J47W+5Ls8hNvtHeIfIwKrhO2zrYUPv5hQSvZMjX4Hkj0CneRIodCBu6RcSSZxDbI+NrLIFsVEYvciKrFrFXIdtAgBzwjZoAa5sO/gH6d6C7vmSyLJI9jSQtSFeYwZSqYLOxxak+S2FQkKGRvdyPUjdzW6IbE8gSetQl7ZQP4bYIiOiz/HCR0EnsQ+xZQtax9cQW2RELqyd/UJ8mtuFTU0DiHM8hdAHL8KljPQ2NkX25qTuFSCRotSjlwpUk6Jj26MI3yo+E+n0IclHMPoJRmJrEK/Zk2vhA/JnZfSf7aY7kNoJmI5IlJ6HePkpJFUC8/LxXv+DLAijr4FoINFqKPIadib7EG9/NiAlSMQ9kJ+/C/kUxh4lavuQ5Je0qWTzl4/X+IVEreW92OpyRST5ELhmAtdr4FqGuGf9wJWBpGKBIxEoTgNyFQjzgWsEcRazochzONYBo8LYrnAEFqw7P9nglxOZ/w/SfAK9+S2IX47K1TyDuF9DCUsPEIrcvJJWAbakWBlAynegY5sFIz//ydFaE1lbhETliBR3Ie2ogpa6UugmIN5RVcKCBX4Ipw7A9Yb4OOj+DiH8c8Bjmiln11rWLsQ/zkK8OQltWx3hMmEK4jZi0mjvSf9jFfI9kVfGE8nbwLFjC7E5SGQ2AykegX5ZstxUjqtUQJKQ+Afo2r6KsPdFS5PHW5yRY24/GHrdR4iz/wvxu6+Q9gfoXh1K4+lIXplbuWz3hOugJZWNEiRStYnzn43OZxD36RAyFwedzjrEm5xI4yUJaVqoga14HyDeTkak+gGJJH/kN74R2YxA3KslufYth2vW3hsi44eAPIuMDiD+T0Ku9x6kfxh4dYJgNct9VRNhgXpp22r5HeKnfbnueyKLuyK1vPzGcYiNRfOLttqZz7G2yAL5cMQPOX9btppx/tq2dBxVwrXQiHdMR0Xenh0lbEE3igMxhFG4ZLgfGjlwM0AE8AE33AyNjuH+AUfAKcFO4A6BA95EHS3ZmD5as3l+9BvuhEYjfCYAbw+PHuEe4O7gmP2d6yXhMbjj8FjN2YnY9ODs+BJFxSdsYevsZCRJC/O2XHB2yuIXhBVhXVk+4mXlnf8U9lqoF5N37sQ/ypr1hiMjI15S/hvIe2wrX2dH7z7vfOvt450l7O00JerYes7ZObCSdHaOeNF455/snfCa6bptlRNGSJ67HrMycNerZttd50/WLIpzveZKabiNsXk71zNWaW7jPRxNG3mzonOu51gjzURZC83EWaPOzFhW3PUGK+MZT+8iz+R553imZEru+tDS5K4PFOyebcPME+87z5zbetWbuZTkir3DzF84xv9ojsJRdJdZds60wilbTM8MLEvezD/Ft2cbhuhvOLUGzs2C7soTpCoPWwwZ5lKh6DQc2Q9Fvy06g+NQ9IeQf9bTDiBuvgbxnn7kqt2Hoh8s0w1jJ6AXKcuRlsd6KDqyJBtUQtG3cOwfGP8UHwNtgxN9D2fuQefcIv6ivKOf
YdthRh/CsW/Qt42NEfdpDHFGPsSriLi/01CUqjd4B6zYo3fKw1045oSi93wPECwK3VD01Qop+sRIZmhDWJQxPQuxsUuOvEjd7NkMAXSAJFAGhqGozcZ94W0oemljqnArFLUpMhKKnof7/4HO6Rakkw0cqxDXdjpy7UBirVD0hiK+sT1clHeDDczybbgujE+EG2AuBnbfwaSZtOVL7xviVQ4gsU/Q9xKSVTuQiOsErpjIy2Eoeq3S+Uekx8wJho1FMJoF1yzHfErHF0ldiLweQ5zEIsR964aiRySVzwyVIV1Hjs4OxN98guTPweSu+A3Qe7EGcEbEJzaK7kKcxxQk1ZbsNQlZ2gOj8vDyVoLDsLf1CHHPdiD+QQUSSdflelqHxCw9ezZJZiCxWdCrJiDxF9DpS8Fm2+h+OPYL+rUoxM3WAyJhpLQRuHyptBcD110oumMz9ibES+5D/LYVuM2uuV+IeyhXJiqZcwBxNizqvq0m8hA/0xKpTyBubxSK8gECgyYQBy6BByAJWBs4sDmsAHELxxAn1ZHrawqJzH4ExGz2bDs3A/HHQ5GrD5HVpVC0g+UOcdhGdw9096ykbNapHEJs6W3YWZAoegDJmv962P2bhSS8wHEbkEWIf5wMXL5cEWveW1aHyxB3ORO4VkQmDiRSPYTEEqCTvoP4WWsT27bGWYK43oJIvASJD8HYVyi6QTltUE4blM6GbdENzHvTthL3EK96AvHrz5BIYk+uhrVErmQuQLpp0O9XIGt3YNzMNq3YliDfcfCzDno7NYjfTUEYGMyx2pH2fRbiPFoDbliljiGRRCMgywGxml6iyJk7DfqA9RS+svgHKRZA99USvRbO/IGxrPBGaINIy5qicA+0Id3QpmPDWFuYAqNWzithdzwHcRZjEFs7GHrr15D2Tcip7DIK25T1NIL4N/nA9QLxdtIQ+o/Jliwf5XBkDiyfgtVQdJ4/QzUsZcHKIugyzkCSAUlD2leh6GLYtx2fkbU6mB+CKzXQXcpBFv5J62U7FK1zABCthi8fwUwajDCQVflEt6HrVSEOUVd5PdQwdRaK1vgWOngAxiSJzwrnwOKH5Heg93QqcmolZouohQeI7+9BvPolxF2YC1z/ybWUhXR7UhxaDRds0b4M8dv3EO8vLyGjfcGK3xU570tlVJFKzho9N7uWRLxnCONtVuNt1kazKwgDbs4snUBaHdC1HasR/+BRrowN+bOWjjTEfe3KdeXKlR/JdXAiV6cOiTD285+RY4gtu9G4tnKeC/tbRYg3yMhV3wpcicB1EJChhLnZwFUOiOWpyDmN4cIpOLcFrpdAmrP5DkH/4hHi/r1AWpZ2vg5i8AB0gDiQBEpAH7Bo4jaR3ojMLwdkKSDxgMQCUgpIPghwF7gWA1INyEJAKgEx04nwwhgsTcD2Izhni5ioRrZoeK0CeusxyLqNcrGw198UyaVFjh4g1QFoO0lD90M+X2fggvXJtGmdQ9zzG0j5BXRO1+V1mperZ3YytvqaQrz1TcjfkWTjAcQ/tXE+afNJHRKprkO85j5k1ZOj+iyNvRmRf2YqZVPODYSZNKUp1ERkJBV2yzeB61muoYiX7wTEWpMT9leuISzEzGEbTSNe5lmuiz+RdAPing/lKlm6Z8LuxQNkpQheDUFvfC6fTgbi98sQh6WFr97rh93bGGT+G/Tcd5HXPiQxDhxWopFwewB681GIX6rK9VqErJ6A+RToLh8HCpZo17pnTKQ0gniNE0jqJ3CcBeQ4IAcBsXL1wn6zJvIYgTixKsTtPQauilxONXCV5VrqBq6FkLO4zobhj8Nip3Jqy1B3UBcu2grcHdQ4ZB6wS3MHOWv/7iDLV2sGGUkcdgKDGTYbgxOb0dzBwNan7uCQvcagJws71kPcwaY0u7bWdQcj6y7u4IU9iDt4DsiT/O9k6cZ2Fu6AA31n8YQ9yOKrjeN+3eVLNfW0MCpko+jXE7zJX8+xCVr8sQ2Ms/ht+wa//hJ+NXwV
3tm2w6/fi38M7xuOw7GSkc/wleFf+M6wa4H9+gkTiF9nIzU+JSlvLHTG/9g5vfVtNBgfWoTjA8vjeDkcWQqNm7DfbN7GP7ZZGE/EfoHu2w6BPyxxYzZ8Y77U474dk9G3o3AkY2RgJTSuh5dD46IlZlyy/dg4BcSxyz7YfX1SsKaVxtjDiz2x+8aiwrnxLDnODWXh3ESFOTZzNwtWU87NIuV4o49V3TTDkYaRNTlWhevCTeGGUB+/uunZKsW5ORAO+ASQc3Mmj6Hwlo3nzaP4B8UwCmew/WX5cG4mKqw/GsObbcq6Rn7kuLJ65p97LwwzWYNUA9g08I73YHvyHxm65xPD/6YG+QJQxf/FIE0gb78JZhZAmzfN7xs2hVn3659hMmewLuM7SCtjg4H9Iv6nYWeeuJrYLpUJn0Lz3ZPZO2L8QZrAPeeihUKqqMgxUPBxEz6VB2rooJiqG/woPyTP/V0i0iLYIaWFE4PfawwtLYNVspqeMXAScQSvh6Stpoy6v8ijX4YJAiYxkbjF66iFWvne8HMq9lzmENd2ibqs7CQxWqigkY0ZlimFQhRIA0kgIk0qaJZAbpyS65If/+CPQKQveWaQWydCspuiHHJAdY1UXmEpTfAcCUxnVdpkPPdM7klpuq16IvsxspbexlBHsa+Clw/galaoIlillGJHJOWPeJpTJdCXH3WXIa1JKt2bUpbxJ9g/cphQU6AAilhMkn/l0l+hhIoYiWOfVBYp4OIQoIjcBXKxvgF7sUPAU0C1ICXCJY/hCJ1EO3kFkLskzTx5CfwHqG7+iDRDA0nSHJOUXVEZ/qMcku9UhYOQhpb8sV7zYuOZ+8Z+3n27s9ErMu+bJDLvhb8NHUno+ZH5QnhqWGfk/WT4cD81Un/yaTD3c57h7HNRkoJ8Z8XP8axt/lIWnnkSMn8vyackY+Gb9HfF6yNN8w3xmzZoRua7fIhpvq+PO80f8Fmn+aEMnYT/GXI2FZkfcEIVywgTQocBJDZvfdJvZBmDG4v6hlLDk8MVRoRJYVoYE0aFCSHPk/3GiY2+fmNg0fuNf+HIppEuE0Wjw1jsN8aKaCLdG2aExqtCP4ZjrpEHG7VsM3a/b+ikZgzd+y3DvzuD5VvgFcDZeUf1GHSmrmGtBawZ5NMofeOzf4m7h88csIjzHz4e6LlgvklM5zXDSOEN8VxU6Sgbrk0Nqm1Jc0rXKdqlWSFGI8WGtCuGiW2DeASOTHSS+KTI1QqqCRLlRkeG2R2MbRFRtqWklGCJKLuOVTeDQgK/FeX6xWAVAxFbh1qAVyJcnMfrweCO8spRXC2M+LZi3Aw3yZRXWYElaTGyFyN+dxlhYhP/G7w8B1PO1TmeQ7JXIx1pSrBJ2ReJNU1xrFLeTppidvqkzfnLGs59wPb6xJ5Zhd88xPBfCpMLQ/ixEkjZ1K+Q5qmRdUqhpqIju/VloG6Q7MARj/9MYqovCoNSJEaBtijgTAwOWKc5rI+UDCrLLeSVDPg0ReEMjyX2CYFfi4S7r6WQ09tXX9BMHFsOnxs2bcHlxLQEiv1jqo8dS8IVCafXE79rG3XrQIIHYBiKcQZtUAjFFqwPGpiMs+hYLYBkKFa1NUeM75nF6pwqG+ZCsRyv5hqWzsByDLQ9n2Hxv1Asyx+gG7qVO4jf3ITEX0KxAtEXMFbAbIH48hzUxUrhuRZYXQLTIzDmhWJF0lq0EcUgDiSBPmBh5yyvBnHAPGI8cDOMPYJOqx6KRcO+zQ6Q4wzEtokxnh8YvIZiKd5mMYykvyHu8FSkVhQZH4ZiSVtoGZiWbWNu9kXm5kUacxD/uhaKpTkdNqyUQzHbHPy8iewkRZ7qgasRslHMbS1ApgVwaU2i0UCOGOgXzISt7JtZiPdyJfLqQhzvIhB+iozSgV81FLONwUEf4nWvIM6aFeCPzfGWZ5a4BvFHMP8kfA7Ffm3sM7Co+W6kgYV/A0aU+AcV9RmOH4RiY9gvVdKEephYazIoAWXA6uGeZ3SG3voS
ZL0Blix3D1TQA2ZfiOUF2y9U1QvB/gvHTZPZxCAO5ACzekGk1zZ5xG7Cvmdt6ojkHpHc4/DiG7i2DCb/QjE+qP0L8Y6sSoaslUFrdSe2PEtA3NQ0FDsNO4++yKQEiVgvjenj8UHn2CGxO6RuH26fJO0FYIk9sMnTwBrKYThmtjWFGVrprnMeaugPlyCesyPXjdlsh//eQLe8Hop1iaVD+XUoyQ6lYV19YxFS/Aa9+TWIW7O2vxFemAOr72AqBnqX1n62baPcgTi2X45tcRPV0Gt4ELdieeRAzqAAWNobRNxk3R1bCUdOhKlQrBWu3YGJDTCTA53T2YCUAmJdYc2mbANr56uUwaqVgdPbZm68TlsU/jUTtX+dZHd1za0l/5rO6l8vMmder9rE7l8vi2+wo7re1sbpeoNJ+HqTqfb6kB3V9YGM7tjOwL8e8h3R628zesXHQzny7wsPweO4+D3hvvBYeBTis1SOYefNwJmi/vOLX2lZfj0MTpFODEo1A+/1G/cGAVIrsoNWah4zMwZtovj9D9X1RcNyxSBxZrCSxmjGB71HBR2AL69Ifol4gdQsYGYhghmsLrhwswAJKDeBJZwvRPKFt5slYMTHz/Gqhusk0/2iNLwXCqaL/eY50vECUlsfXvEpUEtBj9D+yirh8rCZG9gMQKKLJN+tbMNuEnYaQ+syQfn8GSxdAQRaelb20Foife5CEfYDU1k41IvnIa/N00Gv/cHHgtt8ltZr/3I5rW3laPhideq176yHe+1/QhqH1+7yRK+9zseF2wUuz7XnJYlY4/XaGdlJCWPWgD4XaL2fHKvZsnQ59Jk0r8+oDdKfCZZvn57QFUbwmxG7bZ3ic8WS9rlq6fhsWif//MI54cvQnyNL4ifb50++CazPQbtLLUu9+4oR95VHyVOr3xVD7/xF+J+hu+2Kf4A/a6Fjqy4kbeGmcEvYEDaFgZ0j4UB4LBwKT4S7wh3hvqFzvy++L4wJE8K4MC2MCCvCmrAuLArnhD+k1puS2veUJFPhL/L4qvgRmCKUW64aRlzC+jefSJYIFfG/kfwR+2oH9eaXxCTZiaLuPHmoPy3hWwmScIpkpKxsLUjeRZJVokbPmJyqGObXhRuGKhf/klwl97D7cSA/hZ+vWwV57GZeWcD83ltbuloLV0O/D8a5r1zj9C51ge9SF/guL3lkfPlKa7t85pLm5Uc4/mDkj8Z7ucUdMu+yz1XMy12FOOHp9OU/8VlhRciuxKlzAdSpRzhQqS/qsXm9aDsHp85B8O+tpWLAGcaAMwyDLtABega2YR5wdjHg7GLA2cWAneeAbfCA44pBuIZyDd/KhUGpjO+cgX98QLgfFBP4pOCKAAZTeaAOLBgUToETgy/AtrwDzgoGHBNwd4SEREoVQ+/dN3RqHwStGbiZI5KwiwEU/HZLuEYGBkijQBpIAg4QAbDUddH2J4arJCIJ5NbhSETq0yBN9GkP2TFcFpVnuDYcSUxvAxSkn5lFiLY72SDF06n4FUPb1ptg/G5YpCxsYTRgbz9gbz9gbz/geASd/7BCESdzeJNW2+WzORywyx+wyx9wOjRgfz/gOGrA6caA7f2A7f2A7b1BEyBYJoMHyU0q3muAarM9/oA9/oA9vkXelza1U6UYbaM/YKM/YKM/YIs/YGP/e8kwlNcOPc8tjd9va5dXh3I7Nqz9/orl4/DxT7M6gDjdeCj+Zevc+JutglcCUoXEP0LxkVmIP1km43c2yhoUQvF7a71xbkHHH8IlC/wY7kZB92I2FL9Cfm2jtsEDMARMdkOoG+tIBkOgH4ozehoYxzlm/NwG3PiFjZ8GZpfv6sfZrhiYCl0qfozKQdjLv4Tih2Hv9AnibJ6F4hyMxndtsOpBEl4ovh/OX4HumohftvRsobUVFmc2t1mxxtvE1iaiNkKuKutj6waREmjC1fBPD2w9iD8Ixdesoxsch+INLpMYWoeIL1NW3KYxiAOmX4OrBZyZWrSlkYFFXOeeBGjq
ZczNB2A2K3AVuAW4BbiirR1XRR5nIKtrgew2FC9hs4TNvM11BpaUAjGye4onw1MrqCgTopvn2lHcpsBfEe/Yh5QioJOS4+QnFHe5YBX3wt7qP4g7srh8JkhDkzDXxR2L5feLhvbGk9nfdxu+fkcGzmCe3eRAt5cGNS4gDeqWGmeQkyQrLFldOoNZ8TpoHiTZlephgDPQLaiBJ/zj2tLgXTzf7HcGH+IfhPpjhsGlrOkfGwbcHXEGQ0bZgf6dYaDD5sGO5D1OkJ1Bn3PugQ6s9TzBGWwriUvcQXIGDXk0bcx3BssKtyJJS6gz7wFn3tWwe38Enm0Yrh6IXTNcvjbwUxNDbxQxbOIXWbgDC2eGP8cSTA0dbwft/QbakWtZjYK3p0i6K/LdQzJ8JVS1K2yC7jPy7YEk7/APLnxpS/ypeNm/wXdlmQivHqSex/ySA6buwWlKSbhRoj4xMO7A+0tCNEt/5HQd91pNGtjyx2Us+n34qyfw6Qpb8RE6f+/SvxTeSIIZb+cFzVOy5jXIgvtvIvxFPpuBv4J3s3FJFoRz2Ml+iy/B1/6DXz1XEe2o6JB4feXnj3Q6Q3z9S1LlT1U4wzXhunBTqArr3oknhW5llrCbU9mkqv33Ipi5l6/KIfWquEitv5I17B9K0AZPf5TpCur5D0PblboLNB53YYXVyMKW9Sl3oSs8DON7wHOxhXPr1u7CA6etCwzA7sK79fHJEQe4C382Nk+OeaK28Cv8Zqc2cRDzLsqExe4kYdFMWPtOstYPJ0XCL3D4uhBVRLOKos4TpIUaUdzQO90bJgw33zD5S5PF+fwyL+LM0wm8eS115neEeyyR5nviD4QnwisW+fP3LHK8+TuJnoUvwrFwIpwKmZ2clTtb0zsrF/TyFQ4QnJUvbvrNu9IpKLK8+DklaFb8vCLjLMqb1y3TeY6lU7zp4+6wWXB38jw53KnzKGmnakHdnYiNrO6OR0Z3KDV3J0Mx7KQkTypUTL5/PMPcuRHP0YO7c8jZ905f+ruSn0hyJBwI1xSqxSDv7nTNkXpjMxupsnWN8ETUjzBE+pE5YV6SJE8bIylhTJpxyb/5n4HIryy8iX/n+WNkJMmLLLwKh5IcKWxPuK9PqEXWLZOJpuXO4DiU4FjQQJx7GUos4V7C3QDWrKkkVgmZWLHBNdHBvQ60LXsGJmODbxAPJZhZE5sB9EOJjbBzOh+QxVCCw8gE/99iYJ57Mrpro31ihzh3LIMGFmffCtegAHSAaChxYNlJ/LN5NXGEc0AKTol8aNuLRChxghdPEBM3TOcJVu6JBxTuiezOaifxivMZeCKxT8hG6H1Y8zewYO8K/EaiuKqaYGJLTAIwGQ8vDMRZPnjDI6HH3C+6x/vCc1TnRdd8X/S/QC+n4k/E7zAXvdBonJcO71O9tIXsNp2XTfFN/iDoZapQXNFNRMLORgLiZ20WehnJR39W9DJWOD0wfblj2np5lY0XeszLM8+RnZdLiTQ9vvzHs52ZRf6bZmaBeTwyM2cpjszkLURkhnk5MpMQflhLjcw88lcXM5iIzJzzBGbmn7DPWy0z+zymmdmTfldIw4jMrPFXMzPMnb7txceGSSGLBr9d19MY27a3DcucBLWL8p4V5oRZYUH4aKn225wP+m3O/fw25e63vyX/5c8y2jwi9tsT2nx7JP5YOj3rI6e7VqZzd6yLHxgu3IdZeuwDfzvkPlQYBh9YergPKeE/toEPQ+l3aQ/uwzud++HeanFOi47NFeJ4ZpDxn8vk5HmW9D0XJMkJM6TmOW4V4z/70vfYKfrPMxyFPZ9I6Z9wS8dczx1Ovp7XyNFzUx6/nII9j8XfBaG5UN6xFUPU0CnlDVsXsLUjw86OgVePSOPY0G3NSK+E55eBn3pDMHUMS7donJ8hyDbAxSoqj7vwzQ+wVwF9F7sRDLj3i4pOdrtZmSQtHVSc1KzhzyHaSyv4lfcNMyTLibYNF0jmCsl2bCdobsU46cEOMb9yj+Cljgkn
hV6hrYzsSqK8pvcM568kvjZcJU/ezbysKsFlzLpPMWydzklxQ9nEruuQ4NTI4NKnqHxH4X8VhkLx/7ZJ9im2CTGHrhcvqET+CU9VCg/Ipw+SrBpOU8oDFeBnKJG5KX6PW4b1A6Q9irVP2VSJL0nmfmcUSv7p/6jSIuwwF/IaPpfiG4ucbzUWxNO8vEaHea7RtWbnNVY5lWisyHfZGpzXaEpHJxCNJ07LGte87tm4kOTceqKzuSubI+sMyaS1syRTnsFxKMl/gyUT1kwNkqEkvSMZQzYHlIAcwFBiYCp1VFjeJ2s2dycXwp5bhpRioeQ8McyjtWIxJtlKJUmlgcm4FprkRVMD82ALlmQLljy0/m1gCeNeTPKUCM8AxvokrwQkGY2Sx8DABpKkzbuTUJJZIHkFXAMXwCXwH8Awlnxg7kneEfMYN39jlnwHuOOf/OEmyib3h5xNvSex+cfLFJv6S6y8/rdtk5cv/EmGvj+JMRPbmuvSsM6f+0y2OduebAk3eHY86bIG8ienDBOTIbs9f8IrFf6ER2/+5EC6+5LsiL+36vAnd4rhlnFg8sefBU0mzPiTT8k5RPUnvFPw/QTwnsf3O1zHRpxvPlEZ+t5CuGya301L9PeS2frmuvr3Poos177TFuN3hGdT32WbEb4Xbez6njNwNh/Z92y6yrR2YpsJvaAxZeLzpvxJnjc9E3/MAmy6R8OcdiXpCFclX+ZxizetKUCVxYY31eHY1LeJxIokcRTiSvuZYecgxP87Hxr6x0XwfcXw79YgD3j7l0hrecPavUHrmxDdGcN606AiYz0sOPvbhuUEAV52ZPjZcBHdBmrOLBYqeyj0Cexkfwyz6C6kcddiJLB0hUbpDsvr/5DbWGjy2QboTwznCZhbJYXxnqT/If0i4OIWQcok2W318UxGhDVSQ8DFXwIWlejxjRJyrGgojgxpWHrA/Qja6GR5pRjcCSl1V3zKYRf2dlViUlonbJ281dHyM0/EmU7Bp5EU+zIZRwMLdcfgd10WicKfZgz7JM21snda1+yyW6/6fzjmRKf1aBU4ttpIgN462P01dFJzhvkc7LRm6O13DUtVBM03hdkCm6iUigadOGrdAdLjBgIPwVwWQf+FgFt4eq+vhgstA9dKYsyXhkxjkSDuRxSN1Jf4lNDF9+Bc+B++WyPhWBax7q32ZaGE/mjV0KYZExcnMkAK3avbEH8QiGEnu4nklcRUHamQ9EiamJxHWcl3JVnD7mVMWdo2XH+0MtNCojVlkGlp1dfa5jCitW+LfqfFyOYWTrncWDiysdIt7LFcKXB64Ra2ufhW4B8B3YK2kIUVIXfq3cKI3Vzhhet8hXtp3rCxKVzrbmDhgo1jgT/ccwu6t1OoCmvCBeGsAhTF54RaIRVcdqcFhoFUy1pBisd8qYalNcXxSGoTjm88pDhSSbXDxedQihuaKd4qS93amGRQCaVecN5ZOlNj1qGpCUvZlk0XoRQrXIN+KPVhY3GK9+ZTPxZtiq1hhYs7Fd7ZS7mEdonfZywxtFEpNWOlZNAMpZxw5DSUShFRCq0M+hm4NFwaLmorVYNII5RKoMifx6XmrBpSOXRy6GThsnAFfPNADagDC0CFdfWpNckmZHoEOhtZyebkiiQuQxEGni7EqyxA2rtgPQf6NkYZcStZSHVP2n9FkfsSJGMb4ByPOr2c9ta5fWGbsTXX0oSuhxY5DlQjJ+G5L3D23SRlHpHlatIs8Wgip+9A5KLSn2Gj4OVcq6afJGd4PzwC+WFT+rNg7eBHDWNjGXcJoFn8PFr7+Xkywz8MBD9fOLkIFPpZIcyajQs/LD9+zvDp03ijzANutMViOsoCxY2eCY+FuzT2qB6mRS+F51Y9bvTLyt6NfirUO+0yqrcPozF21lFfvEMXiermWTSvSUpHh5t8+8LPHDJXZ/YssX6GuPzMjyQX7Kkzl0x7fuaZy1uZV2tafoYX6PxMnv1Kpsa7kZvk2Uvy5RAv2RTyCqGX1CGI
LVAivpE7HhEl2Vd6tuzYMKSXeUnVS1IPk5KaNZNaVUzehF9CbelsVo9sGhnLoT+mnVyzX5xolzfRmDvR2UkyJuTymjNhT+wlZyShi3q2CuPIJTlr1RCJ8fJUJEZ7icQ4oYjE/onnqkQkRn1FYsu04djY2n4kNuKCXexDOj/Cby7exW75N8TYDadSKz+coayMhGOGnRWdUK28SKIryStX4m/ZXMZmFUuJO30xbTR1Vchd4fDgP06S/ewKNZBdpgayqqzsL1EtHbBJzEbo3342TWv1szVpVS2haQoizRFN2rdUpmm3aZpsOmvZS+fYwaX1qnC6ICwK23gyVKW34RjD0jTxNJcW0oxr6WNLT/qEk5s0u8H0qWUwfUYcFGX6wHqSwXEo3Yfrw3HbPH0PPOB8xuMZ7gXuBY7HGmkusaQ5oU7fWhkZGPeG7wd6X5TMRYZxKVJcptCL+nRKkX/jjRRphZHiJkVZ3JKvqrZ4wIcfitzqihRPGesjxTM5/pP3rYKNLIJI8VOBv4R89eZ3l2P7PXa+F/fE6148BuQ8ILdWFr8LtKOiK2vclIwUi/Sj3iy78l6OaurVqbhelVVwT7v7XoTtbi8jPiX8Uqhfdrs9/f1r71YSnhH5vUvJ9Y+ZvVfJnyW5Y63b60vnQNgT7ku+I/6UlXhvKH1trXs8wPd7K0pPU+lcUnq2FC9PfSKb+sKJ4SvodHoQ19mFeFsluZZn5Xf6FQitVw/4xqKRGEPvZIHzycEHn4AZTBkZBjwE8gYbnAhNPE5+JlpDT/RkY5JiVT3RcBvphiuboD9agbhPM3L93UDar3I0VyHOX1Suwj+IV42EouFK1MBPHQl7CAoGHm+shZ2rgXgH3psVbhu6tUcU3yT4FU5R7A7Fj1DZrhk2NjBbHoN9YnMKqyjyAq4Vw514JPOEsWWuBXQ6wk1hhpB/SFZLBPfjsHX8bone7f0Z8s5ZuH6OwvGWpErl7TriJuLMPKyPte4DglRe2rfg/bv4vmFe2Z7OS0Dw/D5hfqrYK1WEOam8GtqewNitS2XvWIhkZUWsC3sCHAJPBKCYPec//GfJVe6fQVe5nCX+HNn2VlVsVxP4Iol2sktC8uJe7YkvGy5OxG4pUFR8Xzxxd5cxSFb8KWlexO2kVQhDxTClqv1RC/mlfC+JoUWpzKHRImtuJS9sIMBQR9XMG0ThGsXAa+vhGgXV+cSnsyl/aj1S6CImrU5T8fx5CEifM1kkrevwvDIcVj78wpn8lMkJucnei1Umd1Lws9fCZ8MmDcIpx2Rph/Cn22q5WPEqM5J8G2aoTefxSfnGeIOUOR8q3g/SkHkhyBOVkVHLfMIvc0GICxpXlcy6eWo3o2gnRNWn6buHlFkJkyXKqUQ5LiNzv7BdulUzok/MU6Zepiokf6UF/I53UfbIq9shzjYda4EeuXAA0HTcsprjjfpcgj7TxmSkpK54Snrd5RtUxln4kcrySfVx+if8EG5Kh9bjjueFdC//Uj3wtSFULY9oq14+K57GWiTJxZ7cjAueNcfIulX3P5FsHOIenkAihSu5zlZsCZRn2zfhJkGGdX6GvX1m3QZWg0Iow24kw7Ivw8WpTMtmngyLvAxPGDIs9DKcdGQGAZiKTPWZjW1tZtqspTI7+DCYZ1jmZFj3ZDj9yNzYvJy5huNLD7ZqE5jzghD/YYC/IT0IZaY2CtsK7xswfxZTmTclfoXx1pnoHeJMhkxw5GvwEMrE4eI232T4MkeGfznPsMTPLNoAn2FBkeHxfGbOJpHMLNu2yQaZZ7lpYNHnrF7MLssObxRnzT2KCnd4YDXitQVvtCGcsDPyRmPmi9Efi/HRf3yUbfRi83k9XDo18I9hnf0D+J878ZvCLSTvR4aVImwhY+h1C+DrSLhu6HYaUn+DtwZcDy+PCeMSZv8avX0fjdQPGttf8NNP+NQVeIZFv9aCr51IPw5G/pN8x7ApttA0XDiH7ROFWz6TOAlvmx3SsiRsCmUxg/XpqgSb
wg0FWqEA/ohnHYtZBclg0B3nxD8Ylslh/gW1H9zekRJbniETsx/oOc/wfsdw9UziW+GNItuGT2O8Q5H5T2sSt8G4VOKEd7NRfBfJSvdJKhj0DyLwf6+GizJ7dSMrVcQjJO4exr0rB99H6s/9dwF/TC5az6onKtp7qkmRGvEvfhXHlsyQfy9fDU1pX9Oa7UqmX9Y4pzTp6Z8tXqccbE6veTIwvbLGNOXfy6d71jqny9aqphwITLmnMF2xLnVyxlolek/jjT7SI6J6YmQbLVulRP8TXgovhNfCK6G2LFE9f4ryiM/7YgHnRHUjwjZy6JwKd3SnLbpv6XPOta+JeopMD8nOPyWpSVXvEJ5zkdxdmdduQZ8gXFkQzyrSe4lwAcd74fG196K7nS95zrhfWuzDXlbZbr1ssPV62WJz+7IvyZCt18uFkBMr7+VG/K2Qx0Hey734J+GzcCT8Fv4I/zgoWUkpQWmhI/Q4+TovKgMlYU2oL/ScL/A87lx3X871RtK5NqTnGeS9gbZHejujtyvssJh0elty6KXJXkNKhxzC9nRpa4XVqtMjWU5Pm8OedpC9iXw7JKin0rW1su2NV7qSaNPYe2J3vbKppG9p50XbcHqcwD5bZ04KU4btL9juGHxE4B1PDB2vYuhuO4bLCNwairMEjBTa0niAT5QN504keBXeShwXekj8M7CM+somsAFsE1c1Da4S3Pv5h3iHcNUqyXnJi6/D/3QNL86k90dy4qRh9R2/Eon0tobwByU8/8iO+4RB52kJPCY6f2tXEsrAGzdIuAINPw2LWPE3SZDzOEv4vZZh9R72l0x5Vwicy6oVZFzFnBDqJlMvpmpZVSUkxa+JVzfoqUtU2Ns6FQ7is5xeGDyEsjSqbMWqxyAeyi7YVGawH8rSngxMhYO7LO/hZm0MDmVpetl9s5jdsY6QPeT5RpYNUJbtYPbYtpbZAd5MwVmeO2RpXgYPQD+U5QtV2bUAzMl+08A4+reBWWGGz/JoJsvpXpa2mt0mbbZ1CmVH1qiynNtluZ2V5Rl69od42fBmL61nZq/Ru4O7w+PVwKlsqAS6vCpQ6YhvcwOsMrEMOxWOFpyKnodXXumJG2yd/z50QYRh6I8H+n/8vWnoj+97/vFy+B+Dwl/LSvtv2XrCX57jpw2OJv70QVmX3aXn6kTG1bUU91WoyynuGwOLO+GBmavzGleP01zde3H1TMPVgze3LV7DobvHzO7uS65bMe6ueN2KcY+FvjAi9IQ6xnE1rLl5HtS5OUY9t6Kwi5LXdQCnI96NdZO7G9scebobGyqRXXnoiufGGRdcNk7Fc0zoDvfQGbJVdoc6hhseSXIs1Fe9hkPxZ8IGxzjDDhdfhm+SvMvOl/hv4Y/wUQ/Hhy9KREqvpg91WDycU+h5HqEPFxm+3eGMonaVDL6P65QqdIBSmQouzTNXlMitUyrKl5XY6SZnbyV2zH5ZF1rKempe5oaIX94RvyVc5zyovMRxQXkk/j8euZcvFWqRizRlJhW/rHF4YYYz8nyJx3qlY81XJb4Ptsna0SmdWbyb3LJxSntKwj4XG6r/OGypnnBGVqUfRKqcXkWqDcn5iFqkOuK0qzqhfiLVB9nf1FWLKuvISDUprZQwI5zhJKXqcPbyW5GOy3WASJWz7twO6+McX5/LHVkicrQjg0Iox9cEcqdWoAb9UI4ObGAy5sUcXTm3Cte1DmdwHMptYIDyyn1aNg0qodwXz0xz37aMyHFMlONzcrkra2Q5LlbmnqySckxvOV6eyzFG5bjqkCvjrJCOKvbrmPZ40jnc0oux1uI4Vh3GeXI59DlP+0eNWKsohdLqVTu6wrWTYCremaG0kwxZTlKfC0j25dvjceHOFp9429ENzWRXvtyVcJItNfSCJA0KO6mj06RWHUktVpL3wmvhjfCShnBT4WTLv+F5p38zy0PVmyIHwjec/fs3ac6Ab76F99I51C2Nm15ANsP8SfyGwnXkv8zoXud5jNdNMcx0
k0J9vbrLTTuv64uvSafKh6y7fFra6/Iam9e1bcGuEe6SzYSd813wvi3+n3BF2BBuCTcMvUjVcG0e8OV2hUcIDlDbCtQ6wmVhC89Psd+wI7F/sFOxd8JnBPcG/ssaeEoQ5+pPkiKeWbGO4fwrfv66cAfxXw5+OJHkSHhlGPH3xD8ZusvzwqywKFwXbgobwqZwWTgQDoUnwj3huWJU4fhRYVqYEhaEJekcKmvYcQ5koaS4SltCxVJqCVeEO0Ji8ZpH4l+E/0nyIpxIMhW+Cz+EI+GXYvwlxg1wbYzAGcszLswIPaGKpKQiKRWkiZk1QnpPpHWNYqxSC1WEiwsArWBxEaDYF2fxJXGLCYA2tUiaFin2xUuDxr4SRJYWKfPFLrANUIyLq5TWiMbk9GaIe64rVDuaU9ncN8X/E54Kh0KV631fvMp7ri+J6ur+SZJ7YcDfCNX05r6FKrl7lev9myQUl9MlfZGC2nVTBXifkkRlPBcR+pI7wpqwLiwL1cru54Q56ZfEU9RLlPoXlfFF8/lC5j8dCPvCJSH5blKwTaqvSYAmRdyiIJuedCbqMbPiHxDXxT4J6SzNwPoFLMlpkrKmLJDL5pYMEGFTQK00KesmjbdJ9TUZJZq00CbF3CSNzR7wKNvkuHknNoidBuLuKcCtBEH6EhJ3xKeEjlpbXBgTqmyf1DjPKRz/4loSaty5pDbdCjr9V3mS2kZVfoEeY4f7Rc/yvTPDEg2shJq3H+iphi81xlx+CWkFbQ0dXTp1m+y2yWSbtthmdGxTVm26hXdP523TL7wlemebZt4mxe1ZCSmOtlq08uH47+I1OiiXjnLpPAXtd1N8T6jh+Kkj1ED8tCoktXU8/UsarlvWWN3FsP+uIWWb1lGhhJ39vjw/xG/IsyxeY962SmG/JdQY+/4iXmXheUIqsPYn8ZVmiBmJNU94a0IZ8FRqHp2yolx66jMeualdy3hGVirCrJAmXAnG1v+k/SYxZVLbUYHRnbwtBlvXeRX/LJ4O7G9p/tn6FK8RzVEmnSXJNQ1tbQv3hQfCnlDDxNaZ9GOyMBaq88uav3UpflY6GswVyt9iWKmtiyX1bc1KWwjqlGCdPlenhdYpszo9rk4K6lRQnaZUo3vUlc4/xtQ6dVGjkdTplfU8wGhRJyV12n+NkqtT/DVVTelRk62KbItW4N5qbL9V1W/l1UJpxe6vBpzfqsb2K+Gt8FyoudeheblXdfEnQmx2id4raqwtbkvlTaiR8kpTehFjzh+RtMhgC70Wpd+imbQI3KKcWowjLSqhRWm0aMYtCqxFVlqUf4uctoi0pfGNwaFFtlrkqkV2WtRWi6pv0TpazEYt6qhFybXIdyurzO8KtTbYV1O5PxKeSaLRcH9OqJG6tCG+IqSw3DPNgZoPnHsV073G/fsfIc3DTamr32tGudfaZV8j3b4G3n3Nzik1YI1ONbUjdUBP04S/r5ZeaosPEqjhsaGuGkxJDS1JShRdjficqaq09CSVZSEqszSZWeVgUd1tSmHNagH0qC72eCFleT6SPTevITZPWta1gnpkHMrvSxoTqhXllaq8hrNHTcGnpHxdM/CputPpQKhVy/8NqLOdqo/nO0INnaca0x613DhFvr4qVr3vlGF4nZJbF0crW6fg11UHpxey8yBUyeZVE3mN+6cawzS7OkHrzWjh0VVWOmq9HcwuwC5QvAtEsEBhLGhM7choR0Y79JeFa7F3QsXb0TDUYUha0MKyo+x2tODoaPHR0dKxQ9b9qAb7Uy0UT1X2p1oonjpaE5E3d6wBPqGaHmtUP60JNUxvXAvV1fqBRKkcqwlsaLrYUOxjljWRpGp1/CP8lq8mkw0tNcdq0huB/WfxswrVEqpyNrQv2FCaN+LSHMlXY+BYg/ypmt6plsFBlQaVP1aHGtcl0eQ2VoWomfibb0LNWZuavTcvhWo+V/vie0LNdJubwhWh+kxUi8XNBaHG6s2kkNZ56YoNilYzoNYlTl9ZrzG7+E1N
Zn0M15WHK6Wmr8m3r5mgT2k2GKwb9NYGo1pD80tGO55UwCvNPwMtCjVPekFOGYecD5XhlRbq3bZGZ9XardrMrRrRrRYqt+qLt6r9Ww1E6bR4bRw8bUxu1Y1uI0ItKG/npUn7WQymZ6LKa3adkuk8RvJqcVoMO1PVpBaxjsaZvJra9FhIEecp2zxTe57UqNc6UzWYKQ1J3SxPH1xMqdsNVFoaGFPMTCtJiRn2V1g+rFTkRWNbIdWLZGBFQ3SKGlygCawwc6wEa5GEDGo9nWfIW9FSokaUVZr+ilYCqQDp7gsqyJaaebor1AIlvS3UNHujxpi+F75KvyW5ps68quifRgfNPE72TL7aI9xojXczUliN96dBXBpfbjTipLVePVU1TrQQm2iVNekKgzFRHX2iEs+uScKYEkkzO80RofYSc5idYzbLAM5oTUp/QqrZ1yziHZOxDuIObaiD9Q7tWKOTBqeOFglZqqzD4N6hyjpqD50fJVtrl4LGDw3fTk5dJncm1KruSmN+7ly8Wk5Om++cRo4rjW05LRpzWkznppIvCJtCVcSVetfViVCzUU7WrlQpV1qOa2Hh5FRZV0rhFUVd1aw3VA0MicqfKoFD7XKHMjbUNlXj1VBtfahcDZWHTEm8Ng9DjVdDjSFT5Wqo2Xd6KT44RlDJD7VaHWr4GC7Kjg4chloFTynWKkVRpT9XmRmqxFxlCKsyrlTJflVdqKtaXFDj6mrbr5WX0w0W6YfCYDV/o62L0tFV1rpKZVfp62rE76oGupqSvpSarrpoVxvZL83VX5q3zzVefGk2+lIz/NKIfC6d87xStaSwGrO6yueJRqiytlnl/4SUXaSkzW5ZY19ZTbusrrZxJV7jVLki1IxS0qKxrFhK+8IL4X/CYEN0qtLQCjSOvn9Q1ExAu3OfKKslRoMlWssSFbmE/xKDyRKxLM1LXWuwqwehVgpXz8J3oSrdo6j8lHbYGsD9lE5jHBLYCcSaT1KaT1KUkVfV2KbqbWukCVaWce3XTmU9rnOuuMaMuBZxcS0C8jllQavPuFYOwd5XO133KSq5luHxP0nK4qnKVUWlrb/7pJLWwYAzpk5WNfw/0btX6WermpLiarQJbUS0XvhQC+ipdhM6WOoxgEYS6oPHmq+U00hCrfBYre34XBL12ZT6zbF6ekr94Fgt71ht91g991iDzLHWsRtafSTUFo/VRY//SaJ157H62c+ZJDeSaK461m5lLi7fYK2hjXEqsK/xJ6XOf6y2ntI4k1LnP9bB27FGoeO4LGuznVKFbzxJIp3UsXgqo64BZkM1oi1tmYpSk62r9d5TTmWiXFTbTdIqujqlWNVssKpDtVUSon7TVdlvBHt4CqGrRjJWqVyqLC81DlxqFzpWv6goZ5fqOxWNXJeqk0uV36U27peBDquKGrHViLjGMFmj6dVIY40IaxRojRqsMVDUqLMalVXTJpxWWaMEasx7NUajGl2uRjnVaL810lRj7qpRTzVtVOdUrozENe09lcoUqYm4GrGP1Z/dH+GLcFd7Fp0uNtSHG9qdN7SiaWjV0NAioaFu0IhqLFCHaei4rKEyd9XqGwvi1ZEaOgZsaPRsaJJtBDo94YFQK/HGnkIprg9qNDIbbD+oD6+iyqtoyTTSVN7XaNvXuFNRX+jr7KGi7UpF+7WK6q+v+uhrZKiojfXT4jUEVDTf9UnDpc6xKmqMfYbfS+3b+yeacXW2EBzT9LVQeNUSbF+peSJlFV9dWIv0D7pJhvxlKLgiteKda8Nzrn6UI/Zlbe7PSeay+tuI1C/TBZapmWVawzI9d5mWv0y/dT0NQjrzX9bk4WnCOAyi0ArnXO3+XKPYuY70vKbW2Fqv6djC9bRR9TQheZqiaprcE+rIOXWDhJbGRxqaEloeJJSEkib6UaCvrhIMR1cU7B3R+r5K56ooXgeHvobAkfrRiHbR1FBypUFqpI3PqypjpPXZSGu+VzWOV1kfacodaauigwtnpOrQGXTEpwCL
k1C+GD4O5XmTIc+rvHlezMhXgRo3YfL89WaeB0r5JNALPwD9UJ47+/n9cCGUPwuXQvmjcDKU/4fHKiprOHn6neeJeH45nANMtglsI+OpX34dK188Ys/zpmCej9fkz9HhaaOBxf4U9rcsmmfbCcRC+fvwbij/yIdy3As+NOvdpLmjcsOnzb2bE26z3OxKsiFc4+HvzZXZ9W6uudlycxu+CS3ucwl7w7L9yz2O3zMLXQ67Z8eGc12D2rdBpw33YuDcfxiWGga2mCjb4vQJ6TGq3nnU0H/E06/dgl3fMFJYMay8oZgiUAVzXuYGjeM9BAcGSw9I7+Oo7c8SrrQMvzUCE3nibRm0fyUdGq5ESOzkHRP4O7ff8lsDH9NoIPb7aYnnMepGlI2OjP5grwKUcF+SUPe3bjh/Cdu6E14YLjqwNs4b7nkSH4G9AiH7c+BwLB2CrmLamR0Q5Sw5rZBK1yHM6jV+o08pT8AnSsF5Us6y5GxRKrYqsNT/KJ7RWJKJVFTo2VcUKcs6Rb4aw6kiG24pj1SYP1VhLFICrXuAxLTR9apXKFyQjqVVg2IfW5SE90SoehK2r/IfzRj2p7K4iHgMrqtNXOZCv7zh/nsA9IFDHqUOef3AGT5yP2nI5xKc4St3MobPPFYdLkuHi57OcNZ8F/k7XWdYF1a5sDGM8IS//McbYuUHXl8rX9Ib3fI5VxbKvM7rrvfFLwp5L9gt57j8VY4pMO8guWXHOlCB1329Km+8Fk7E8rTaqw54Zlrd09X76o6JCtwTKUwCbtmEVV68qeaFfEXWq+bEl4Q+D5KrnmGhYmYLvAhViMJlePZaSMPyh70FB/ABfQbpxpr/Imir/xv+8ePGFiRDQyeF2C0dGnqRCpJpAcWfTeSprvhd4YHwnzAN7n8qbFTm9wnb7Aml338zfD9HpTFneDEyiJQI7zlLaLy4SGabhquIXecH3HPAJ9CPYtyL32D2ShE9fRmmq4RM5lDMHoOjHRSvMrJeQnHrSjpTfP8dIf+bFxaEWSEJ88aUhL95KztYm9sCKIF1TBVVImkS1HonJR9vCjYbcoObaT1dNel9iB/TGHonXCLp9fRnDr241aTb0/doer48itzDTdL+HvQVp8s2Vzcu12m6l6vCI647Xv6jAV8ei9/l1uHlHo388pYXqy4vuVNw+SP5u/iYsC7JAq/UuHyOLOJem52Iy/sqEfeeF3Hcf9z1cE90vcM904fK3RX5V6W7SC/44BqH++Hrkklhmx5U6PJRiJVbXk5b+U/XJVb4+KC/ci/85lLBSpPLCCt9LiDoFqC/QuP0V2q6afO6zk2fVz7IU+TyRXGBKyfFspVTkdcYi1zXLqZtsjA4DhV5ub/Iy/1F2nQxQg98HSOcIfSZRVCkoxV5sb7I3ZIi37UrHqJyEIAF5h5RkXskxS6wbp26yFc/ii1ghZelirz5UmwCSwCXsItjy1jxDW3u632GiowQxUdMUpbFa7y43V3k5mmR291FbncXuZrqvzDz+S8tSudlSfyn+BGl8zLlXZ6XF0m4me1+dfSKerNNb2+6wjxXjBL6w48RfzTojw54VWnUFx5haPTA60mjX+plpI/Aj9J8mLG8wOWUcpaGUebPM5xySpimyZU9GlKZKy5OeUhjK+t6llO+k+OR0bH8ynUYp3xj9V86smwbmE7pH+w/sT3YnthD2EOxvFZmALtpTbjE/3OU+NvwEmuWEp/vLfFaf+nb4i+N0R5L+81SXnpFh1VJyaaNUIkXbEtc5CkVSAcvx5ZcgMVVqL1tttsse9p88bTdMNvtVTjWI+2pBWnz94eO16UsvXXhhvBMOfb2Lcft/2jj3gvstXk+8+ffbpbvr7rZEnfCsvN0iWxGEj5v1F6gOWbXpfgl8Zh6zF6LP0elZOnK2Qp2T9g3XN018Orzhv5x1NA9a4Dna6jcdw0XPyV+NFxz0LZ5IxeuZZEelhT8B+VjjLjeoWH9BQ0Xvwxu//FQesTo7BfBpiLYz8n3
G98r2cpMFdsWvlvwzp2HfD0vJGx3W0ER+H0U3U7PcOEM9peox0u4N1HzL0m3ohwlyQ1efuHIsJlGmpG1x3fDyo3B3IVB+QRbeyqWpyF4RVF4cVLtjlsEmb1SMkZIWoR0rjblSwqalJR/miDQahvPJ2L3Jr94kvQasPqsEGOEyklrFt0Pol8voCB3lsL3/1aUnBhiH2M/mO/+UzJiysUthsmsbxvuXNiWY7lwC4X1OsLpp3R3DJewXu9I+h/sqoyTqJaaxyOsW6Fqlk6xQLlUz5S8noLNGUZKROydvobajPnttPWvdtJ6UJsv9LQjtDqXN/O3eP/c2fpkstl6YUTY4rVNZ+uGUWCLj2w5W3vy1V/jbHGPzNlKcI20WOLGV3GH7YJXbPDFWBsWbQtQ1MX14pr4lnBFyEfCvGJbgfWh4+IHG4HiL39wWvwWXmlWTLGacVN8MsxN8Tpt2wrozNCzLWvbOkXa0PXmkR8fIo935buGpPspfkVyhU39R6gmFvxumbClpOzs4FtD4teW0I/cS3+ApK+4+rvIA8lqH99Z7M9GEMyuYmxMtN7kBqxcK9p9+b6Ix6TvfYF+T5ILQrVIsn+lSFa3kIxamF9bRnIahK0KK8IsOpM2/OuBeCVtKw5/O0OozVnw0hV6Ql8ZjRl2v6w0Lxh+UrcqX6YaN/Vr5TvLBGpgY91slkmHvwacMcLQ7Ee3hVvCrrDJTB7dEc/X7P3otfj/uBUZ5erf7J7s6ANWUV68dqY9xtjHT1rY44+Qnanz2GEieTwQPxTqHYTHGZbc+Te9QTDl7VYvM8OSORNjG5nJ02IyNW4zZ9Z5pSHDPUovow8aZ8iW29Hb9Z0JDamjL9x1DsRz79Tt6KpwpyScE/JfKm7HlW+UIb3D9+kiSSbKSPKUt86TO1z8jSSZ1P1NLW82WWD4m03yutlgYt8k7c7cLvPrBx/Dm1ibOwLPzw0jtukz/v5b+GXo+2PDCoK1hoF3nEdv4cRwAdbNxMHRC56Vc0nm4dcJXv4UeyW8wbNxIv4V24V7QydbUtA1fJf/gc6cYXeCyt8IHG8bLu4CewSpPYGLi+DVBsHHVoa3vHzi3l7yzbFbtvHubfAPZbevAeGCunvLitG93acUb3eEBebB2zrNzO//UYx+f52lSZ8tv9/XO9D9qPi6+HmWNf1Zhp0P/j51jpFpjtlzjq8ezY2t4Oqhuakpz63YmmBug2+ZcYfd+eAsYY6/z57joGSOF9ydjy6xzrFQmdNCJVOxXMylYedSmGVwm8vxnyIfuvf6sUQzO35h2Dq+0uTNfd/jsLudFKYMvfsV8PxNkqz4D+FY+CWcGHamwJ2B/7gFdmdAaxvH4conwWuHho1Tg3ncfqFi6Gw3wfa34Qp23KuyYcRHOTL7i6JPqjynAP9HjP5T1HCxg3jcI9BkybC6AGQA5WCyL3wVjoVKu+0Cj8N1TPhbxLtO7qqNkD/lk/D+lG8J+NOp+Lf/lXSmfWkzXRx+z6fQ+WUmVQsu3LiwIwhFLLaiWLVFEfSRSmutFEX58M+5/r655uTMkkkymS2ZM/op+o0F/uHtlrfh7T/JH8SYJmH6mVHrtM0rPD23yH56zTzRlBfNTz8yjp2qDvLT8O5sajlABWso8dZCeGziPEfvB/V3pyHnYgvHhTKOd6dSFu9wmi/S9a4S8eaCWz/ByW6LWTH37vH73Zm8O484gxUdJE/lrA7kNC4tSyl+k8/w0dnobORpzvMBrOffVbcWKqd1mcksDWnyjabSJXe1c3nyM5NcTONts+zKsJzYxrzxNuaNt6mE3fKKlfvtEV3+7Su7g9ss99pmFf52m5AXhDwjNks0tqmNtzHauc1gJg4L+zNYX4eVy0S8uODPnt8dDBlaw/VZR7MzORsWfNVapEscNyrgRMsbOH4rKb/7Pzhx+k5+LYuespZ5hOPHXR2tp3QU+0Rs3fA2rM4SrsuiCdflMl13kXFqd4nRbfcr96Z7wiCjK3NB3Qpve5dO
uuvSS3fdtkIeKAzbYbg/tAOu+5OJnPWaKh1f1PDRvxcZL0P4no2QI19n6Z1vSJ5jrnohWzC4TsvYCAYfXxoLM4l1tFMwJOgZ+GIolw3R6JhAyzVjHcXOqyHMS/i5Q/kRrX4FRobSd3AK0iDJqS8mcIQ2Ol83FlcAfqUsytexcS8HMhxfriKS4SLHxWXF3IDDDyjewC9QIUPHbWMtVo4/KYdLXCG52q9JOxJ/KvFHyX3JnDhOpwhNxgppHX9DROlHKPbXFXZZfjdcyxyNtwbM9Ed5ON827nIat34sNZdXxa+qjB99lx8Zy5NSngtpciebnKMp3TW457ABuMJ8FXCv8wfoFKTJRXpu0C7P0rUuOMUp+Y6e3zWbsPifMfcP8LhzZD7XAURwPe6+UybdwaM0PGD3NBX/GisfJJZFslIReoB0KzzbinWmn+Z0eGzYbWX46UlmS59+0KY8seubezoRu4y+n75KbouHYlbR2OnJPdGN9zFjcx9/pGexrp74OqNOt852Um69yEu1njf9zr0pdrDMstO30+1c82bsYLx+Z2KJ7rA4LtoNdHJ298U6w9Hd9/1Fd294XXYf3w/+WXoV6x2XxDxM1o37VUPUrxnjlUfxAc9Zz6hw2WswIewOPn7R6Ot95PQu8mxsbJJEmMQoHkbG0guYcjzchL0cTP6F8wEcEzGk/hjznC/K3svzJBEy31nkkqnQCcsw0RunGYPH6RVWIKXVm0s3MP+a/swyrzj9KO+ZJrvSv6nWO3aN14n4yIrwBo5rVXVUaL8ffUuEHtbcQ6/IgpveLquzemW6Lb1LyT1N1hyzAqaDnV7f6dKL6Kgv0flMZ7VTkJyTzAJ834l4Cs93lvWHhf3vhqiQEUvGeKWJuiWxYsxucVw2FLoGX/oprxwxMknJa+IHMUZ/6SXXxUViLf8whjayC+eSCe/WOFt0kDXa9eJJJkLIy3MPOYVn85W0UtBPVhOxPQyse9nj6XyQM/yO48c5+Y2LOLG9dTibSTkr5pQtcx0cv1HS0XOME/wcx00epDzXUSg8KmRzL3G9wlRl2x7K/3D27sQRjIbH8um94DRvEmFwRV9zcC0OxFvxTtQk2eBelH2ewZFkmWkanIhd8Zt4Ll6IP8SyuCvWxLrYEFvigfhZPBTXRFkZGmyIm+IWBWogW0ODHbEgFsWUiPWh2MZPP2HYbMoZ3uFEtYK8nhNZOmHZZQvatV7ondH1lo3ReGBs/IfiLQv710YrdObX/4jo0RbTiKuI1ZbRejgW4DpCcXaJXK4Z95qgjnalbCzucD73YqySmNu8QMwRYZkTheEM2ZGXKjFaE+XqzNh0nGiLUG2i7RLBD8lJ7d5QahsqZDrKVjlOyp8EivjXAomMDPk9+ZwmsuxSkWU1fpYaM/vT7m8WExzZmb3M2QN7H7P0MbKfCIK5rCxd3Cw7dvn2LRWEb7Ns0rfZP8+3WfKYXQjVr8Z9oQP2UT5OjfHK/5Bni8bo8E3yGuzMJO+gtzGWRSoafEkBV7fg9An1MuqHQ2P9t8HN7TIW9pJozz6JX8Uj1NsSz8RvxsJPtOtKs4y2+SiZiDnOXb9AW78mM5tkw2+ccJa1VWRHStHQwyPSDsdL+I4VMpuBXbuPC6WZAqZQ9NZ0iu8ET90YW3dKty4qFxtNOCF+5R68SvseztqwLYaJfnRCmzdiVsCPmBL2oxaTRiN20HMZhvcug0VYP1pTGL4L+BGz1X6UtLi5O3uqud9WB+dodHMs9c7l8eAbteupE9pbo/vZe5DcEjUb0ftBXVzAZGJUWGI4W7hiaqAwYAhbwIh7tEHdHW2o9X1bZ+H2G+2Be9PsxVuKVvZ8kUmHc9696DzJl8LzjLgtshFKdJ6nG1tnRBSdq/E9f2Da+LyttbzM2IQC+zyGgiqJwovkKTMPhWeZ3y2c0tMY0ul1w1UyNMRCnhtyvW6or0ZDbNG4IYuk8yU7b54vJ3mMIef/4n/yZpeQZx4/T1ch/wvwJ0GcY61r
nOOmxrlV7KzlNDXzzKYLce5eeg3949xvHdyyADg3YwFw7kWkExTn5pKfxanIxE2c+6zwB5IbfJrKsdw/1Je5xrrju0Wd9d6hPhMxuBnq/JDg1rCXGY3z3O3xjXhHVr5aZbUEC01jvHJnDPdZ8RPsdMVT8ULsiZfGm7khmhAkWoX+8RXObuHDulgk+PHEWK8q4BR19qdYwbNNMmG8YywfkZmVDAGfbnSiKwIOlaXUHvosYUqRoTvAb+M+Ee5XMJd8zyxouK9RUv6ysNqtMVj0dies7NcpKb7ekG3E+qo8YrtRBcbjBXbzsjKTSRTe7PYUMCxf4PUpcLcKQ/DTohSYJSl08OV3kgKfSgp0KgvsllrY5pCva4VVw50NrtdhsiOOjVHfI8/QD98M+w6tDZvu7MY94Xf8B457kh+M8SYa7/6TPkP4STBa/8/UPZKKpqTlbYjt+4/UA/33bzKFNX15LKiIFwLFvfCR+xOYRnKBdf0u8GUoymBk3zVYnO7W2F0gyuRM7u5RyK4xDFP8YHe5+MfegCL7chRHFqHIWxiuj/E+5pitlboju1Xdeyv+3d+MRxs3ClSlv3ktE4/XFWZCrxsmd1+ZtNzC7oPfOpF8r2Zk65oO39YVXflGWRMC1QdyXX0jp9VnWpmtJfqGWxh09VsfFZs/CPxWnmtttLEPUaU6OnqkGjn6K07EZ3Ek3on34i+xL16J1+KZeCGei5diT/wqdsQj8UQ8Fk/FrtgUP4kt8bN4IB6KbbEh1o3zEo/BM3MXPDZrgueDavBvkmeq4TxGDtz4nJs0PpZ8KnZ5qOMvYlu+nyTPKRZjrGW58ROV8/gvHzXGPxXmSr4DpaAp8UNV1Ic57ufhujT64nqou3d4JVnTf4d/FfKf5Ip8zxXrjJLQ3qTX1saAQGhTyYc2tiFCm8FbaGMzrPRmj730bKFLzIuXRlasSjRWpf8BhmwlSkYJEyelb/bIS5SdEhY4Sk0CU6BKfCst8d2llAGYDoqGmF2OhiPxjtvXKXJzbRAyMPKHWHqhsmyIRnWja7WMtVvD/ie0p0doRxdwrSM5bwwXZ5JHMPXdWK6iTreJ9PoFPj3ieRGMfhak/wXLGWOdIHunOj43xpk75BqnCEHxa2/iK/EfD4nTlWIu/kG9zEndZl8aJV4biwOSKe5JvjRWsyiGvwleqJDWjU5U03UVjcUZCWZLRHFOETPy5Ab4CZdY66HOehRz3adr8ADIfY274TeGutRiwvd4NXzvO29tj28BvlcXG6L+gOu1JWOjKkx4rcJELd2E6a4w+Sv9M8Vm0tf/E5N7K80Dmt5BzsqEDRBeE4MMXuVdKwXlqsUrH5t3+dwKQJn3tfzD4hs6iTKfY/zM0Z+aZSTTivoZX1n8TH/gzV7ky78B0Wts57fzXh4Z97sG9/Zm3BuCWzAHM/AC5PsK/oEJmIJH8AT+gnNDmEckXmsYW9eG+gZYBA7/Yd7oh2tEeDY0yYyfVxLRwHq0A5zosqejYRJn/w6G5C1O8498Hn9Jl35OuBRbSbjUk8gbFZ1Zcps4IflVR+4fzt4qjFMOZ/8GRn0dhLCko8mijlIPci72cFz1yNJlP3GXYqbWpT5Q26T0M88HG+Mc4/izKx310zhh2H93pIzDNk6zqoAbDzhuZRfneV+6yUvixUYYX4y73xFbU6N7uzb6M5/g5wjTjifGyCHv9w3VXUOzZS2irA0VsDDkWp9pW1pUYq6VobpY/krFtdwVL8Qf4qXYE9viNv3h5YKxgoEnQyZR+WCJVGjuKkwpVah4K/xTGpJMgIRkmY5b8hftYZKf5UKSJi0kqRH98j/u3ERPxwq+tYQTKjy/nKFALm+KFPd4hdci1q5HYZNdmMPms5imA/5Upsu52aELufmTTvScutnKSnbLWJsZGnsGV32CbknymmhvlA2LNuHDCZpP0tjA0srQ+NJYPzXE4QKFPS5L1sXGak8hVjlDnxApT8SNU7GCZ+qzZL4nMO20+0gLcMF0
X7hgLxzflS3FbuCV7N5LI8OJ3R+U1Vdaqjg/4B7OV/ixas5eI2HOfFaY0/kN82tp6NyF+VSaqjTY/Pm40NgwWNX7kY1kDE8gDTbBisGNbsSZ+CR2xSUSeDHsxQbvSCGqHeO3lpe8JvlcHBj3f6AeNcSOeCWSVHRJJN/USTdfjbUL1KfrxunEEGZbUozErkhmo0kZ33BI/A3CuCL598MXcddY/ZbwjwVu5OOB+M9u9hSj51U7/9Boz7Fqp2gb3YWD/bxxv2LwlYlxr07g1a/w9Atq92y0UUSV31IsiT5SC/9amggkUCPFWpng80XxkVA1tASqZRNuc2aPpsozrGI/r7ptz7fK/6HVA8DotnphQa6wNujHKgrjP8bzhf22Ier/MTaKoGIIjyviEgGyEmMxbfTJV2N9F9QM7qCH+EbkCP/hjkJNxTnxUgPpvyVc5yudp84NrYFLWQ4e7SWpimdG/zCAZ1/QHIyMYXwLi0lpVsUlY5y5Ry4eE37jNuEde9l7NxLVj3d00bw74ZrdEZ2zedY0NXZ4qVUsXO2Cw29gyiHVTU19s0uZ67p8lrxIXVD8Lar7WFTtUJyKT+KreCCe0kMraqK2SIc6mmC3K1RP8K2yXVqozsTf4qbIdPkeA4Y9/t/cY95or0+8IS1tGP7h7R7KRNdwIr5ynuGy5DWRSjQMqUR3rRB2oL0tu8z+7DJvtGu3tim+ywfioTFazUvuij/E77A5kW9dbIgXIqmHdB8evxnr7+pTFMEhT8cksJGRfIM82VaYZ+ThSaLIT9O+ye+Bvknz4JuXNCfrmmewe2cdiqGmZoZtGpUqz6bOMLLOPgTu4pZGJrssntGMZPkrNcpikjLK/jV2rNBeGvfzBrfynzHO5IxRbSvhajt8nThlS4bodCyZ+d4QWiKNQwirxp/2Nu8a95fAisE/fDd+WjOEsRWtft2iNGjvGkzhxAGbZ6GlIYY1jPZ0bAxxkChYxB9i3xjSaclOXBS9MSpfSf5gdCtHyGfbxjhD3GhKXL+xnPAVirGvYPTfzyk8UZn/b5vagungRDzk30UrCEOxKQZxxxhnvhlD+t4YWTWTwXq2idNbo99YFKeJkGIEerDQ+pd4tcy04EOc+D9THaPU
'''
| 72,385
| 217,128
| 0.968691
| 6,640
| 217,155
| 31.67997
| 0.971837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154321
| 0.000018
| 217,155
| 3
| 217,129
| 72,385
| 0.814383
| 0
| 0
| 0
| 0
| 0.333333
| 0.99988
| 0.999871
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83be496958b10c094a89decb9073b0008c717663
| 34,624
|
py
|
Python
|
darling_ansible/python_venv/lib/python3.7/site-packages/oci/os_management/os_management_client_composite_operations.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/os_management/os_management_client_composite_operations.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/os_management/os_management_client_composite_operations.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | 1
|
2020-06-25T03:12:58.000Z
|
2020-06-25T03:12:58.000Z
|
# coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
import oci # noqa: F401
from oci.util import WAIT_RESOURCE_NOT_FOUND # noqa: F401
class OsManagementClientCompositeOperations(object):
"""
This class provides a wrapper around :py:class:`~oci.os_management.OsManagementClient` and offers convenience methods
for operations that would otherwise need to be chained together. For example, instead of performing an action
on a resource (e.g. launching an instance, creating a load balancer) and then using a waiter to wait for the resource
to enter a given state, you can call a single method in this class to accomplish the same functionality
"""
def __init__(self, client, **kwargs):
"""
Creates a new OsManagementClientCompositeOperations object
:param OsManagementClient client:
The service client which will be wrapped by this object
"""
self.client = client
def create_managed_instance_group_and_wait_for_state(self, create_managed_instance_group_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.create_managed_instance_group` and waits for the
    :py:class:`~oci.os_management.models.ManagedInstanceGroup` acted upon to enter the given state(s).

    :param CreateManagedInstanceGroupDetails create_managed_instance_group_details: (required)
        Details about a Managed Instance Group to create

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.ManagedInstanceGroup.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.create_managed_instance_group`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you
        could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long the
        waiter function will wait between retries and the maximum amount of time it will wait

    :raises oci.exceptions.CompositeOperationError:
        If the wait fails after the create succeeded; ``partial_results`` holds the create response.
    """
    # Use None defaults instead of mutable [] / {} so a single shared
    # container is not reused across every call of this method.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.create_managed_instance_group(create_managed_instance_group_details, **operation_kwargs)
    if not wait_for_states:
        # Caller did not ask to wait on any state: return the raw create response.
        return operation_result
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    wait_for_resource_id = operation_result.data.id

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_managed_instance_group(wait_for_resource_id),
            # Case-insensitive lifecycle_state comparison; None/empty state never matches.
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the already-completed create call alongside the wait failure
        # so callers can still inspect the partially-finished operation.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_scheduled_job_and_wait_for_state(self, create_scheduled_job_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.create_scheduled_job` and waits for the
    :py:class:`~oci.os_management.models.ScheduledJob` acted upon to enter the given state(s).

    :param CreateScheduledJobDetails create_scheduled_job_details: (required)
        Details about a Scheduled Job to create

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.ScheduledJob.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.create_scheduled_job`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.create_scheduled_job(create_scheduled_job_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    wait_for_resource_id = operation_result.data.id

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_scheduled_job(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still recover the created resource.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_software_source_and_wait_for_state(self, create_software_source_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.create_software_source` and waits for the
    :py:class:`~oci.os_management.models.SoftwareSource` acted upon to enter the given state(s).

    :param CreateSoftwareSourceDetails create_software_source_details: (required)
        Details about a Software Source to create

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.SoftwareSource.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.create_software_source`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.create_software_source(create_software_source_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    wait_for_resource_id = operation_result.data.id

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_software_source(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still recover the created resource.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_managed_instance_group_and_wait_for_state(self, managed_instance_group_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.delete_managed_instance_group` and waits for the
    :py:class:`~oci.os_management.models.ManagedInstanceGroup` acted upon to enter the given state(s).

    :param str managed_instance_group_id: (required)
        OCID for the managed instance group

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.ManagedInstanceGroup.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.delete_managed_instance_group`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    # Fetch the resource before deleting it: after the delete succeeds a GET would 404,
    # so the waiter polls starting from this initial response instead.
    initial_get_result = self.client.get_managed_instance_group(managed_instance_group_id)
    operation_result = None
    try:
        operation_result = self.client.delete_managed_instance_group(managed_instance_group_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone -- treat as successfully deleted.
            return WAIT_RESOURCE_NOT_FOUND
        else:
            raise e
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]

    try:
        waiter_result = oci.wait_until(
            self.client,
            initial_get_result,
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            succeed_on_not_found=True,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect the delete response.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_scheduled_job_and_wait_for_state(self, scheduled_job_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.delete_scheduled_job` and waits for the
    :py:class:`~oci.os_management.models.ScheduledJob` acted upon to enter the given state(s).

    :param str scheduled_job_id: (required)
        The ID of the scheduled job.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.ScheduledJob.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.delete_scheduled_job`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    # Fetch the resource before deleting it: after the delete succeeds a GET would 404,
    # so the waiter polls starting from this initial response instead.
    initial_get_result = self.client.get_scheduled_job(scheduled_job_id)
    operation_result = None
    try:
        operation_result = self.client.delete_scheduled_job(scheduled_job_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone -- treat as successfully deleted.
            return WAIT_RESOURCE_NOT_FOUND
        else:
            raise e
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]

    try:
        waiter_result = oci.wait_until(
            self.client,
            initial_get_result,
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            succeed_on_not_found=True,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect the delete response.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_software_source_and_wait_for_state(self, software_source_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.delete_software_source` and waits for the
    :py:class:`~oci.os_management.models.SoftwareSource` acted upon to enter the given state(s).

    :param str software_source_id: (required)
        The OCID of the software source.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.SoftwareSource.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.delete_software_source`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    # Fetch the resource before deleting it: after the delete succeeds a GET would 404,
    # so the waiter polls starting from this initial response instead.
    initial_get_result = self.client.get_software_source(software_source_id)
    operation_result = None
    try:
        operation_result = self.client.delete_software_source(software_source_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone -- treat as successfully deleted.
            return WAIT_RESOURCE_NOT_FOUND
        else:
            raise e
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]

    try:
        waiter_result = oci.wait_until(
            self.client,
            initial_get_result,
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            succeed_on_not_found=True,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect the delete response.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def install_all_package_updates_on_managed_instance_and_wait_for_state(self, managed_instance_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.install_all_package_updates_on_managed_instance`
    and waits for the :py:class:`~oci.os_management.models.WorkRequest` to enter the given state(s).

    :param str managed_instance_id: (required)
        OCID for the managed instance

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.install_all_package_updates_on_managed_instance`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.install_all_package_updates_on_managed_instance(managed_instance_id, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # Asynchronous operation: poll the work request identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still inspect the work request.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def install_all_windows_updates_on_managed_instance_and_wait_for_state(self, managed_instance_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.install_all_windows_updates_on_managed_instance`
    and waits for the :py:class:`~oci.os_management.models.WorkRequest` to enter the given state(s).

    :param str managed_instance_id: (required)
        OCID for the managed instance

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.install_all_windows_updates_on_managed_instance`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.install_all_windows_updates_on_managed_instance(managed_instance_id, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # Asynchronous operation: poll the work request identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still inspect the work request.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def install_package_on_managed_instance_and_wait_for_state(self, managed_instance_id, software_package_name, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.install_package_on_managed_instance`
    and waits for the :py:class:`~oci.os_management.models.WorkRequest` to enter the given state(s).

    :param str managed_instance_id: (required)
        OCID for the managed instance

    :param str software_package_name: (required)
        Package name

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.install_package_on_managed_instance`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.install_package_on_managed_instance(managed_instance_id, software_package_name, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # Asynchronous operation: poll the work request identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still inspect the work request.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def install_package_update_on_managed_instance_and_wait_for_state(self, managed_instance_id, software_package_name, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.install_package_update_on_managed_instance`
    and waits for the :py:class:`~oci.os_management.models.WorkRequest` to enter the given state(s).

    :param str managed_instance_id: (required)
        OCID for the managed instance

    :param str software_package_name: (required)
        Package name

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.install_package_update_on_managed_instance`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.install_package_update_on_managed_instance(managed_instance_id, software_package_name, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # Asynchronous operation: poll the work request identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still inspect the work request.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def install_windows_update_on_managed_instance_and_wait_for_state(self, managed_instance_id, windows_update_name, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.install_windows_update_on_managed_instance`
    and waits for the :py:class:`~oci.os_management.models.WorkRequest` to enter the given state(s).

    :param str managed_instance_id: (required)
        OCID for the managed instance

    :param str windows_update_name: (required)
        Unique identifier for the Windows update. NOTE - This is not an OCID,
        but is a unique identifier assigned by Microsoft.
        Example: `6981d463-cd91-4a26-b7c4-ea4ded9183ed`

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.install_windows_update_on_managed_instance`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.install_windows_update_on_managed_instance(managed_instance_id, windows_update_name, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # Asynchronous operation: poll the work request identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still inspect the work request.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def remove_package_from_managed_instance_and_wait_for_state(self, managed_instance_id, software_package_name, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.remove_package_from_managed_instance`
    and waits for the :py:class:`~oci.os_management.models.WorkRequest` to enter the given state(s).

    :param str managed_instance_id: (required)
        OCID for the managed instance

    :param str software_package_name: (required)
        Package name

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.remove_package_from_managed_instance`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.remove_package_from_managed_instance(managed_instance_id, software_package_name, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # Asynchronous operation: poll the work request identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still inspect the work request.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_managed_instance_group_and_wait_for_state(self, managed_instance_group_id, update_managed_instance_group_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.update_managed_instance_group` and waits for the
    :py:class:`~oci.os_management.models.ManagedInstanceGroup` acted upon to enter the given state(s).

    :param str managed_instance_group_id: (required)
        OCID for the managed instance group

    :param UpdateManagedInstanceGroupDetails update_managed_instance_group_details: (required)
        Details about a Managed Instance Group to update

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.ManagedInstanceGroup.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.update_managed_instance_group`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.update_managed_instance_group(managed_instance_group_id, update_managed_instance_group_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    wait_for_resource_id = operation_result.data.id

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_managed_instance_group(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still recover the updated resource.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_scheduled_job_and_wait_for_state(self, scheduled_job_id, update_scheduled_job_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.update_scheduled_job` and waits for the
    :py:class:`~oci.os_management.models.ScheduledJob` acted upon to enter the given state(s).

    :param str scheduled_job_id: (required)
        The ID of the scheduled job.

    :param UpdateScheduledJobDetails update_scheduled_job_details: (required)
        Details about a Scheduled Job to update

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.ScheduledJob.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.update_scheduled_job`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.update_scheduled_job(scheduled_job_id, update_scheduled_job_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    wait_for_resource_id = operation_result.data.id

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_scheduled_job(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still recover the updated resource.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_software_source_and_wait_for_state(self, software_source_id, update_software_source_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.os_management.OsManagementClient.update_software_source` and waits for the
    :py:class:`~oci.os_management.models.SoftwareSource` acted upon to enter the given state(s).

    :param str software_source_id: (required)
        The OCID of the software source.

    :param UpdateSoftwareSourceDetails update_software_source_details: (required)
        Details about a Software Source to update

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for
        :py:attr:`~oci.os_management.models.SoftwareSource.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to
        :py:func:`~oci.os_management.OsManagementClient.update_software_source`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example,
        you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long
        the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid mutable default arguments: normalize None to fresh containers per call.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}
    operation_result = self.client.update_software_source(software_source_id, update_software_source_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result
    # State comparison is case-insensitive.
    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    wait_for_resource_id = operation_result.data.id

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_software_source(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can still recover the updated resource.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
| 53.68062
| 245
| 0.698273
| 4,481
| 34,624
| 5.120286
| 0.051997
| 0.039662
| 0.050994
| 0.044587
| 0.943558
| 0.939679
| 0.932183
| 0.923248
| 0.917451
| 0.915969
| 0
| 0.001838
| 0.229956
| 34,624
| 644
| 246
| 53.763975
| 0.858713
| 0.458324
| 0
| 0.81338
| 0
| 0
| 0.026806
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056338
| false
| 0
| 0.007042
| 0
| 0.183099
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83caa497818e740995a97a250fd9887b8a91f834
| 2,748
|
py
|
Python
|
kbastroutils/grismcalibpath.py
|
bkornpob/kbastroutils
|
89bdf8f395234a22a8b1386f028892b07559022e
|
[
"MIT"
] | null | null | null |
kbastroutils/grismcalibpath.py
|
bkornpob/kbastroutils
|
89bdf8f395234a22a8b1386f028892b07559022e
|
[
"MIT"
] | null | null | null |
kbastroutils/grismcalibpath.py
|
bkornpob/kbastroutils
|
89bdf8f395234a22a8b1386f028892b07559022e
|
[
"MIT"
] | null | null | null |
class GrismCalibPath:
    """Lookup table of HST grism calibration-file paths.

    Builds ``self.table``, a nested dict keyed first by table name
    ('CONF', 'SENS', 'BKG', 'FLAT'), then by (telescope, instrument, detector)
    tuple, then by a grism-specific key (grism name, (grism, filter) tuple, or
    ACS chip number) mapping to an absolute file path.
    """

    # Default root of the calibration-file tree; kept for backward
    # compatibility with the original hard-coded paths.
    DEFAULT_BASE = '/Users/kbhirombhakdi/_work/_calib_files'

    def __init__(self, base=DEFAULT_BASE):
        """Construct the path table.

        :param str base: root directory containing the calibration
            subdirectories; defaults to the original hard-coded location so
            existing callers are unaffected.
        """
        # Per-instrument subdirectories under the calibration root.
        g102_dir = base + '/WFC3.IR.G102.cal.V4.32'
        g141_dir = base + '/WFC3.IR.G141.cal.V4.32'
        acs_dir = base + '/ACS.WFC'
        # aXe-style configuration files, keyed by (grism, filter[, chip]).
        CONF = {('HST', 'WFC3', 'IR'): {('G102', 'F098M'): g102_dir + '/G102.F098M.V4.32.conf',
                                        ('G102', 'F105W'): g102_dir + '/G102.F105W.V4.32.conf',
                                        ('G141', 'F140W'): g141_dir + '/G141.F140W.V4.32.conf',
                                        ('G141', 'F160W'): g141_dir + '/G141.F160W.V4.32.conf'
                                        },
                ('HST', 'ACS', 'WFC'): {('G800L', 'F775W', 1): acs_dir + '/ACS.WFC.CHIP1.Cycle13.5.conf',
                                        ('G800L', 'F775W', 2): acs_dir + '/ACS.WFC.CHIP2.Cycle13.5.conf'
                                        }
                }
        # First-order sensitivity curves, keyed by grism.
        SENS = {('HST', 'WFC3', 'IR'): {'G102': g102_dir + '/WFC3.IR.G102.1st.sens.2.fits',
                                        'G141': g141_dir + '/WFC3.IR.G141.1st.sens.2.fits'
                                        },
                ('HST', 'ACS', 'WFC'): {'G800L': acs_dir + '/ACS.WFC.1st.sens.7.fits'}
                }
        # Master sky-background images; ACS entries are keyed by chip number.
        BKG = {('HST', 'WFC3', 'IR'): {'G102': g102_dir + '/WFC3.IR.G102.sky.V1.0.fits',
                                       'G141': g141_dir + '/WFC3.IR.G141.sky.V1.0.fits'
                                       },
               ('HST', 'ACS', 'WFC'): {1: acs_dir + '/ACS.WFC.CHIP1.msky.1.fits',
                                       2: acs_dir + '/ACS.WFC.CHIP2.msky.1.fits'
                                       }
               }
        # Flat-field cubes; ACS entries are keyed by chip number.
        FLAT = {('HST', 'WFC3', 'IR'): {'G102': g102_dir + '/WFC3.IR.G102.flat.2.fits',
                                        'G141': g141_dir + '/WFC3.IR.G141.flat.2.fits'
                                        },
                ('HST', 'ACS', 'WFC'): {1: acs_dir + '/WFC.flat.cube.CH1.2.fits',
                                        2: acs_dir + '/WFC.flat.cube.CH2.2.fits'
                                        }
                }
        self.table = {'CONF': CONF, 'SENS': SENS, 'BKG': BKG, 'FLAT': FLAT}
| 83.272727
| 142
| 0.509461
| 331
| 2,748
| 4.063444
| 0.141994
| 0.089219
| 0.278067
| 0.341264
| 0.761338
| 0.750186
| 0.750186
| 0.750186
| 0.750186
| 0.710781
| 0
| 0.117555
| 0.303493
| 2,748
| 33
| 143
| 83.272727
| 0.585162
| 0
| 0
| 0
| 0
| 0.53125
| 0.56566
| 0.503092
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
83f7000532ae9e03f16c369bf48d00224f742ef9
| 131,404
|
py
|
Python
|
atom/nucleus/python/nucleus_api/api/order_api.py
|
ShekharPaatni/SDK
|
6534ffdb63af87c02c431df9add05a90370183cb
|
[
"Apache-2.0"
] | 11
|
2019-04-16T02:11:17.000Z
|
2021-12-16T22:51:40.000Z
|
atom/nucleus/python/nucleus_api/api/order_api.py
|
ShekharPaatni/SDK
|
6534ffdb63af87c02c431df9add05a90370183cb
|
[
"Apache-2.0"
] | 81
|
2019-11-19T23:24:28.000Z
|
2022-03-28T11:35:47.000Z
|
atom/nucleus/python/nucleus_api/api/order_api.py
|
ShekharPaatni/SDK
|
6534ffdb63af87c02c431df9add05a90370183cb
|
[
"Apache-2.0"
] | 11
|
2020-07-08T02:29:56.000Z
|
2022-03-28T10:05:33.000Z
|
# coding: utf-8
"""
Hydrogen Nucleus API
The Hydrogen Nucleus API # noqa: E501
OpenAPI spec version: 1.9.4
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nucleus_api.api_client import ApiClient
class OrderApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Initialize the API wrapper, creating a default ApiClient when none is given."""
    self.api_client = api_client if api_client is not None else ApiClient()
def create_account_order_bulk_using_post(self, account_id, req, **kwargs):  # noqa: E501
    """Bulk orders for an account  # noqa: E501

    Aggregates all orders on a given date for an account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_account_order_bulk_using_post(account_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str account_id: account_id (required)
    :param TokenDateRequest req: req (required)
    :return: list[Order]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP envelope and hand back only the payload.
    kwargs['_return_http_data_only'] = True
    result = self.create_account_order_bulk_using_post_with_http_info(account_id, req, **kwargs)  # noqa: E501
    # Async calls return the request thread directly; sync calls return the data.
    return result
def create_account_order_bulk_using_post_with_http_info(self, account_id, req, **kwargs):  # noqa: E501
    """Bulk orders for an account # noqa: E501
    Aggregates all orders on a given date for an account. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_account_order_bulk_using_post_with_http_info(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param TokenDateRequest req: req (required)
    :return: list[Order]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_account_order_bulk_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if self.api_client.client_side_validation and ('account_id' not in params or
                                                   params['account_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `account_id` when calling `create_account_order_bulk_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_account_order_bulk_using_post`")  # noqa: E501

    collection_formats = {}

    # `account_id` is substituted into the {account_id} URL template below.
    path_params = {}
    if 'account_id' in params:
        path_params['account_id'] = params['account_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/account/{account_id}/order_bulk', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Order]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_account_order_buy_only_using_post(self, account_id, req, **kwargs):  # noqa: E501
    """Create buy-only account rebalance orders  # noqa: E501

    Create order records necessary to rebalance an account and all its portfolios according to the allocation(s) to which the account subscribes and models to which the portfolios subscribe.  # noqa: E501
    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_account_order_buy_only_using_post(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_account_order_buy_only_using_post_with_http_info(account_id, req, **kwargs)  # noqa: E501
def create_account_order_buy_only_using_post_with_http_info(self, account_id, req, **kwargs):  # noqa: E501
    """Create buy-only account rebalance orders # noqa: E501
    Create order records necessary to rebalance an account and all its portfolios according to the allocation(s) to which the account subscribes and models to which the portfolios subscribe. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_account_order_buy_only_using_post_with_http_info(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_account_order_buy_only_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if self.api_client.client_side_validation and ('account_id' not in params or
                                                   params['account_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `account_id` when calling `create_account_order_buy_only_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_account_order_buy_only_using_post`")  # noqa: E501

    collection_formats = {}

    # `account_id` is substituted into the {account_id} URL template below.
    path_params = {}
    if 'account_id' in params:
        path_params['account_id'] = params['account_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/account/{account_id}/order_buy_only', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_account_order_rebalance_using_post(self, account_id, req, **kwargs):  # noqa: E501
    """Create account rebalance orders  # noqa: E501

    Create order records necessary to rebalance an account and all its portfolios according to the allocation(s) to which the account subscribes and models to which the portfolios subscribe.  # noqa: E501
    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_account_order_rebalance_using_post(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_account_order_rebalance_using_post_with_http_info(account_id, req, **kwargs)  # noqa: E501
def create_account_order_rebalance_using_post_with_http_info(self, account_id, req, **kwargs):  # noqa: E501
    """Create account rebalance orders # noqa: E501
    Create order records necessary to rebalance an account and all its portfolios according to the allocation(s) to which the account subscribes and models to which the portfolios subscribe. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_account_order_rebalance_using_post_with_http_info(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_account_order_rebalance_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if self.api_client.client_side_validation and ('account_id' not in params or
                                                   params['account_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `account_id` when calling `create_account_order_rebalance_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_account_order_rebalance_using_post`")  # noqa: E501

    collection_formats = {}

    # `account_id` is substituted into the {account_id} URL template below.
    path_params = {}
    if 'account_id' in params:
        path_params['account_id'] = params['account_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/account/{account_id}/order_rebalance', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_account_order_reconciliation(self, account_id, req, **kwargs):  # noqa: E501
    """Create account reconciliation order  # noqa: E501

    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_account_order_reconciliation(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param OrderReconcileRequest req: req (required)
    :return: OrderReconcileReturnObject
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_account_order_reconciliation_with_http_info(account_id, req, **kwargs)  # noqa: E501
def create_account_order_reconciliation_with_http_info(self, account_id, req, **kwargs):  # noqa: E501
    """Create account reconciliation order # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_account_order_reconciliation_with_http_info(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param OrderReconcileRequest req: req (required)
    :return: OrderReconcileReturnObject
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_account_order_reconciliation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if self.api_client.client_side_validation and ('account_id' not in params or
                                                   params['account_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `account_id` when calling `create_account_order_reconciliation`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_account_order_reconciliation`")  # noqa: E501

    collection_formats = {}

    # `account_id` is substituted into the {account_id} URL template below.
    path_params = {}
    if 'account_id' in params:
        path_params['account_id'] = params['account_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/account/{account_id}/order_reconciliation', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderReconcileReturnObject',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_account_order_sell_all_using_post(self, account_id, req, **kwargs):  # noqa: E501
    """Sell all account order  # noqa: E501

    Create order records necessary to entirely sell all the holdings within an account.  # noqa: E501
    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_account_order_sell_all_using_post(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_account_order_sell_all_using_post_with_http_info(account_id, req, **kwargs)  # noqa: E501
def create_account_order_sell_all_using_post_with_http_info(self, account_id, req, **kwargs):  # noqa: E501
    """Sell all account order # noqa: E501
    Create order records necessary to entirely sell all the holdings within an account. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_account_order_sell_all_using_post_with_http_info(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_account_order_sell_all_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if self.api_client.client_side_validation and ('account_id' not in params or
                                                   params['account_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `account_id` when calling `create_account_order_sell_all_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_account_order_sell_all_using_post`")  # noqa: E501

    collection_formats = {}

    # `account_id` is substituted into the {account_id} URL template below.
    path_params = {}
    if 'account_id' in params:
        path_params['account_id'] = params['account_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/account/{account_id}/order_sell_all', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_account_order_sell_only_using_post(self, account_id, req, **kwargs):  # noqa: E501
    """Create sell-only account rebalance orders  # noqa: E501

    Create order records necessary to rebalance an account and all its portfolios according to the allocation(s) to which the account subscribes and models to which the portfolios subscribe.  # noqa: E501
    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_account_order_sell_only_using_post(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_account_order_sell_only_using_post_with_http_info(account_id, req, **kwargs)  # noqa: E501
def create_account_order_sell_only_using_post_with_http_info(self, account_id, req, **kwargs):  # noqa: E501
    """Create sell-only account rebalance orders # noqa: E501
    Create order records necessary to rebalance an account and all its portfolios according to the allocation(s) to which the account subscribes and models to which the portfolios subscribe. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_account_order_sell_only_using_post_with_http_info(account_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str account_id: account_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_account_order_sell_only_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if self.api_client.client_side_validation and ('account_id' not in params or
                                                   params['account_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `account_id` when calling `create_account_order_sell_only_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_account_order_sell_only_using_post`")  # noqa: E501

    collection_formats = {}

    # `account_id` is substituted into the {account_id} URL template below.
    path_params = {}
    if 'account_id' in params:
        path_params['account_id'] = params['account_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/account/{account_id}/order_sell_only', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_client_order_bulk_using_post(self, client_id, req, **kwargs):  # noqa: E501
    """Bulk orders for a client  # noqa: E501

    Aggregates all orders on a given date for a client  # noqa: E501
    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_client_order_bulk_using_post(client_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str client_id: client_id (required)
    :param TokenDateRequest req: req (required)
    :return: list[Order]
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_client_order_bulk_using_post_with_http_info(client_id, req, **kwargs)  # noqa: E501
def create_client_order_bulk_using_post_with_http_info(self, client_id, req, **kwargs):  # noqa: E501
    """Bulk orders for a client # noqa: E501
    Aggregates all orders on a given date for a client # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_client_order_bulk_using_post_with_http_info(client_id, req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str client_id: client_id (required)
    :param TokenDateRequest req: req (required)
    :return: list[Order]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['client_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_client_order_bulk_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'client_id' is set
    if self.api_client.client_side_validation and ('client_id' not in params or
                                                   params['client_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `client_id` when calling `create_client_order_bulk_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_client_order_bulk_using_post`")  # noqa: E501

    collection_formats = {}

    # `client_id` is substituted into the {client_id} URL template below.
    path_params = {}
    if 'client_id' in params:
        path_params['client_id'] = params['client_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/client/{client_id}/order_bulk', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Order]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_order_bulk_using_post(self, req, **kwargs):  # noqa: E501
    """Bulk orders for your firm  # noqa: E501

    Aggregates all orders on a given date for your firm.  # noqa: E501
    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_order_bulk_using_post(req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param TokenDateRequest req: req (required)
    :return: list[Order]
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_order_bulk_using_post_with_http_info(req, **kwargs)  # noqa: E501
def create_order_bulk_using_post_with_http_info(self, req, **kwargs):  # noqa: E501
    """Bulk orders for your firm # noqa: E501
    Aggregates all orders on a given date for your firm. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_order_bulk_using_post_with_http_info(req, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param TokenDateRequest req: req (required)
    :return: list[Order]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_order_bulk_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_order_bulk_using_post`")  # noqa: E501

    collection_formats = {}

    # Firm-level endpoint: no path placeholders to fill in.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized `req` object becomes the JSON request body.
    body_params = None
    if 'req' in params:
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/order_bulk', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Order]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_order_status_using_post(self, order_status_request, **kwargs):  # noqa: E501
    """Create an order status  # noqa: E501

    Create an order status for your firm.  # noqa: E501
    This method is synchronous by default; pass async_req=True to receive
    a worker thread instead:
    >>> thread = api.create_order_status_using_post(order_status_request, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param OrderStatus order_status_request: orderStatusRequest (required)
    :return: OrderStatus
    If the method is called asynchronously,
    returns the request thread.
    """
    # This convenience wrapper always yields the deserialized body only,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns either the request thread
    # (async_req=True) or the data, so one return covers both paths.
    return self.create_order_status_using_post_with_http_info(order_status_request, **kwargs)  # noqa: E501
def create_order_status_using_post_with_http_info(self, order_status_request, **kwargs):  # noqa: E501
    """Create an order status # noqa: E501
    Create an order status for your firm. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_order_status_using_post_with_http_info(order_status_request, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param OrderStatus order_status_request: orderStatusRequest (required)
    :return: OrderStatus
    If the method is called asynchronously,
    returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed in **kwargs is rejected below with a TypeError.
    all_params = ['order_status_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot explicit arguments, then fold validated **kwargs into the
    # same dict so every option is addressable by name.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_order_status_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_status_request' is set
    if self.api_client.client_side_validation and ('order_status_request' not in params or
                                                   params['order_status_request'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_status_request` when calling `create_order_status_using_post`")  # noqa: E501

    collection_formats = {}

    # Firm-level endpoint: no path placeholders to fill in.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The serialized order-status object becomes the JSON request body.
    body_params = None
    if 'order_status_request' in params:
        body_params = params['order_status_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # ApiClient handles serialization, transport, auth and deserialization.
    return self.api_client.call_api(
        '/nucleus/v1/order_status', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderStatus',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_order_track_using_post(self, order_track_request, **kwargs):  # noqa: E501
    """Create an order tracking record  # noqa: E501

    Create a new order tracking record for an order. Synchronous by
    default; pass async_req=True to receive the request thread instead.
    >>> thread = api.create_order_track_using_post(order_track_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param OrderTrack order_track_request: orderTrackRequest (required)
    :return: OrderTrack
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.create_order_track_using_post_with_http_info(order_track_request, **kwargs)  # noqa: E501
def create_order_track_using_post_with_http_info(self, order_track_request, **kwargs):  # noqa: E501
    """Create an order tracking record  # noqa: E501

    Create a new order tracking record for an order.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_order_track_using_post_with_http_info(order_track_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param OrderTrack order_track_request: orderTrackRequest (required)
    :return: OrderTrack
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['order_track_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_order_track_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_track_request' is set
    if self.api_client.client_side_validation and ('order_track_request' not in params or
                                                   params['order_track_request'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_track_request` when calling `create_order_track_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'order_track_request' in params:
        # The request model is serialized as the JSON request body.
        body_params = params['order_track_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/order_track', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderTrack',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_order_using_post(self, order_info_request, **kwargs):  # noqa: E501
    """Create an order record  # noqa: E501

    Create an order record defined for your firm. Synchronous by default;
    pass async_req=True to receive the request thread instead.
    >>> thread = api.create_order_using_post(order_info_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Order order_info_request: orderInfoRequest (required)
    :return: Order
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.create_order_using_post_with_http_info(order_info_request, **kwargs)  # noqa: E501
def create_order_using_post_with_http_info(self, order_info_request, **kwargs):  # noqa: E501
    """Create an order record  # noqa: E501

    Create an order record defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_order_using_post_with_http_info(order_info_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Order order_info_request: orderInfoRequest (required)
    :return: Order
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['order_info_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_order_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_info_request' is set
    if self.api_client.client_side_validation and ('order_info_request' not in params or
                                                   params['order_info_request'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_info_request` when calling `create_order_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'order_info_request' in params:
        # The request model is serialized as the JSON request body.
        body_params = params['order_info_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/order', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Order',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_portfolio_order_buy_only_using_post(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create buy-only portfolio rebalance orders  # noqa: E501

    Create order records necessary to rebalance a portfolio to the model
    to which it subscribes, using buy transactions only (nothing is sold).
    Synchronous by default; pass async_req=True for the request thread.
    >>> thread = api.create_portfolio_order_buy_only_using_post(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.create_portfolio_order_buy_only_using_post_with_http_info(portfolio_id, req, **kwargs)  # noqa: E501
def create_portfolio_order_buy_only_using_post_with_http_info(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create buy-only portfolio rebalance orders  # noqa: E501

    Create order records necessary to rebalance a portfolio to the model to which it subscribes. However, these will only be buy transactions, and no securities will be sold.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_portfolio_order_buy_only_using_post_with_http_info(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['portfolio_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_portfolio_order_buy_only_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'portfolio_id' is set
    if self.api_client.client_side_validation and ('portfolio_id' not in params or
                                                   params['portfolio_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `portfolio_id` when calling `create_portfolio_order_buy_only_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_portfolio_order_buy_only_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'portfolio_id' in params:
        # Substituted into the {portfolio_id} segment of the URL template.
        path_params['portfolio_id'] = params['portfolio_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'req' in params:
        # The rebalance request model is serialized as the JSON request body.
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/portfolio/{portfolio_id}/order_buy_only', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_portfolio_order_rebalance_using_post(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create portfolio rebalance orders  # noqa: E501

    Create order records necessary to rebalance a portfolio to the model
    to which it subscribes. Synchronous by default; pass async_req=True
    to receive the request thread instead.
    >>> thread = api.create_portfolio_order_rebalance_using_post(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.create_portfolio_order_rebalance_using_post_with_http_info(portfolio_id, req, **kwargs)  # noqa: E501
def create_portfolio_order_rebalance_using_post_with_http_info(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create portfolio rebalance orders  # noqa: E501

    Create order records necessary to rebalance a portfolio to the model to which it subscribes.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_portfolio_order_rebalance_using_post_with_http_info(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['portfolio_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_portfolio_order_rebalance_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'portfolio_id' is set
    if self.api_client.client_side_validation and ('portfolio_id' not in params or
                                                   params['portfolio_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `portfolio_id` when calling `create_portfolio_order_rebalance_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_portfolio_order_rebalance_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'portfolio_id' in params:
        # Substituted into the {portfolio_id} segment of the URL template.
        path_params['portfolio_id'] = params['portfolio_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'req' in params:
        # The rebalance request model is serialized as the JSON request body.
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/portfolio/{portfolio_id}/order_rebalance', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_portfolio_order_reconciliation(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create portfolio reconciliation order  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead.
    >>> thread = api.create_portfolio_order_reconciliation(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param OrderReconcileRequest req: req (required)
    :return: OrderReconcileReturnObject
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.create_portfolio_order_reconciliation_with_http_info(portfolio_id, req, **kwargs)  # noqa: E501
def create_portfolio_order_reconciliation_with_http_info(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create portfolio reconciliation order  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_portfolio_order_reconciliation_with_http_info(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param OrderReconcileRequest req: req (required)
    :return: OrderReconcileReturnObject
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['portfolio_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_portfolio_order_reconciliation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'portfolio_id' is set
    if self.api_client.client_side_validation and ('portfolio_id' not in params or
                                                   params['portfolio_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `portfolio_id` when calling `create_portfolio_order_reconciliation`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_portfolio_order_reconciliation`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'portfolio_id' in params:
        # Substituted into the {portfolio_id} segment of the URL template.
        path_params['portfolio_id'] = params['portfolio_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'req' in params:
        # The reconcile request model is serialized as the JSON request body.
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/portfolio/{portfolio_id}/order_reconciliation', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderReconcileReturnObject',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_portfolio_order_sell_all_using_post(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Sell all portfolio order  # noqa: E501

    Create order records necessary to entirely sell all the holdings
    within a portfolio. Synchronous by default; pass async_req=True to
    receive the request thread instead.
    >>> thread = api.create_portfolio_order_sell_all_using_post(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.create_portfolio_order_sell_all_using_post_with_http_info(portfolio_id, req, **kwargs)  # noqa: E501
def create_portfolio_order_sell_all_using_post_with_http_info(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Sell all portfolio order  # noqa: E501

    Create order records necessary to entirely sell all the holdings within a portfolio.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_portfolio_order_sell_all_using_post_with_http_info(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['portfolio_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_portfolio_order_sell_all_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'portfolio_id' is set
    if self.api_client.client_side_validation and ('portfolio_id' not in params or
                                                   params['portfolio_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `portfolio_id` when calling `create_portfolio_order_sell_all_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_portfolio_order_sell_all_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'portfolio_id' in params:
        # Substituted into the {portfolio_id} segment of the URL template.
        path_params['portfolio_id'] = params['portfolio_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'req' in params:
        # The rebalance request model is serialized as the JSON request body.
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/portfolio/{portfolio_id}/order_sell_all', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_portfolio_order_sell_only_using_post(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create sell-only portfolio rebalance orders  # noqa: E501

    Create order records necessary to rebalance a portfolio to the model
    to which it subscribes. Synchronous by default; pass async_req=True
    to receive the request thread instead.
    >>> thread = api.create_portfolio_order_sell_only_using_post(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.create_portfolio_order_sell_only_using_post_with_http_info(portfolio_id, req, **kwargs)  # noqa: E501
def create_portfolio_order_sell_only_using_post_with_http_info(self, portfolio_id, req, **kwargs):  # noqa: E501
    """Create sell-only portfolio rebalance orders  # noqa: E501

    Create order records necessary to rebalance a portfolio to the model to which it subscribes.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_portfolio_order_sell_only_using_post_with_http_info(portfolio_id, req, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str portfolio_id: portfolio_id (required)
    :param AccountPortfolioRebalanceRequest req: req (required)
    :return: list[OrderVoClone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['portfolio_id', 'req']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_portfolio_order_sell_only_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'portfolio_id' is set
    if self.api_client.client_side_validation and ('portfolio_id' not in params or
                                                   params['portfolio_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `portfolio_id` when calling `create_portfolio_order_sell_only_using_post`")  # noqa: E501
    # verify the required parameter 'req' is set
    if self.api_client.client_side_validation and ('req' not in params or
                                                   params['req'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `req` when calling `create_portfolio_order_sell_only_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'portfolio_id' in params:
        # Substituted into the {portfolio_id} segment of the URL template.
        path_params['portfolio_id'] = params['portfolio_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'req' in params:
        # The rebalance request model is serialized as the JSON request body.
        body_params = params['req']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/portfolio/{portfolio_id}/order_sell_only', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[OrderVoClone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_order_status_using_delete(self, order_status_id, **kwargs):  # noqa: E501
    """Delete an order status  # noqa: E501

    Permanently delete an order status defined for your firm. Synchronous
    by default; pass async_req=True to receive the request thread instead.
    >>> thread = api.delete_order_status_using_delete(order_status_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_status_id: UUID order_status_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.delete_order_status_using_delete_with_http_info(order_status_id, **kwargs)  # noqa: E501
def delete_order_status_using_delete_with_http_info(self, order_status_id, **kwargs):  # noqa: E501
    """Delete an order status  # noqa: E501

    Permanently delete an order status defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_order_status_using_delete_with_http_info(order_status_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_status_id: UUID order_status_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['order_status_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_order_status_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_status_id' is set
    if self.api_client.client_side_validation and ('order_status_id' not in params or
                                                   params['order_status_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_status_id` when calling `delete_order_status_using_delete`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_status_id' in params:
        # Substituted into the {order_status_id} segment of the URL template.
        path_params['order_status_id'] = params['order_status_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE request: no request body, hence no Content-Type header either.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/order_status/{order_status_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_order_track_using_delete(self, order_track_id, **kwargs):  # noqa: E501
    """Delete an order tracking record  # noqa: E501

    Permanently delete an order tracking record for an order. Synchronous
    by default; pass async_req=True to receive the request thread instead.
    >>> thread = api.delete_order_track_using_delete(order_track_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_track_id: UUID order_track_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized payload,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields either the payload or,
    # for async_req=True, the worker thread — hand its result straight back.
    return self.delete_order_track_using_delete_with_http_info(order_track_id, **kwargs)  # noqa: E501
def delete_order_track_using_delete_with_http_info(self, order_track_id, **kwargs):  # noqa: E501
    """Delete an order tracking record  # noqa: E501

    Permanently delete an order tracking record for an order.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_order_track_using_delete_with_http_info(order_track_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_track_id: UUID order_track_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # client options understood by ApiClient.call_api.
    all_params = ['order_track_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured here on purpose — at this point it holds
    # `self`, the declared positional arguments, `kwargs` and `all_params`.
    # Declaring another local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # Reject any keyword argument the endpoint does not understand.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_order_track_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_track_id' is set
    if self.api_client.client_side_validation and ('order_track_id' not in params or
                                                   params['order_track_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_track_id` when calling `delete_order_track_using_delete`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_track_id' in params:
        # Substituted into the {order_track_id} segment of the URL template.
        path_params['order_track_id'] = params['order_track_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE request: no request body, hence no Content-Type header either.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Delegate the HTTP round trip (and async handling) to the shared client.
    return self.api_client.call_api(
        '/nucleus/v1/order_track/{order_track_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_order_using_delete(self, order_id, **kwargs):  # noqa: E501
    """Delete an order record  # noqa: E501

    Permanently delete an order record.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_order_using_delete(order_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_id: UUID order_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # In both the synchronous and the async_req case the delegate's return
    # value is handed back unchanged (in async mode it is the request thread).
    return self.delete_order_using_delete_with_http_info(order_id, **kwargs)  # noqa: E501
def delete_order_using_delete_with_http_info(self, order_id, **kwargs):  # noqa: E501
    """Delete an order record  # noqa: E501

    Permanently delete an order record.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_order_using_delete_with_http_info(order_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_id: UUID order_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `order_id` is missing while client-side
        validation is enabled.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['order_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_order_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_id' is set
    if self.api_client.client_side_validation and ('order_id' not in params or
                                                   params['order_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_id` when calling `delete_order_using_delete`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_id' in params:
        path_params['order_id'] = params['order_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order/{order_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_order_all_using_get(self, **kwargs):  # noqa: E501
    """List all order records  # noqa: E501

    Get the information for all order records defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_all_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrder
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.get_order_all_using_get_with_http_info(**kwargs)  # noqa: E501
def get_order_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """List all order records  # noqa: E501

    Get the information for all order records defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_all_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrder
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['ascending', 'filter', 'order_by', 'page', 'size']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_order_all_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'ascending' in params:
        query_params.append(('ascending', params['ascending']))  # noqa: E501
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'order_by' in params:
        query_params.append(('order_by', params['order_by']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'size' in params:
        query_params.append(('size', params['size']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageOrder',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_order_bulk_all_using_get(self, **kwargs):  # noqa: E501
    """List all bulk orders  # noqa: E501

    Get the information for all bulk order records.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_bulk_all_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrderBulk
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.get_order_bulk_all_using_get_with_http_info(**kwargs)  # noqa: E501
def get_order_bulk_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """List all bulk orders  # noqa: E501

    Get the information for all bulk order records.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_bulk_all_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrderBulk
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['ascending', 'filter', 'order_by', 'page', 'size']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_order_bulk_all_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'ascending' in params:
        query_params.append(('ascending', params['ascending']))  # noqa: E501
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'order_by' in params:
        query_params.append(('order_by', params['order_by']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'size' in params:
        query_params.append(('size', params['size']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order_bulk', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageOrderBulk',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_order_status_all_using_get(self, **kwargs):  # noqa: E501
    """List all order statuses  # noqa: E501

    Get the information for all order statuses defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_status_all_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrderStatus
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.get_order_status_all_using_get_with_http_info(**kwargs)  # noqa: E501
def get_order_status_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """List all order statuses  # noqa: E501

    Get the information for all order statuses defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_status_all_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrderStatus
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['ascending', 'filter', 'order_by', 'page', 'size']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_order_status_all_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'ascending' in params:
        query_params.append(('ascending', params['ascending']))  # noqa: E501
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'order_by' in params:
        query_params.append(('order_by', params['order_by']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'size' in params:
        query_params.append(('size', params['size']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order_status', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageOrderStatus',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_order_status_using_get(self, order_status_id, **kwargs):  # noqa: E501
    """Retrieve an order status  # noqa: E501

    Retrieve the information for an order status defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_status_using_get(order_status_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_status_id: UUID order_status_id (required)
    :return: OrderStatus
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.get_order_status_using_get_with_http_info(order_status_id, **kwargs)  # noqa: E501
def get_order_status_using_get_with_http_info(self, order_status_id, **kwargs):  # noqa: E501
    """Retrieve an order status  # noqa: E501

    Retrieve the information for an order status defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_status_using_get_with_http_info(order_status_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_status_id: UUID order_status_id (required)
    :return: OrderStatus
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `order_status_id` is missing while client-side
        validation is enabled.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['order_status_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_order_status_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_status_id' is set
    if self.api_client.client_side_validation and ('order_status_id' not in params or
                                                   params['order_status_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_status_id` when calling `get_order_status_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_status_id' in params:
        path_params['order_status_id'] = params['order_status_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order_status/{order_status_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderStatus',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_order_track_all_using_get(self, **kwargs):  # noqa: E501
    """List all order tracking records  # noqa: E501

    Get the information for all order tracking record for all order records.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_track_all_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrderTrack
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.get_order_track_all_using_get_with_http_info(**kwargs)  # noqa: E501
def get_order_track_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """List all order tracking records  # noqa: E501

    Get the information for all order tracking record for all order records.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_track_all_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageOrderTrack
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['ascending', 'filter', 'order_by', 'page', 'size']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_order_track_all_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'ascending' in params:
        query_params.append(('ascending', params['ascending']))  # noqa: E501
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'order_by' in params:
        query_params.append(('order_by', params['order_by']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'size' in params:
        query_params.append(('size', params['size']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order_track', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageOrderTrack',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_order_track_using_get(self, order_track_id, **kwargs):  # noqa: E501
    """Retrieve an order tracking record  # noqa: E501

    Retrieve the information for an order tracking record for an order.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_track_using_get(order_track_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_track_id: UUID order_track_id (required)
    :return: OrderTrack
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.get_order_track_using_get_with_http_info(order_track_id, **kwargs)  # noqa: E501
def get_order_track_using_get_with_http_info(self, order_track_id, **kwargs):  # noqa: E501
    """Retrieve an order tracking record  # noqa: E501

    Retrieve the information for an order tracking record for an order.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_track_using_get_with_http_info(order_track_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_track_id: UUID order_track_id (required)
    :return: OrderTrack
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `order_track_id` is missing while client-side
        validation is enabled.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['order_track_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_order_track_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_track_id' is set
    if self.api_client.client_side_validation and ('order_track_id' not in params or
                                                   params['order_track_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_track_id` when calling `get_order_track_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_track_id' in params:
        path_params['order_track_id'] = params['order_track_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order_track/{order_track_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderTrack',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_order_using_get(self, order_id, **kwargs):  # noqa: E501
    """Retrieve an order record  # noqa: E501

    Retrieve the information for an order record.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_using_get(order_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_id: UUID order_id (required)
    :return: Order
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.get_order_using_get_with_http_info(order_id, **kwargs)  # noqa: E501
def get_order_using_get_with_http_info(self, order_id, **kwargs):  # noqa: E501
    """Retrieve an order record  # noqa: E501

    Retrieve the information for an order record.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_order_using_get_with_http_info(order_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str order_id: UUID order_id (required)
    :return: Order
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `order_id` is missing while client-side
        validation is enabled.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['order_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_order_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_id' is set
    if self.api_client.client_side_validation and ('order_id' not in params or
                                                   params['order_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_id` when calling `get_order_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_id' in params:
        path_params['order_id'] = params['order_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order/{order_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Order',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_order_status_using_put(self, order_status, order_status_id, **kwargs):  # noqa: E501
    """Update an order status  # noqa: E501

    Update the information for an order status defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_order_status_using_put(order_status, order_status_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param object order_status: order_status (required)
    :param str order_status_id: UUID order_status_id (required)
    :return: OrderStatus
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.update_order_status_using_put_with_http_info(order_status, order_status_id, **kwargs)  # noqa: E501
def update_order_status_using_put_with_http_info(self, order_status, order_status_id, **kwargs):  # noqa: E501
    """Update an order status  # noqa: E501

    Update the information for an order status defined for your firm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_order_status_using_put_with_http_info(order_status, order_status_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param object order_status: order_status (required)
    :param str order_status_id: UUID order_status_id (required)
    :return: OrderStatus
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if a required parameter is missing while
        client-side validation is enabled.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['order_status', 'order_status_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_order_status_using_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_status' is set
    if self.api_client.client_side_validation and ('order_status' not in params or
                                                   params['order_status'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_status` when calling `update_order_status_using_put`")  # noqa: E501
    # verify the required parameter 'order_status_id' is set
    if self.api_client.client_side_validation and ('order_status_id' not in params or
                                                   params['order_status_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_status_id` when calling `update_order_status_using_put`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_status_id' in params:
        path_params['order_status_id'] = params['order_status_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'order_status' in params:
        body_params = params['order_status']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order_status/{order_status_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderStatus',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_order_track_using_put(self, order_track, order_track_id, **kwargs):  # noqa: E501
    """Update an order tracking record  # noqa: E501

    Update the information for an order tracking record for an order.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_order_track_using_put(order_track, order_track_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param object order_track: order_track (required)
    :param str order_track_id: UUID order_track_id (required)
    :return: OrderTrack
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get only the payload, never the
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate's return value is passed through unchanged in both the
    # synchronous and the async_req case (thread handle in the latter).
    return self.update_order_track_using_put_with_http_info(order_track, order_track_id, **kwargs)  # noqa: E501
def update_order_track_using_put_with_http_info(self, order_track, order_track_id, **kwargs):  # noqa: E501
    """Update an order tracking record  # noqa: E501

    Update the information for an order tracking record for an order.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_order_track_using_put_with_http_info(order_track, order_track_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param object order_track: order_track (required)
    :param str order_track_id: UUID order_track_id (required)
    :return: OrderTrack
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if a required parameter is missing while
        client-side validation is enabled.
    """
    # Endpoint parameters plus the framework options every method accepts.
    all_params = ['order_track', 'order_track_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() is identical in behavior to the former six.iteritems()
    # call; the Python 2 compatibility shim is not needed here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_order_track_using_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'order_track' is set
    if self.api_client.client_side_validation and ('order_track' not in params or
                                                   params['order_track'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_track` when calling `update_order_track_using_put`")  # noqa: E501
    # verify the required parameter 'order_track_id' is set
    if self.api_client.client_side_validation and ('order_track_id' not in params or
                                                   params['order_track_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `order_track_id` when calling `update_order_track_using_put`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'order_track_id' in params:
        path_params['order_track_id'] = params['order_track_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'order_track' in params:
        body_params = params['order_track']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/order_track/{order_track_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OrderTrack',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_order_using_put(self, order, order_id, **kwargs): # noqa: E501
"""Update an order record # noqa: E501
Update the information for an order record. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_order_using_put(order, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object order: order (required)
:param str order_id: UUID order_id (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_order_using_put_with_http_info(order, order_id, **kwargs) # noqa: E501
else:
(data) = self.update_order_using_put_with_http_info(order, order_id, **kwargs) # noqa: E501
return data
def update_order_using_put_with_http_info(self, order, order_id, **kwargs): # noqa: E501
"""Update an order record # noqa: E501
Update the information for an order record. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_order_using_put_with_http_info(order, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object order: order (required)
:param str order_id: UUID order_id (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['order', 'order_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_order_using_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'order' is set
if self.api_client.client_side_validation and ('order' not in params or
params['order'] is None): # noqa: E501
raise ValueError("Missing the required parameter `order` when calling `update_order_using_put`") # noqa: E501
# verify the required parameter 'order_id' is set
if self.api_client.client_side_validation and ('order_id' not in params or
params['order_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `order_id` when calling `update_order_using_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'order' in params:
body_params = params['order']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/nucleus/v1/order/{order_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 43.367657
| 209
| 0.620826
| 15,434
| 131,404
| 4.993326
| 0.014384
| 0.052318
| 0.020073
| 0.027093
| 0.992643
| 0.988919
| 0.985921
| 0.978318
| 0.975009
| 0.967249
| 0
| 0.017006
| 0.293423
| 131,404
| 3,029
| 210
| 43.381974
| 0.813037
| 0.325234
| 0
| 0.82303
| 1
| 0
| 0.195486
| 0.066135
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035758
| false
| 0
| 0.002424
| 0
| 0.091515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f7e437526d1709a91cef21264a7ccfcec1d95993
| 75
|
py
|
Python
|
bl25.py
|
rinapyktina/bl
|
38546465c8802be184fbd44ae521af54a5ec504f
|
[
"MIT"
] | null | null | null |
bl25.py
|
rinapyktina/bl
|
38546465c8802be184fbd44ae521af54a5ec504f
|
[
"MIT"
] | null | null | null |
bl25.py
|
rinapyktina/bl
|
38546465c8802be184fbd44ae521af54a5ec504f
|
[
"MIT"
] | null | null | null |
# Integer-arithmetic demo: floor division, multiplication, then modulo.
c = 21
d = 8
c //= d      # 21 // 8 -> 2
d = c * d    # 2 * 8   -> 16
c = d % c    # 16 % 2  -> 0
print("c =", c, "d =", d)
| 12.5
| 26
| 0.306667
| 18
| 75
| 1.277778
| 0.277778
| 0.347826
| 0.26087
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0.426667
| 75
| 6
| 26
| 12.5
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0.084507
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7eecedd9c8f45f38cad94cc7a296b66b62a67c5
| 16,103
|
py
|
Python
|
tb_api_client/swagger_client/apis/admin_controller_api.py
|
MOSAIC-LoPoW/oss7-thingsboard-backend-example
|
9b289dd7fdbb6e932ca338ad497a7bb1fc84d010
|
[
"Apache-2.0"
] | 5
|
2017-11-27T15:48:16.000Z
|
2020-09-21T04:18:47.000Z
|
tb_api_client/swagger_client/apis/admin_controller_api.py
|
MOSAIC-LoPoW/oss7-thingsboard-backend-example
|
9b289dd7fdbb6e932ca338ad497a7bb1fc84d010
|
[
"Apache-2.0"
] | null | null | null |
tb_api_client/swagger_client/apis/admin_controller_api.py
|
MOSAIC-LoPoW/oss7-thingsboard-backend-example
|
9b289dd7fdbb6e932ca338ad497a7bb1fc84d010
|
[
"Apache-2.0"
] | 6
|
2018-01-14T17:23:46.000Z
|
2019-06-24T13:38:54.000Z
|
# coding: utf-8
"""
Thingsboard REST API
For instructions how to authorize requests please visit <a href='http://thingsboard.io/docs/reference/rest-api/'>REST API documentation page</a>.
OpenAPI spec version: 2.0
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class AdminControllerApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    Fix applied: ``async`` became a reserved keyword in Python 3.7+, so the
    literal keyword argument ``async=...`` in the ``call_api`` invocations was
    a SyntaxError that made this module unimportable on Python 3.  The value
    is now passed via ``**{'async': ...}`` unpacking (placed last so the call
    is also valid Python 2 syntax); the runtime call is byte-for-byte the same.
    """

    def __init__(self, api_client=None):
        # Use the injected client when given; otherwise build a
        # default-configured ApiClient.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def check_updates_using_get(self, **kwargs):
        """
        checkUpdates
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.check_updates_using_get(async=True)
        >>> result = thread.get()

        :param async bool
        :return: UpdateMessage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return just the deserialized body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.check_updates_using_get_with_http_info(**kwargs)
        else:
            (data) = self.check_updates_using_get_with_http_info(**kwargs)
            return data

    def check_updates_using_get_with_http_info(self, **kwargs):
        """
        checkUpdates
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.check_updates_using_get_with_http_info(async=True)
        >>> result = thread.get()

        :param async bool
        :return: UpdateMessage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the arguments, then fold validated kwargs into it.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method check_updates_using_get" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['*/*'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['X-Authorization']

        # ``async`` is passed via ** unpacking (see class docstring).
        return self.api_client.call_api('/api/admin/updates', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='UpdateMessage',
                                        auth_settings=auth_settings,
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats,
                                        **{'async': params.get('async')})

    def get_admin_settings_using_get(self, key, **kwargs):
        """
        getAdminSettings
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_admin_settings_using_get(key, async=True)
        >>> result = thread.get()

        :param async bool
        :param str key: key (required)
        :return: AdminSettings
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return just the deserialized body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_admin_settings_using_get_with_http_info(key, **kwargs)
        else:
            (data) = self.get_admin_settings_using_get_with_http_info(key, **kwargs)
            return data

    def get_admin_settings_using_get_with_http_info(self, key, **kwargs):
        """
        getAdminSettings
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_admin_settings_using_get_with_http_info(key, async=True)
        >>> result = thread.get()

        :param async bool
        :param str key: key (required)
        :return: AdminSettings
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['key']
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the arguments, then fold validated kwargs into it.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_admin_settings_using_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'key' is set
        if ('key' not in params) or (params['key'] is None):
            raise ValueError("Missing the required parameter `key` when calling `get_admin_settings_using_get`")

        collection_formats = {}
        path_params = {}
        if 'key' in params:
            path_params['key'] = params['key']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['*/*'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['X-Authorization']

        # ``async`` is passed via ** unpacking (see class docstring).
        return self.api_client.call_api('/api/admin/settings/{key}', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AdminSettings',
                                        auth_settings=auth_settings,
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats,
                                        **{'async': params.get('async')})

    def save_admin_settings_using_post(self, admin_settings, **kwargs):
        """
        saveAdminSettings
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.save_admin_settings_using_post(admin_settings, async=True)
        >>> result = thread.get()

        :param async bool
        :param AdminSettings admin_settings: adminSettings (required)
        :return: AdminSettings
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return just the deserialized body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.save_admin_settings_using_post_with_http_info(admin_settings, **kwargs)
        else:
            (data) = self.save_admin_settings_using_post_with_http_info(admin_settings, **kwargs)
            return data

    def save_admin_settings_using_post_with_http_info(self, admin_settings, **kwargs):
        """
        saveAdminSettings
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.save_admin_settings_using_post_with_http_info(admin_settings, async=True)
        >>> result = thread.get()

        :param async bool
        :param AdminSettings admin_settings: adminSettings (required)
        :return: AdminSettings
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['admin_settings']
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the arguments, then fold validated kwargs into it.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method save_admin_settings_using_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'admin_settings' is set
        if ('admin_settings' not in params) or (params['admin_settings'] is None):
            raise ValueError("Missing the required parameter `admin_settings` when calling `save_admin_settings_using_post`")

        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The settings object is sent as the JSON request body.
        if 'admin_settings' in params:
            body_params = params['admin_settings']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['*/*'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['X-Authorization']

        # ``async`` is passed via ** unpacking (see class docstring).
        return self.api_client.call_api('/api/admin/settings', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AdminSettings',
                                        auth_settings=auth_settings,
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats,
                                        **{'async': params.get('async')})

    def send_test_mail_using_post(self, admin_settings, **kwargs):
        """
        sendTestMail
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.send_test_mail_using_post(admin_settings, async=True)
        >>> result = thread.get()

        :param async bool
        :param AdminSettings admin_settings: adminSettings (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return just the deserialized body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.send_test_mail_using_post_with_http_info(admin_settings, **kwargs)
        else:
            (data) = self.send_test_mail_using_post_with_http_info(admin_settings, **kwargs)
            return data

    def send_test_mail_using_post_with_http_info(self, admin_settings, **kwargs):
        """
        sendTestMail
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.send_test_mail_using_post_with_http_info(admin_settings, async=True)
        >>> result = thread.get()

        :param async bool
        :param AdminSettings admin_settings: adminSettings (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['admin_settings']
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the arguments, then fold validated kwargs into it.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method send_test_mail_using_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'admin_settings' is set
        if ('admin_settings' not in params) or (params['admin_settings'] is None):
            raise ValueError("Missing the required parameter `admin_settings` when calling `send_test_mail_using_post`")

        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The settings object is sent as the JSON request body.
        if 'admin_settings' in params:
            body_params = params['admin_settings']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['*/*'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['X-Authorization']

        # ``async`` is passed via ** unpacking (see class docstring).
        return self.api_client.call_api('/api/admin/settings/testMail', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats,
                                        **{'async': params.get('async')})
| 38.990315
| 149
| 0.559399
| 1,624
| 16,103
| 5.250616
| 0.102217
| 0.074704
| 0.02627
| 0.033775
| 0.91568
| 0.900786
| 0.890231
| 0.876158
| 0.862906
| 0.841797
| 0
| 0.000485
| 0.35956
| 16,103
| 412
| 150
| 39.084951
| 0.826336
| 0.030553
| 0
| 0.772093
| 0
| 0
| 0.15247
| 0.045812
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.027907
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
79169f1c611da414934001691edb941bfe7f94a4
| 11,830
|
py
|
Python
|
src/lib/databaseIO/sqLiteIO.py
|
madelinelimm/newcookiectest
|
3283eae420aa1a3df0fca7fa4f00e90cc56c032b
|
[
"MIT"
] | null | null | null |
src/lib/databaseIO/sqLiteIO.py
|
madelinelimm/newcookiectest
|
3283eae420aa1a3df0fca7fa4f00e90cc56c032b
|
[
"MIT"
] | null | null | null |
src/lib/databaseIO/sqLiteIO.py
|
madelinelimm/newcookiectest
|
3283eae420aa1a3df0fca7fa4f00e90cc56c032b
|
[
"MIT"
] | null | null | null |
from logs import logDecorator as lD
import jsonref, sqlite3
# Load the project-wide configuration; jsonref transparently resolves any
# JSON references ($ref) inside the file.
# NOTE(review): the file handle passed to open() is never closed — harmless
# at import time, but a `with` block would be cleaner.
config = jsonref.load(open('../config/config.json'))
# Fully-qualified logger base name for this module, built from the
# project's configured logging root.
logBase = config['logging']['logBase'] + '.databaseIO.sqLiteIO'
@lD.log(logBase + '.getAllData')
def getAllData(logger, query, values=None, dbName=None):
    '''query data from the database

    Query the data over here. If there is a problem with the data, it is going
    to return the value of None, and log the error. Your program needs to check
    whether there was an error with the query by checking for a None return
    value. Note that the location of the databases are assumed to be present
    within the file ``../config/db.json``.

    Parameters
    ----------
    logger : {logging.logger}
        logging element
    query : {str}
        The query to be made to the database
    values : {tuple or list-like}, optional
        Additional values to be passed to the query (the default is None)
    dbName : {str or None}, optional
        The name of the database to use. If this is None, the function will
        attempt to read the name from the ``defaultDB`` item within the
        file ``../config/db.json``.

    Returns
    -------
    list or None
        A list of tuples containing the values is returned. In case
        there is an error, the error will be logged, and a None will
        be returned
    '''

    vals = None

    try:
        db = jsonref.load(open('../config/db.json'))

        # Fall back to the configured default database when the caller
        # did not name one explicitly.
        if (dbName is None) and ('defaultDB' in db):
            dbName = db['defaultDB']

        if dbName is None:
            logger.error('A database name has not been specified.')
            return None

        conn = sqlite3.connect(db[dbName]['connection'])
        cur = conn.cursor()
    except Exception as e:
        logger.error('Unable to connect to the database')
        logger.error(str(e))
        return

    try:
        if values is None:
            cur.execute(query)
        else:
            cur.execute(query, values)

        # We assume that the data is small so we
        # can download the entire thing here ...
        # -------------------------------------------
        vals = cur.fetchall()
    except Exception as e:
        # BUG FIX: the old format string used the named field ``{values}``
        # with only positional arguments, which raised KeyError('values')
        # inside this handler and masked the real database error.
        logger.error('Unable to obtain data from the database for:\n query: {}\nvalues: {}'.format(query, values))
        logger.error(str(e))

    try:
        cur.close()
        conn.close()
    except Exception as e:
        logger.error('Unable to disconnect to the database')
        logger.error(str(e))
        return

    return vals
@lD.log(logBase + '.getDataIterator')
def getDataIterator(logger, query, values=None, chunks=100, dbName=None):
    '''Create an iterator from a largish query

    This is a generator that returns values in chunks of chunksize ``chunks``.

    Parameters
    ----------
    logger : {logging.logger}
        logging element
    query : {str}
        The query to be made to the database
    values : {tuple or list-like}, optional
        Additional values to be passed to the query (the default
        is None)
    chunks : {number}, optional
        This is the number of rows that the data is going to return at every call
        if __next__() to this function. (the default is 100)
    dbName : {str or None}, optional
        The name of the database to use. If this is None, the function will
        attempt to read the name from the ``defaultDB`` item within the
        file ``../config/db.json``.

    Yields
    ------
    list of tuples
        A list of tuples from the query, with a maximum of ``chunks`` tuples returned
        at one time.
    '''

    try:
        db = jsonref.load(open('../config/db.json'))

        # Fall back to the configured default database when none is given.
        if (dbName is None) and ('defaultDB' in db):
            dbName = db['defaultDB']

        if dbName is None:
            logger.error('A database name has not been specified.')
            return None

        conn = sqlite3.connect(db[dbName]['connection'])
        cur = conn.cursor()
    except Exception as e:
        logger.error('Unable to connect to the database')
        logger.error(str(e))
        return

    try:
        if values is None:
            cur.execute(query)
        else:
            cur.execute(query, values)

        # Stream the result set in fixed-size batches so arbitrarily
        # large results never have to fit in memory at once.
        while True:
            vals = cur.fetchmany(chunks)
            if len(vals) == 0:
                break
            yield vals
    except Exception as e:
        # BUG FIX: the old message had no placeholder for ``values``, so the
        # bound values were silently dropped from the log entry.
        logger.error('Unable to obtain data from the database for:\n query: {}\nvalues: {}'.format(query, values))
        logger.error(str(e))

    try:
        # Close the cursor as well as the connection (the cursor was
        # previously leaked).
        cur.close()
        conn.close()
    except Exception as e:
        logger.error('Unable to disconnect to the database')
        logger.error(str(e))
        return

    return
@lD.log(logBase + '.getSingleDataIterator')
def getSingleDataIterator(logger, query, values=None, dbName=None):
    '''Create an iterator from a largish query

    This is a generator that returns values in chunks of chunksize 1.

    Parameters
    ----------
    logger : {logging.logger}
        logging element
    query : {str}
        The query to be made to the database
    values : {tuple or list-like}, optional
        Additional values to be passed to the query (the default
        is None)
    dbName : {str or None}, optional
        The name of the database to use. If this is None, the function will
        attempt to read the name from the ``defaultDB`` item within the
        file ``../config/db.json``.

    Yields
    ------
    tuple
        A single row from the query at a time.
    '''

    try:
        db = jsonref.load(open('../config/db.json'))

        # Fall back to the configured default database when none is given.
        if (dbName is None) and ('defaultDB' in db):
            dbName = db['defaultDB']

        if dbName is None:
            logger.error('A database name has not been specified.')
            return None

        conn = sqlite3.connect(db[dbName]['connection'])
        cur = conn.cursor()
    except Exception as e:
        logger.error('Unable to connect to the database')
        logger.error(str(e))
        return

    try:
        if values is None:
            cur.execute(query)
        else:
            cur.execute(query, values)

        # Stream the result set one row at a time.
        while True:
            vals = cur.fetchone()
            if vals is None:
                break
            yield vals
    except Exception as e:
        # BUG FIX: the old message had no placeholder for ``values``, so the
        # bound values were silently dropped from the log entry.
        logger.error('Unable to obtain data from the database for:\n query: {}\nvalues: {}'.format(query, values))
        logger.error(str(e))

    try:
        # Close the cursor as well as the connection (the cursor was
        # previously leaked).
        cur.close()
        conn.close()
    except Exception as e:
        logger.error('Unable to disconnect to the database')
        logger.error(str(e))
        return

    return
@lD.log(logBase + '.commitData')
def commitData(logger, query, values=None, dbName=None):
    '''commit a query to the database

    Execute the query over here and commit the result. If there is a problem
    with the data, it is going to return the value of ``None``, and
    log the error. Your program needs to check whether
    there was an error with the query by checking for a ``None``
    return value

    Parameters
    ----------
    logger : {logging.logger}
        logging element
    query : {str}
        The query to be made to the database
    values : {tuple or list-like}, optional
        Additional values to be passed to the query (the default
        is None)
    dbName : {str or None}, optional
        The name of the database to use. If this is None, the function will
        attempt to read the name from the ``defaultDB`` item within the
        file ``../config/db.json``.

    Returns
    -------
    True or None
        On successful completion, a ``True`` is returned. In case
        there is an error, the error will be logged, and a ``None`` will
        be returned
    '''

    vals = True

    try:
        db = jsonref.load(open('../config/db.json'))

        # Fall back to the configured default database when none is given.
        if (dbName is None) and ('defaultDB' in db):
            dbName = db['defaultDB']

        if dbName is None:
            logger.error('A database name has not been specified.')
            return None

        conn = sqlite3.connect(db[dbName]['connection'])
        cur = conn.cursor()
    except Exception as e:
        logger.error('Unable to connect to the database')
        logger.error(str(e))
        return None

    try:
        if values is None:
            cur.execute(query)
        else:
            cur.execute(query, values)
    except Exception as e:
        # BUG FIX: the old message had no placeholder for ``values``, so the
        # bound values were silently dropped from the log entry.
        logger.error('Unable to obtain data from the database for:\n query: {}\nvalues: {}'.format(query, values))
        logger.error(str(e))
        vals = None

    try:
        conn.commit()
        cur.close()
        conn.close()
    except Exception as e:
        logger.error('Unable to disconnect to the database')
        logger.error(str(e))
        return

    return vals
@lD.log(logBase + '.commitDataList')
def commitDataList(logger, query, values, dbName=None):
    '''commit a batch of queries to the database

    Execute the query once per entry in ``values`` (via ``executemany``) and
    commit the result. If there is a problem with
    the data, it is going to return the value of None, and
    log the error. Your program needs to check whether
    there was an error with the query by checking for a ``None``
    return value

    Parameters
    ----------
    logger : {logging.logger}
        logging element
    query : {str}
        The query to be made to the database
    values : {tuple or list-like}
        Sequence of parameter tuples, one per execution of the query
    dbName : {str or None}, optional
        The name of the database to use. If this is None, the function will
        attempt to read the name from the ``defaultDB`` item within the
        file ``../config/db.json``.

    Returns
    -------
    True or None
        A successful completion of this function returns a ``True``.
        In case there is an error, the error will be logged, and a ``None`` will
        be returned
    '''

    val = True

    try:
        db = jsonref.load(open('../config/db.json'))

        # Fall back to the configured default database when none is given.
        if (dbName is None) and ('defaultDB' in db):
            dbName = db['defaultDB']

        if dbName is None:
            logger.error('A database name has not been specified.')
            return None

        conn = sqlite3.connect(db[dbName]['connection'])
        cur = conn.cursor()
    except Exception as e:
        logger.error('Unable to connect to the database')
        logger.error(str(e))
        return None

    try:
        cur.executemany(query, values)
    except Exception as e:
        # BUG FIX: the old message had no placeholder for ``values``, so the
        # bound values were silently dropped from the log entry.
        logger.error('Unable to execute query for:\n query: {}\nvalues: {}'.format(query, values))
        logger.error(str(e))
        val = None

    try:
        conn.commit()
        cur.close()
        conn.close()
    except Exception as e:
        logger.error('Unable to disconnect to the database')
        logger.error(str(e))
        return None

    return val
| 30.727273
| 113
| 0.573373
| 1,504
| 11,830
| 4.507314
| 0.108378
| 0.056793
| 0.037616
| 0.039829
| 0.876088
| 0.876088
| 0.869302
| 0.869302
| 0.864139
| 0.863549
| 0
| 0.001777
| 0.333981
| 11,830
| 384
| 114
| 30.807292
| 0.858612
| 0.40803
| 0
| 0.848837
| 0
| 0
| 0.198611
| 0.007111
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02907
| false
| 0
| 0.011628
| 0
| 0.133721
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
791aff2e9c7df043bc3a1dd777008877613f5a94
| 9,426
|
py
|
Python
|
old_tests/test_hombre_scheduler.py
|
eirrgang/radical.pilot
|
ceccd1867dd172935d602ff4c33a5ed4467e0dc8
|
[
"MIT"
] | 1
|
2021-11-07T04:51:30.000Z
|
2021-11-07T04:51:30.000Z
|
old_tests/test_hombre_scheduler.py
|
eirrgang/radical.pilot
|
ceccd1867dd172935d602ff4c33a5ed4467e0dc8
|
[
"MIT"
] | null | null | null |
old_tests/test_hombre_scheduler.py
|
eirrgang/radical.pilot
|
ceccd1867dd172935d602ff4c33a5ed4467e0dc8
|
[
"MIT"
] | null | null | null |
import os
import pytest
import radical.utils as ru
import radical.pilot as rp
import radical.pilot.constants as rpc
from radical.pilot.agent.scheduler.hombre import Hombre
# ``mock`` ships as a standalone package on Python 2; on Python 3 it is part
# of the standard library as ``unittest.mock``.  The previous fallback
# (``from tasktest import mock``) referenced a non-existent module, so the
# import failed entirely whenever the standalone ``mock`` was absent.
try:
    import mock
except ImportError:
    from unittest import mock
# ------------------------------------------------------------------------------
# User Input for test
# Target resource and access mechanism exercised by these tests.
resource_name = 'local.localhost'
access_schema = 'ssh'
# Sample data to be staged -- available in cwd
# Absolute path of the directory containing this test file.
cur_dir = os.path.dirname(os.path.abspath(__file__))
# ------------------------------------------------------------------------------
# Setup for every test
def setUp():
    """Create a fresh RP session plus a two-node LRMS test configuration."""
    lrms_info = {
        'lm_info': 'INFO',
        'n_nodes': 2,
        'cores_per_node': 4,
        'gpus_per_node': 2,
        'node_list': [['0', 0], ['1', 1]],
    }
    return {'lrms_info': lrms_info}, rp.Session()
# ------------------------------------------------------------------------------
#
def cud_nonmpi():
    """Return a task description for a non-MPI task.

    One CPU process with two threads, plus one single-threaded GPU process;
    no process/thread launch types set.
    """
    return dict(cpu_process_type=None,
                cpu_thread_type=None,
                cpu_processes=1,
                cpu_threads=2,
                gpu_process_type=None,
                gpu_thread_type=None,
                gpu_processes=1,
                gpu_threads=1)
# ------------------------------------------------------------------------------
#
def cud_mpi():
    """Return a task description for an MPI task.

    Three single-threaded MPI CPU processes plus one single-threaded MPI GPU
    process.
    """
    return dict(cpu_process_type=rpc.MPI,
                cpu_thread_type=None,
                cpu_processes=3,
                cpu_threads=1,
                gpu_process_type=rpc.MPI,
                gpu_thread_type=None,
                gpu_processes=1,
                gpu_threads=1)
# ------------------------------------------------------------------------------
# Cleanup any folders and files to leave the system state
# as prior to the test
def tearDown(session):
    """Close the RP session created by setUp(), releasing its resources."""
    session.close()
# ------------------------------------------------------------------------------
# Test non mpi tasks
@mock.patch.object(Hombre, '__init__', return_value=None)
@mock.patch.object(Hombre, 'advance')
@mock.patch.object(ru.Profiler, 'prof')
@mock.patch('radical.utils.raise_on')
def test_nonmpi_task_withhombre_scheduler(mocked_init,
                                          mocked_method,
                                          mocked_profiler,
                                          mocked_raise_on):
    """Exercise Hombre slot allocation and release for non-MPI tasks.

    NOTE(review): ``mock.patch`` decorators apply bottom-up, so these
    parameter names do not line up with the patches they name (e.g.
    ``mocked_init`` actually receives the ``raise_on`` mock).  Harmless
    here, since none of the mock arguments are used in the body.
    """
    cfg, session = setUp()
    # __init__ is mocked away, so manually inject every attribute the
    # scheduler would normally set up from the config.
    component = Hombre(cfg=dict(), session=session)
    component._log = ru.Logger('radical.pilot.test')
    component._configured = False
    component._cfg = cfg
    component._lrms_info = cfg['lrms_info']
    component._lrms_lm_info = cfg['lrms_info']['lm_info']
    component._lrms_n_nodes = cfg['lrms_info']['n_nodes']
    component._lrms_node_list = cfg['lrms_info']['node_list']
    component._lrms_cores_per_node = cfg['lrms_info']['cores_per_node']
    component._lrms_gpus_per_node = cfg['lrms_info']['gpus_per_node']
    component.nodes = list()
    for node in component._lrms_node_list:
        component.nodes.append({'uid' : node[0],
                                'name' : node[1]})
    # populate component attributes
    component._configure()
    component._oversubscribe = False
    # we expect these slots to be available
    all_slots = list()
    for n in range(component._lrms_n_nodes):
        all_slots.append({'lm_info' : 'INFO',
                          'cores_per_node' : 4,
                          'gpus_per_node' : 2,
                          'ncblocks' : 1,
                          'ngblocks' : 1,
                          'nodes' : [{'name': n,
                                      'uid' : str(n),
                                      'core_map' : [[0, 1]],
                                      'gpu_map' : []},
                                     {'name': n,
                                      'uid' : str(n),
                                      'core_map' : [[0]],
                                      'gpu_map' : [[0]]}
                                     ]
                          })
        all_slots.append({'lm_info' : 'INFO',
                          'cores_per_node' : 4,
                          'gpus_per_node' : 2,
                          'ncblocks' : 1,
                          'ngblocks' : 1,
                          'nodes' : [{'name': n,
                                      'uid' : str(n),
                                      'core_map' : [[2, 3]],
                                      'gpu_map' : []},
                                     {'name': n,
                                      'uid' : str(n),
                                      'core_map' : [[0]],
                                      'gpu_map' : [[1]]}
                                     ]
                          })
    # Allocate first TD -- should land on second node
    td = cud_nonmpi()
    slot = component._allocate_slot(td)
    chk = all_slots[-1]
    assert(slot == chk)
    # Allocate second TD -- should also land on second node
    td = cud_nonmpi()
    slot = component._allocate_slot(td)
    chk = all_slots[-2]
    assert(slot == chk)
    # Allocate third TD -- should land on first node
    td = cud_nonmpi()
    slot = component._allocate_slot(td)
    chk = all_slots[-3]
    assert(slot == chk)
    # Allocate fourth TD -- should also land on first node
    td = cud_nonmpi()
    slot = component._allocate_slot(td)
    assert slot == all_slots[-4]
    # Fail with ValueError if heterogeneous CUs are scheduled
    with pytest.raises(ValueError):
        td = cud_nonmpi()
        td['gpu_processes'] = 2
        slot = component._allocate_slot(td)
    # expect no slots now, as all resources are used
    td = cud_nonmpi()
    noslot = component._allocate_slot(td)
    assert(noslot is None)
    # Deallocate last filled slot
    component._release_slot(slot)
    # we should get a new slot now, which is the same as the one just freed
    td = cud_nonmpi()
    newslot = component._allocate_slot(td)
    assert(newslot == slot)
    tearDown(session)
# ------------------------------------------------------------------------------
# Test mpi tasks
@mock.patch.object(Hombre, '__init__', return_value=None)
@mock.patch.object(Hombre, 'advance')
@mock.patch.object(ru.Profiler, 'prof')
@mock.patch('radical.utils.raise_on')
def test_mpi_task_withhombre_scheduler(mocked_raise_on,
                                       mocked_profiler,
                                       mocked_advance,
                                       mocked_init):
    '''
    Exercise slot allocation and release for MPI tasks on the Hombre
    scheduler: two tasks fill both nodes, a heterogeneous task raises a
    ValueError, a further task gets no slot, and releasing a slot makes the
    same slot allocatable again.

    BUGFIX: stacked `mock.patch` decorators hand their mocks to the test
    function innermost-first, so the first parameter receives the
    `raise_on` mock and the last receives the `__init__` mock; the original
    parameter names were listed in the opposite (wrong) order.
    '''
    cfg, session = setUp()

    component = Hombre(cfg=dict(), session=session)
    component._log = ru.Logger('radical.pilot.test')

    component._configured          = False
    component._cfg                 = cfg
    component._lrms_info           = cfg['lrms_info']
    component._lrms_lm_info        = cfg['lrms_info']['lm_info']
    component._lrms_n_nodes        = cfg['lrms_info']['n_nodes']
    component._lrms_node_list      = cfg['lrms_info']['node_list']
    component._lrms_cores_per_node = cfg['lrms_info']['cores_per_node']
    component._lrms_gpus_per_node  = cfg['lrms_info']['gpus_per_node']

    component.nodes = [{'uid': node[0], 'name': node[1]}
                       for node in component._lrms_node_list]

    # populate component attributes
    component._configure()
    component._oversubscribe = True

    # we expect these slots to be available
    all_slots = [{'lm_info'       : 'INFO',
                  'cores_per_node': 4,
                  'gpus_per_node' : 2,
                  'nodes'         : [[0, '0', [[0], [1], [2]], [[0]]]]},
                 {'lm_info'       : 'INFO',
                  'cores_per_node': 4,
                  'gpus_per_node' : 2,
                  'nodes'         : [[1, '1', [[0], [1], [2]], [[0]]]]}]

    # allocate first TD -- should land on second node
    td   = cud_mpi()
    slot = component._allocate_slot(td)
    assert slot == all_slots[-1]

    # allocate second TD -- should land on first node
    td   = cud_mpi()
    slot = component._allocate_slot(td)
    assert slot == all_slots[-2]

    # fail with ValueError if heterogeneous CUs are scheduled
    with pytest.raises(ValueError):
        td = cud_mpi()
        td['gpu_processes'] = 2
        slot = component._allocate_slot(td)

    # expect no slots now, as all resources are used
    td     = cud_mpi()
    noslot = component._allocate_slot(td)
    assert noslot is None

    # deallocate last filled slot
    component._release_slot(slot)

    # we should get a new slot now, which is the same as the one just freed
    td      = cud_mpi()
    newslot = component._allocate_slot(td)
    assert newslot == slot

    tearDown(session)
# ------------------------------------------------------------------------------
| 33.906475
| 80
| 0.462232
| 926
| 9,426
| 4.452484
| 0.184665
| 0.03056
| 0.032016
| 0.066942
| 0.800873
| 0.795537
| 0.78147
| 0.78147
| 0.767887
| 0.719864
| 0
| 0.009961
| 0.371632
| 9,426
| 277
| 81
| 34.028881
| 0.686139
| 0.176851
| 0
| 0.707182
| 0
| 0
| 0.124741
| 0.005699
| 0
| 0
| 0
| 0
| 0.055249
| 1
| 0.033149
| false
| 0
| 0.049724
| 0.01105
| 0.099448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
791df5118d279500cda8c5e74f7f645a1a2bb831
| 10,104
|
py
|
Python
|
src/seller_products/migrations/0001_initial.py
|
evis-market/web-interface-backend
|
f8930ff1c009ad18e522ab29680b4bcd50a6020e
|
[
"MIT"
] | 2
|
2021-08-30T22:58:32.000Z
|
2021-12-12T10:47:52.000Z
|
src/seller_products/migrations/0001_initial.py
|
evis-market/web-interface-backend
|
f8930ff1c009ad18e522ab29680b4bcd50a6020e
|
[
"MIT"
] | null | null | null |
src/seller_products/migrations/0001_initial.py
|
evis-market/web-interface-backend
|
f8930ff1c009ad18e522ab29680b4bcd50a6020e
|
[
"MIT"
] | 1
|
2021-08-22T19:12:44.000Z
|
2021-08-22T19:12:44.000Z
|
# Generated by Django 3.2.7 on 2021-09-14 11:14
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial, auto-generated schema for the seller_products app.

    Creates SellerProduct plus its archive twin SellerProductArchive
    (versioned via the ``(seller_product_id, version)`` unique pair), and
    the data-url / data-sample link models with their archive twins.

    NOTE(review): this is a generated, historical migration -- do not
    hand-edit field definitions.  The 'Purhcase' typo in two
    verbose_names below is deliberately left untouched; fix it in the
    model and a follow-up migration instead.
    """

    initial = True

    dependencies = [
        ('product_data_types', '0001_initial'),
        ('data_delivery_types', '0001_initial'),
        ('sellers', '0001_initial'),
        ('geo_regions', '0001_initial'),
        ('categories', '0004_auto_20210905_1349'),
    ]

    operations = [
        migrations.CreateModel(
            name='SellerProduct',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=500, verbose_name='Name')),
                ('descr', models.TextField(blank=True, default='', verbose_name='Description')),
                ('price_per_one_time', models.FloatField(blank=True, default=None, null=True, verbose_name='Price per one time usage')),
                ('price_per_month', models.FloatField(blank=True, default=None, null=True, verbose_name='Price per month')),
                ('price_per_year', models.FloatField(blank=True, default=None, null=True, verbose_name='Price per year')),
                ('price_by_request', models.FloatField(blank=True, default=None, null=True, verbose_name='Price by request')),
                ('price_per_usage', models.BooleanField(blank=True, default=None, null=True, verbose_name='Price per usage True/False')),
                # NOTE(review): 'Purhcase' typo preserved (generated migration).
                ('price_per_usage_descr', models.TextField(blank=True, default=None, null=True, verbose_name='Purhcase method description')),
                ('rating', models.FloatField(blank=True, default=None, null=True, verbose_name='Rating')),
                ('total_reviews_cnt', models.IntegerField(default=0, verbose_name='Total count of reviews')),
                ('version', models.IntegerField(default=1, verbose_name='Version')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('categories', models.ManyToManyField(blank=True, db_table='seller_product_categories', to='categories.Category', verbose_name='Content categories')),
                ('data_delivery_types', models.ManyToManyField(blank=True, db_table='seller_product_data_delivery_types', to='data_delivery_types.DataDeliveryType', verbose_name='Content data types')),
                ('data_formats', models.ManyToManyField(blank=True, db_table='seller_product_data_formats', to='product_data_types.DataFormat', verbose_name='Content data formats')),
                ('data_types', models.ManyToManyField(blank=True, db_table='seller_product_data_types', to='product_data_types.DataType', verbose_name='Content data types')),
                ('geo_regions', models.ManyToManyField(blank=True, db_table='seller_product_geo_regions', to='geo_regions.GeoRegion', verbose_name='Content geo regions')),
                ('seller', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sellers.seller')),
            ],
            options={
                'db_table': 'seller_products',
                'abstract': False,
            },
        ),
        # Archive twin of SellerProduct: explicit (non-auto-created) pk plus
        # a (seller_product_id, version) pair unique-together for versioning.
        migrations.CreateModel(
            name='SellerProductArchive',
            fields=[
                ('name', models.CharField(max_length=500, verbose_name='Name')),
                ('descr', models.TextField(blank=True, default='', verbose_name='Description')),
                ('price_per_one_time', models.FloatField(blank=True, default=None, null=True, verbose_name='Price per one time usage')),
                ('price_per_month', models.FloatField(blank=True, default=None, null=True, verbose_name='Price per month')),
                ('price_per_year', models.FloatField(blank=True, default=None, null=True, verbose_name='Price per year')),
                ('price_by_request', models.FloatField(blank=True, default=None, null=True, verbose_name='Price by request')),
                ('price_per_usage', models.BooleanField(blank=True, default=None, null=True, verbose_name='Price per usage True/False')),
                # NOTE(review): 'Purhcase' typo preserved (generated migration).
                ('price_per_usage_descr', models.TextField(blank=True, default=None, null=True, verbose_name='Purhcase method description')),
                ('rating', models.FloatField(blank=True, default=None, null=True, verbose_name='Rating')),
                ('total_reviews_cnt', models.IntegerField(default=0, verbose_name='Total count of reviews')),
                ('version', models.IntegerField(default=1, verbose_name='Version')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('seller_product_id', models.IntegerField()),
                ('is_deleted', models.BooleanField(default=False)),
                ('categories', models.ManyToManyField(blank=True, db_table='seller_product_categories_archive', to='categories.Category', verbose_name='Content categories')),
                ('data_delivery_types', models.ManyToManyField(blank=True, db_table='seller_product_data_delivery_types_archive', to='data_delivery_types.DataDeliveryType', verbose_name='Content data types')),
                ('data_formats', models.ManyToManyField(blank=True, db_table='seller_product_data_formats_archive', to='product_data_types.DataFormat', verbose_name='Content data formats')),
                ('data_types', models.ManyToManyField(blank=True, db_table='seller_product_data_types_archive', to='product_data_types.DataType', verbose_name='Content data types')),
                ('geo_regions', models.ManyToManyField(blank=True, db_table='seller_product_geo_regions_archive', to='geo_regions.GeoRegion', verbose_name='Content geo-regions')),
                ('seller', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sellers.seller')),
            ],
            options={
                'db_table': 'seller_products_archive',
                'abstract': False,
                'unique_together': {('seller_product_id', 'version')},
            },
        ),
        migrations.CreateModel(
            name='SellerProductDataUrlArchive',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(verbose_name='URL')),
                ('data_format', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.dataformat')),
                ('data_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.datatype')),
                ('seller_product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='data_urls_archive', to='seller_products.sellerproductarchive')),
            ],
            options={
                'verbose_name': 'Data url archive',
                'verbose_name_plural': 'Data urls archive',
                'db_table': 'seller_product_data_urls_archive',
            },
        ),
        migrations.CreateModel(
            name='SellerProductDataUrl',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(verbose_name='URL')),
                ('data_format', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.dataformat')),
                ('data_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.datatype')),
                ('seller_product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='data_urls', to='seller_products.sellerproduct')),
            ],
            options={
                'verbose_name': 'Data url',
                'verbose_name_plural': 'Data urls',
                'db_table': 'seller_product_data_urls',
            },
        ),
        migrations.CreateModel(
            name='SellerProductDataSampleArchive',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(verbose_name='URL')),
                ('data_format', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.dataformat')),
                ('data_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.datatype')),
                ('seller_product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='data_samples_archive', to='seller_products.sellerproductarchive')),
            ],
            options={
                'verbose_name': 'Data sample archive',
                'verbose_name_plural': 'Data samples archive',
                'db_table': 'seller_product_data_samples_archive',
            },
        ),
        migrations.CreateModel(
            name='SellerProductDataSample',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(verbose_name='URL')),
                ('data_format', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.dataformat')),
                ('data_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product_data_types.datatype')),
                ('seller_product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='data_samples', to='seller_products.sellerproduct')),
            ],
            options={
                'verbose_name': 'Data sample',
                'verbose_name_plural': 'Data samples',
                'db_table': 'seller_product_data_samples',
            },
        ),
    ]
| 71.15493
| 209
| 0.64519
| 1,084
| 10,104
| 5.75738
| 0.112546
| 0.093415
| 0.043262
| 0.052876
| 0.876462
| 0.847621
| 0.82647
| 0.82647
| 0.810127
| 0.789297
| 0
| 0.007221
| 0.218725
| 10,104
| 141
| 210
| 71.659574
| 0.78338
| 0.004454
| 0
| 0.574627
| 1
| 0
| 0.298399
| 0.117331
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014925
| 0
| 0.044776
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f72d627417aaa695ea5bcada408ff82f6f850efa
| 11,113
|
py
|
Python
|
trajectory_generator.py
|
keshaviyengar/rl-baselines-zoo
|
6e39f5c7c6c2d30873297308ed064551bffaa52d
|
[
"MIT"
] | null | null | null |
trajectory_generator.py
|
keshaviyengar/rl-baselines-zoo
|
6e39f5c7c6c2d30873297308ed064551bffaa52d
|
[
"MIT"
] | null | null | null |
trajectory_generator.py
|
keshaviyengar/rl-baselines-zoo
|
6e39f5c7c6c2d30873297308ed064551bffaa52d
|
[
"MIT"
] | null | null | null |
import rospy
from geometry_msgs.msg import Pose, Point
from std_msgs.msg import Bool
import numpy as np
import os
# This script generates circle, triangle and square trajectories for a robot
# to follow, publishing the desired pose on the "desired_goal" topic and a
# completion flag on "trajectory_finish".
class CircleTrajectory(object):
    """Publish desired poses tracing a circle of `radius` around
    (x_offset, y_offset) at height `z_height`.

    A 10 ms ROS timer advances the angle by `theta_step` degrees per tick;
    when the sampled angles are exhausted, True is published on
    "trajectory_finish" and the timer shuts down.
    """

    def __init__(self, x_offset, y_offset, z_height, radius, theta_step):
        self.trajectory_pub = rospy.Publisher("desired_goal", Pose, queue_size=10)
        self.trajectory_finish_pub = rospy.Publisher("trajectory_finish", Bool, queue_size=10)
        self._current_pose = Pose()
        self.traj_finish = False
        self._desired_pose = Pose()
        self.x_offset = x_offset
        self.y_offset = y_offset
        self.radius = radius
        # Sampled angles along the circle, theta_step degrees apart.
        self.thetas = np.arange(0, 2 * np.pi, np.deg2rad(theta_step))
        self.thetas_counter = 0
        self._desired_pose.position.x = self.x_offset + self.radius * np.cos(self.thetas[self.thetas_counter])
        self._desired_pose.position.y = self.y_offset + self.radius * np.sin(self.thetas[self.thetas_counter])
        self._desired_pose.position.z = z_height
        self._desired_pose.orientation.x = 0
        self._desired_pose.orientation.y = 0
        self._desired_pose.orientation.z = 0
        self._desired_pose.orientation.w = 1
        self.speed = 1  # NOTE(review): unused by this class; kept for compatibility
        # BUGFIX: start the 10 ms update timer only after every attribute the
        # callback reads has been set.  It was previously created before
        # thetas / thetas_counter / _desired_pose existed, so the first tick
        # could fire on uninitialized state (AttributeError).  The sibling
        # trajectory classes already create their timer last.
        self.trajectory_timer = rospy.Timer(rospy.Duration(0.01), self._trajectory_callback)

    def _trajectory_callback(self, event):
        # Step to the next sampled angle; finish once the samples run out.
        self.thetas_counter += 1
        if self.thetas_counter == self.thetas.size - 1:
            self.traj_finish = True
            print("Trajectory is complete.")
            self.trajectory_finish_pub.publish(True)
            self.trajectory_timer.shutdown()
        if not self.traj_finish:
            self._desired_pose.position.x = self.x_offset + self.radius * np.cos(self.thetas[self.thetas_counter])
            self._desired_pose.position.y = self.y_offset + self.radius * np.sin(self.thetas[self.thetas_counter])
        # Publish new pose
        self.trajectory_pub.publish(self._desired_pose)
class TriangleTrajectory(object):
    """Drive the desired pose around the triangle a -> b -> c -> a.

    Poses are published on "desired_goal"; True is published on
    "trajectory_finish" when the loop closes.
    """

    def __init__(self, point_a, point_b, point_c, z_height):
        self.trajectory_pub = rospy.Publisher("desired_goal", Pose, queue_size=10)
        self.trajectory_finish_pub = rospy.Publisher("trajectory_finish", Bool, queue_size=10)
        self._current_pose = Pose()
        # Vertices of the triangle, in visiting order.
        self.points = np.array([point_a, point_b, point_c])
        self._turn_count = 0
        # Velocity vector of the current segment (units/s): start heading a -> b.
        self.del_vector = [(self.points[1][0] - self.points[0][0]),
                           (self.points[1][1] - self.points[0][1])]
        self._done_trajectory = False
        self._desired_pose = Pose()
        self._desired_pose.position.x = point_a[0]
        self._desired_pose.position.y = point_a[1]
        self._desired_pose.position.z = z_height
        self._desired_pose.orientation.x = 0
        self._desired_pose.orientation.y = 0
        self._desired_pose.orientation.z = 0
        self._desired_pose.orientation.w = 1
        # Publish initial point and sleep to initialize
        for _ in range(10):
            self.trajectory_pub.publish(self._desired_pose)
            rospy.sleep(0.1)
        self.prev_time = rospy.get_time()
        self.traj_finish = False
        # Create a timer to update the desired trajectory
        self.trajectory_timer = rospy.Timer(rospy.Duration(0.01), self._trajectory_callback)

    def _change_direction(self):
        """Advance to the next segment once the current target vertex is reached.

        BUGFIX: the original set `del_vector` to the segment just *completed*
        (e.g. points[1]-points[0] upon reaching points[1]), so the pose kept
        moving straight past each vertex and never approached the next
        target.  Each transition now aims at the *next* vertex, and the
        `elif` chain prevents one callback from advancing several corners.
        """
        pos = np.array([self._desired_pose.position.x,
                        self._desired_pose.position.y])
        if self._turn_count == 0:
            # Heading a -> b; when b is reached, turn towards c.
            if np.linalg.norm(self.points[1] - pos) < 0.5:
                self._turn_count += 1
                self.del_vector = [(self.points[2][0] - self.points[1][0]),
                                   (self.points[2][1] - self.points[1][1])]
        elif self._turn_count == 1:
            # Heading b -> c; when c is reached, turn back towards a.
            if np.linalg.norm(self.points[2] - pos) < 0.5:
                self._turn_count += 1
                self.del_vector = [(self.points[0][0] - self.points[2][0]),
                                   (self.points[0][1] - self.points[2][1])]
        elif self._turn_count == 2:
            # Heading c -> a; reaching a closes the triangle.
            if np.linalg.norm(self.points[0] - pos) < 0.5:
                self._turn_count += 1
        if self._turn_count == 3:
            print("Trajectory is complete.")
            self.traj_finish = True
            self.trajectory_finish_pub.publish(True)
            self.trajectory_timer.shutdown()

    def _trajectory_callback(self, event):
        # Integrate the segment velocity over the elapsed wall-clock time.
        if not self.traj_finish:
            current_time = rospy.get_time()
            delta_t = current_time - self.prev_time
            self.prev_time = current_time
            self._change_direction()
            self._desired_pose.position.x += self.del_vector[0] * delta_t
            self._desired_pose.position.y += self.del_vector[1] * delta_t
            self.trajectory_pub.publish(self._desired_pose)
class SquareTrajectory2(object):
    """Drive the desired pose around the square a -> b -> c -> d -> a.

    Poses are published on "desired_goal"; True is published on
    "trajectory_finish" when the loop closes.
    """

    def __init__(self, point_a, point_b, point_c, point_d, z_height):
        self.trajectory_pub = rospy.Publisher("desired_goal", Pose, queue_size=10)
        self.trajectory_finish_pub = rospy.Publisher("trajectory_finish", Bool, queue_size=10)
        self._current_pose = Pose()
        # Corners of the square, in visiting order.
        self.points = [point_a, point_b, point_c, point_d]
        self._turn_count = 0
        # Velocity vector of the current segment (units/s): start heading a -> b.
        self.del_vector = [(self.points[1][0] - self.points[0][0]),
                           (self.points[1][1] - self.points[0][1])]
        # For now set initial current pose as 0
        self._desired_pose = Pose()
        self._desired_pose.position.x = point_a[0]
        self._desired_pose.position.y = point_a[1]
        self._desired_pose.position.z = z_height
        self._desired_pose.orientation.x = 0
        self._desired_pose.orientation.y = 0
        self._desired_pose.orientation.z = 0
        self._desired_pose.orientation.w = 1
        # Publish initial point and sleep to initialize
        for _ in range(10):
            self.trajectory_pub.publish(self._desired_pose)
            rospy.sleep(0.1)
        self.prev_time = rospy.get_time()
        self.traj_finish = False
        # Create a timer to update the desired trajectory
        self.trajectory_timer = rospy.Timer(rospy.Duration(0.01), self._trajectory_callback)

    def _change_direction(self):
        """Turn 90 degrees at each corner of the square.

        BUGFIX: the original set `del_vector` to the segment just *completed*
        (e.g. points[1]-points[0] upon reaching points[1]), so the pose kept
        moving straight past each corner and the square was never traced.
        Each transition now aims at the *next* corner, and the `elif` chain
        prevents one callback from advancing several corners at once.
        """
        pos = np.array([self._desired_pose.position.x,
                        self._desired_pose.position.y])
        if self._turn_count == 0:
            # Heading a -> b; when b is reached, turn towards c.
            if np.linalg.norm(np.array(self.points[1]) - pos) < 0.5:
                self._turn_count += 1
                self.del_vector = [(self.points[2][0] - self.points[1][0]),
                                   (self.points[2][1] - self.points[1][1])]
        elif self._turn_count == 1:
            # Heading b -> c; when c is reached, turn towards d.
            if np.linalg.norm(np.array(self.points[2]) - pos) < 0.5:
                self._turn_count += 1
                self.del_vector = [(self.points[3][0] - self.points[2][0]),
                                   (self.points[3][1] - self.points[2][1])]
        elif self._turn_count == 2:
            # Heading c -> d; when d is reached, turn back towards a.
            if np.linalg.norm(np.array(self.points[3]) - pos) < 0.5:
                self._turn_count += 1
                self.del_vector = [(self.points[0][0] - self.points[3][0]),
                                   (self.points[0][1] - self.points[3][1])]
        elif self._turn_count == 3:
            # Heading d -> a; reaching a closes the square.
            if np.linalg.norm(np.array(self.points[0]) - pos) < 0.5:
                self._turn_count += 1
        if self._turn_count == 4:
            print("Trajectory is complete.")
            self.traj_finish = True
            self.trajectory_finish_pub.publish(True)
            self.trajectory_timer.shutdown()

    def _trajectory_callback(self, event):
        # Integrate the segment velocity over the elapsed wall-clock time.
        if not self.traj_finish:
            current_time = rospy.get_time()
            delta_t = current_time - self.prev_time
            self.prev_time = current_time
            self._change_direction()
            self._desired_pose.position.x += self.del_vector[0] * delta_t
            self._desired_pose.position.y += self.del_vector[1] * delta_t
            self.trajectory_pub.publish(self._desired_pose)
if __name__ == '__main__':
    rospy.init_node("trajectory_generator")

    # Experiment ids select the trajectory height.
    experiments = [7]
    for exp in experiments:
        x_offset = 5
        y_offset = 5
        # NOTE(review): both experiment groups currently map to z_height 100
        # (the elif branch looks like it was meant to differ); behavior kept.
        if exp in [1, 2, 3, 4, 5]:
            z_height = 100
        elif exp in [6, 7, 8, 9, 10]:
            z_height = 100
        else:
            z_height = 125
        radius = 2.0
        theta_step = 0.5

        print("Circle trajectory")
        circle_trajectory = CircleTrajectory(x_offset, y_offset, z_height,
                                             radius, theta_step)
        # BUGFIX: the original wait loop busy-spun at 100% CPU (its body was
        # a redundant `if traj_finish: break`); poll with a short sleep
        # instead while the timer-driven trajectory completes.
        while not circle_trajectory.traj_finish:
            rospy.sleep(0.1)
        # Commented-out TriangleTrajectory / SquareTrajectory2 examples were
        # removed; see those classes above for the required point arguments.
| 41.778195
| 114
| 0.602268
| 1,490
| 11,113
| 4.240268
| 0.104698
| 0.085312
| 0.116334
| 0.105571
| 0.81893
| 0.799462
| 0.791865
| 0.782526
| 0.767015
| 0.755619
| 0
| 0.034028
| 0.283362
| 11,113
| 265
| 115
| 41.935849
| 0.759292
| 0.149015
| 0
| 0.734104
| 0
| 0
| 0.021367
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046243
| false
| 0
| 0.028902
| 0
| 0.092486
| 0.023121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7377dcd51a6292c65747a3c3db690121395d368
| 21,515
|
py
|
Python
|
sdk/python/pulumi_azuread/provider.py
|
ragnarstolsmark/pulumi-azuread
|
b9398511c142f0aad349e492ded419f870edc925
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azuread/provider.py
|
ragnarstolsmark/pulumi-azuread
|
b9398511c142f0aad349e492ded419f870edc925
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azuread/provider.py
|
ragnarstolsmark/pulumi-azuread
|
b9398511c142f0aad349e492ded419f870edc925
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ProviderArgs', 'Provider']
@pulumi.input_type
class ProviderArgs:
    # NOTE(review): generated by the Pulumi Terraform Bridge (tfgen) -- see
    # the file header.  Only comments are added here; do not hand-edit the
    # generated argument plumbing.
    def __init__(__self__, *,
                 metadata_host: pulumi.Input[str],
                 client_certificate_password: Optional[pulumi.Input[str]] = None,
                 client_certificate_path: Optional[pulumi.Input[str]] = None,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret: Optional[pulumi.Input[str]] = None,
                 disable_terraform_partner_id: Optional[pulumi.Input[bool]] = None,
                 environment: Optional[pulumi.Input[str]] = None,
                 msi_endpoint: Optional[pulumi.Input[str]] = None,
                 partner_id: Optional[pulumi.Input[str]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 use_cli: Optional[pulumi.Input[bool]] = None,
                 use_microsoft_graph: Optional[pulumi.Input[bool]] = None,
                 use_msi: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a Provider resource.
        :param pulumi.Input[str] metadata_host: [DEPRECATED] The Hostname which should be used for the Azure Metadata Service.
        :param pulumi.Input[str] client_certificate_path: The path to the Client Certificate associated with the Service Principal for use when authenticating as a Service
               Principal using a Client Certificate.
        :param pulumi.Input[str] client_id: The Client ID which should be used for service principal authentication.
        :param pulumi.Input[str] client_secret: The password to decrypt the Client Certificate. For use when authenticating as a Service Principal using a Client
               Certificate
        :param pulumi.Input[bool] disable_terraform_partner_id: Disable the Terraform Partner ID which is used if a custom `partner_id` isn't specified.
        :param pulumi.Input[str] environment: The cloud environment which should be used. Possible values are `global` (formerly `public`), `usgovernment`, `dod`,
               `germany`, and `china`. Defaults to `global`.
        :param pulumi.Input[str] msi_endpoint: The path to a custom endpoint for Managed Identity - in most circumstances this should be detected automatically.
        :param pulumi.Input[str] partner_id: A GUID/UUID that is registered with Microsoft to facilitate partner resource usage attribution.
        :param pulumi.Input[str] tenant_id: The Tenant ID which should be used. Works with all authentication methods except Managed Identity.
        :param pulumi.Input[bool] use_cli: Allow Azure CLI to be used for Authentication.
        :param pulumi.Input[bool] use_microsoft_graph: Beta: Use the Microsoft Graph API, instead of the legacy Azure Active Directory Graph API, where supported.
        :param pulumi.Input[bool] use_msi: Allow Managed Identity to be used for Authentication.
        """
        pulumi.set(__self__, "metadata_host", metadata_host)
        if client_certificate_password is not None:
            pulumi.set(__self__, "client_certificate_password", client_certificate_password)
        if client_certificate_path is not None:
            pulumi.set(__self__, "client_certificate_path", client_certificate_path)
        if client_id is not None:
            pulumi.set(__self__, "client_id", client_id)
        if client_secret is not None:
            pulumi.set(__self__, "client_secret", client_secret)
        if disable_terraform_partner_id is not None:
            pulumi.set(__self__, "disable_terraform_partner_id", disable_terraform_partner_id)
        # When not supplied, fall back to the ARM_ENVIRONMENT env var,
        # defaulting to 'public'.
        if environment is None:
            environment = (_utilities.get_env('ARM_ENVIRONMENT') or 'public')
        if environment is not None:
            pulumi.set(__self__, "environment", environment)
        # When not supplied, fall back to the ARM_MSI_ENDPOINT env var.
        if msi_endpoint is None:
            msi_endpoint = _utilities.get_env('ARM_MSI_ENDPOINT')
        if msi_endpoint is not None:
            pulumi.set(__self__, "msi_endpoint", msi_endpoint)
        if partner_id is not None:
            pulumi.set(__self__, "partner_id", partner_id)
        if tenant_id is not None:
            pulumi.set(__self__, "tenant_id", tenant_id)
        if use_cli is not None:
            pulumi.set(__self__, "use_cli", use_cli)
        if use_microsoft_graph is not None:
            pulumi.set(__self__, "use_microsoft_graph", use_microsoft_graph)
        # When not supplied, fall back to the ARM_USE_MSI env var,
        # defaulting to False.
        if use_msi is None:
            use_msi = (_utilities.get_env_bool('ARM_USE_MSI') or False)
        if use_msi is not None:
            pulumi.set(__self__, "use_msi", use_msi)

    @property
    @pulumi.getter(name="metadataHost")
    def metadata_host(self) -> pulumi.Input[str]:
        """
        [DEPRECATED] The Hostname which should be used for the Azure Metadata Service.
        """
        return pulumi.get(self, "metadata_host")

    @metadata_host.setter
    def metadata_host(self, value: pulumi.Input[str]):
        pulumi.set(self, "metadata_host", value)

    @property
    @pulumi.getter(name="clientCertificatePassword")
    def client_certificate_password(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): upstream generated no description for this field;
        # presumably the password protecting the client certificate bundle --
        # confirm against the azuread provider schema.
        return pulumi.get(self, "client_certificate_password")

    @client_certificate_password.setter
    def client_certificate_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_certificate_password", value)

    @property
    @pulumi.getter(name="clientCertificatePath")
    def client_certificate_path(self) -> Optional[pulumi.Input[str]]:
        """
        The path to the Client Certificate associated with the Service Principal for use when authenticating as a Service
        Principal using a Client Certificate.
        """
        return pulumi.get(self, "client_certificate_path")

    @client_certificate_path.setter
    def client_certificate_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_certificate_path", value)

    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        """
        The Client ID which should be used for service principal authentication.
        """
        return pulumi.get(self, "client_id")

    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)

    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> Optional[pulumi.Input[str]]:
        """
        The password to decrypt the Client Certificate. For use when authenticating as a Service Principal using a Client
        Certificate
        """
        return pulumi.get(self, "client_secret")

    @client_secret.setter
    def client_secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_secret", value)

    @property
    @pulumi.getter(name="disableTerraformPartnerId")
    def disable_terraform_partner_id(self) -> Optional[pulumi.Input[bool]]:
        """
        Disable the Terraform Partner ID which is used if a custom `partner_id` isn't specified.
        """
        return pulumi.get(self, "disable_terraform_partner_id")

    @disable_terraform_partner_id.setter
    def disable_terraform_partner_id(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "disable_terraform_partner_id", value)

    @property
    @pulumi.getter
    def environment(self) -> Optional[pulumi.Input[str]]:
        """
        The cloud environment which should be used. Possible values are `global` (formerly `public`), `usgovernment`, `dod`,
        `germany`, and `china`. Defaults to `global`.
        """
        return pulumi.get(self, "environment")

    @environment.setter
    def environment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "environment", value)

    @property
    @pulumi.getter(name="msiEndpoint")
    def msi_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        The path to a custom endpoint for Managed Identity - in most circumstances this should be detected automatically.
        """
        return pulumi.get(self, "msi_endpoint")

    @msi_endpoint.setter
    def msi_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "msi_endpoint", value)

    @property
    @pulumi.getter(name="partnerId")
    def partner_id(self) -> Optional[pulumi.Input[str]]:
        """
        A GUID/UUID that is registered with Microsoft to facilitate partner resource usage attribution.
        """
        return pulumi.get(self, "partner_id")

    @partner_id.setter
    def partner_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "partner_id", value)

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """
        The Tenant ID which should be used. Works with all authentication methods except Managed Identity.
        """
        return pulumi.get(self, "tenant_id")

    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenant_id", value)

    @property
    @pulumi.getter(name="useCli")
    def use_cli(self) -> Optional[pulumi.Input[bool]]:
        """
        Allow Azure CLI to be used for Authentication.
        """
        return pulumi.get(self, "use_cli")

    @use_cli.setter
    def use_cli(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "use_cli", value)

    @property
    @pulumi.getter(name="useMicrosoftGraph")
    def use_microsoft_graph(self) -> Optional[pulumi.Input[bool]]:
        """
        Beta: Use the Microsoft Graph API, instead of the legacy Azure Active Directory Graph API, where supported.
        """
        return pulumi.get(self, "use_microsoft_graph")

    @use_microsoft_graph.setter
    def use_microsoft_graph(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "use_microsoft_graph", value)

    @property
    @pulumi.getter(name="useMsi")
    def use_msi(self) -> Optional[pulumi.Input[bool]]:
        """
        Allow Managed Identity to be used for Authentication.
        """
        return pulumi.get(self, "use_msi")

    @use_msi.setter
    def use_msi(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "use_msi", value)
class Provider(pulumi.ProviderResource):
    # Overload selected when callers pass the provider settings as keyword
    # arguments; the body is a stub — the real work is in _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 client_certificate_password: Optional[pulumi.Input[str]] = None,
                 client_certificate_path: Optional[pulumi.Input[str]] = None,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret: Optional[pulumi.Input[str]] = None,
                 disable_terraform_partner_id: Optional[pulumi.Input[bool]] = None,
                 environment: Optional[pulumi.Input[str]] = None,
                 metadata_host: Optional[pulumi.Input[str]] = None,
                 msi_endpoint: Optional[pulumi.Input[str]] = None,
                 partner_id: Optional[pulumi.Input[str]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 use_cli: Optional[pulumi.Input[bool]] = None,
                 use_microsoft_graph: Optional[pulumi.Input[bool]] = None,
                 use_msi: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        The provider type for the azuread package. By default, resources use package-wide configuration
        settings, however an explicit `Provider` instance may be created and passed during resource
        construction to achieve fine-grained programmatic control over provider settings. See the
        [documentation](https://www.pulumi.com/docs/reference/programming-model/#providers) for more information.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] client_certificate_password: NOTE(review): undocumented upstream; presumably the password protecting the Client Certificate — confirm.
        :param pulumi.Input[str] client_certificate_path: The path to the Client Certificate associated with the Service Principal for use when authenticating as a Service
               Principal using a Client Certificate.
        :param pulumi.Input[str] client_id: The Client ID which should be used for service principal authentication.
        :param pulumi.Input[str] client_secret: The password to decrypt the Client Certificate. For use when authenticating as a Service Principal using a Client
               Certificate
        :param pulumi.Input[bool] disable_terraform_partner_id: Disable the Terraform Partner ID which is used if a custom `partner_id` isn't specified.
        :param pulumi.Input[str] environment: The cloud environment which should be used. Possible values are `global` (formerly `public`), `usgovernment`, `dod`,
               `germany`, and `china`. Defaults to `global`.
        :param pulumi.Input[str] metadata_host: [DEPRECATED] The Hostname which should be used for the Azure Metadata Service.
        :param pulumi.Input[str] msi_endpoint: The path to a custom endpoint for Managed Identity - in most circumstances this should be detected automatically.
        :param pulumi.Input[str] partner_id: A GUID/UUID that is registered with Microsoft to facilitate partner resource usage attribution.
        :param pulumi.Input[str] tenant_id: The Tenant ID which should be used. Works with all authentication methods except Managed Identity.
        :param pulumi.Input[bool] use_cli: Allow Azure CLI to be used for Authentication.
        :param pulumi.Input[bool] use_microsoft_graph: Beta: Use the Microsoft Graph API, instead of the legacy Azure Active Directory Graph API, where supported.
        :param pulumi.Input[bool] use_msi: Allow Managed Identity to be used for Authentication.
        """
        ...
# Overload selected when callers pass a pre-built ProviderArgs bag.
@overload
def __init__(__self__,
             resource_name: str,
             args: ProviderArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    The provider type for the azuread package. By default, resources use package-wide configuration
    settings, however an explicit `Provider` instance may be created and passed during resource
    construction to achieve fine-grained programmatic control over provider settings. See the
    [documentation](https://www.pulumi.com/docs/reference/programming-model/#providers) for more information.
    :param str resource_name: The name of the resource.
    :param ProviderArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Dispatch between the two typed overloads: get_resource_args_opts returns
    # a ProviderArgs instance when the args-object form was used, else None.
    resource_args, opts = _utilities.get_resource_args_opts(ProviderArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is not None:
        # args-object form: expand the bag into keyword arguments.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
    else:
        # keyword form: forward the caller's arguments unchanged.
        __self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   client_certificate_password: Optional[pulumi.Input[str]] = None,
                   client_certificate_path: Optional[pulumi.Input[str]] = None,
                   client_id: Optional[pulumi.Input[str]] = None,
                   client_secret: Optional[pulumi.Input[str]] = None,
                   disable_terraform_partner_id: Optional[pulumi.Input[bool]] = None,
                   environment: Optional[pulumi.Input[str]] = None,
                   metadata_host: Optional[pulumi.Input[str]] = None,
                   msi_endpoint: Optional[pulumi.Input[str]] = None,
                   partner_id: Optional[pulumi.Input[str]] = None,
                   tenant_id: Optional[pulumi.Input[str]] = None,
                   use_cli: Optional[pulumi.Input[bool]] = None,
                   use_microsoft_graph: Optional[pulumi.Input[bool]] = None,
                   use_msi: Optional[pulumi.Input[bool]] = None,
                   __props__=None):
    """Shared constructor body behind both ``__init__`` overloads."""
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new provider resource (not looking one up by id):
        # build the props bag from the individual arguments.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = ProviderArgs.__new__(ProviderArgs)
        __props__.__dict__["client_certificate_password"] = client_certificate_password
        __props__.__dict__["client_certificate_path"] = client_certificate_path
        __props__.__dict__["client_id"] = client_id
        __props__.__dict__["client_secret"] = client_secret
        # Boolean provider config is serialized to JSON strings via apply(to_json).
        __props__.__dict__["disable_terraform_partner_id"] = pulumi.Output.from_input(disable_terraform_partner_id).apply(pulumi.runtime.to_json) if disable_terraform_partner_id is not None else None
        if environment is None:
            # Default from the ARM_ENVIRONMENT env var, falling back to 'public'.
            environment = (_utilities.get_env('ARM_ENVIRONMENT') or 'public')
        __props__.__dict__["environment"] = environment
        if metadata_host is None and not opts.urn:
            # metadata_host is a required provider property.
            raise TypeError("Missing required property 'metadata_host'")
        __props__.__dict__["metadata_host"] = metadata_host
        if msi_endpoint is None:
            msi_endpoint = _utilities.get_env('ARM_MSI_ENDPOINT')
        __props__.__dict__["msi_endpoint"] = msi_endpoint
        __props__.__dict__["partner_id"] = partner_id
        __props__.__dict__["tenant_id"] = tenant_id
        __props__.__dict__["use_cli"] = pulumi.Output.from_input(use_cli).apply(pulumi.runtime.to_json) if use_cli is not None else None
        __props__.__dict__["use_microsoft_graph"] = pulumi.Output.from_input(use_microsoft_graph).apply(pulumi.runtime.to_json) if use_microsoft_graph is not None else None
        if use_msi is None:
            # Default from ARM_USE_MSI, falling back to False.
            use_msi = (_utilities.get_env_bool('ARM_USE_MSI') or False)
        __props__.__dict__["use_msi"] = pulumi.Output.from_input(use_msi).apply(pulumi.runtime.to_json) if use_msi is not None else None
    super(Provider, __self__).__init__(
        'azuread',
        resource_name,
        __props__,
        opts)
@property
@pulumi.getter(name="clientCertificatePassword")
def client_certificate_password(self) -> pulumi.Output[Optional[str]]:
    """
    NOTE(review): undocumented upstream; presumably the password protecting the Client Certificate — confirm.
    """
    return pulumi.get(self, "client_certificate_password")
@property
@pulumi.getter(name="clientCertificatePath")
def client_certificate_path(self) -> pulumi.Output[Optional[str]]:
    """
    The path to the Client Certificate associated with the Service Principal for use when authenticating as a Service
    Principal using a Client Certificate.
    """
    # Exposed as an output of the provider resource.
    return pulumi.get(self, "client_certificate_path")
@property
@pulumi.getter(name="clientId")
def client_id(self) -> pulumi.Output[Optional[str]]:
    """
    The Client ID which should be used for service principal authentication.
    """
    # Exposed as an output of the provider resource.
    return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="clientSecret")
def client_secret(self) -> pulumi.Output[Optional[str]]:
    """
    The password to decrypt the Client Certificate. For use when authenticating as a Service Principal using a Client
    Certificate
    """
    # Exposed as an output of the provider resource.
    return pulumi.get(self, "client_secret")
@property
@pulumi.getter
def environment(self) -> pulumi.Output[Optional[str]]:
    """
    The cloud environment which should be used. Possible values are `global` (formerly `public`), `usgovernment`, `dod`,
    `germany`, and `china`. Defaults to `global`.
    """
    # Exposed as an output of the provider resource.
    return pulumi.get(self, "environment")
@property
@pulumi.getter(name="metadataHost")
def metadata_host(self) -> pulumi.Output[str]:
    """
    [DEPRECATED] The Hostname which should be used for the Azure Metadata Service.
    """
    # Required property — note the non-Optional Output type.
    return pulumi.get(self, "metadata_host")
@property
@pulumi.getter(name="msiEndpoint")
def msi_endpoint(self) -> pulumi.Output[Optional[str]]:
    """
    The path to a custom endpoint for Managed Identity - in most circumstances this should be detected automatically.
    """
    # Exposed as an output of the provider resource.
    return pulumi.get(self, "msi_endpoint")
@property
@pulumi.getter(name="partnerId")
def partner_id(self) -> pulumi.Output[Optional[str]]:
    """
    A GUID/UUID that is registered with Microsoft to facilitate partner resource usage attribution.
    """
    # Exposed as an output of the provider resource.
    return pulumi.get(self, "partner_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Output[Optional[str]]:
    """
    The Tenant ID which should be used. Works with all authentication methods except Managed Identity.
    """
    # Exposed as an output of the provider resource.
    return pulumi.get(self, "tenant_id")
| 50.034884
| 203
| 0.668929
| 2,591
| 21,515
| 5.326129
| 0.087225
| 0.071739
| 0.085362
| 0.066957
| 0.861812
| 0.80587
| 0.759275
| 0.711667
| 0.676014
| 0.622174
| 0
| 0.000061
| 0.237044
| 21,515
| 429
| 204
| 50.151515
| 0.840634
| 0.318104
| 0
| 0.45283
| 1
| 0
| 0.106525
| 0.034806
| 0
| 0
| 0
| 0
| 0
| 1
| 0.150943
| false
| 0.060377
| 0.018868
| 0.007547
| 0.260377
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
f767aaaa672a636acc1c2951b5a6f2d2cf60787f
| 127
|
py
|
Python
|
ramda/negate_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 56
|
2018-08-06T08:44:58.000Z
|
2022-03-17T09:49:03.000Z
|
ramda/negate_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 28
|
2019-06-17T11:09:52.000Z
|
2022-02-18T16:59:21.000Z
|
ramda/negate_test.py
|
slavaGanzin/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 5
|
2019-09-18T09:24:38.000Z
|
2021-07-21T08:40:23.000Z
|
from .negate import negate
from ramda.private.asserts import assert_equal
def negate_test():
    """negate(5) must produce -5."""
    expected = -5
    assert_equal(negate(5), expected)
| 18.142857
| 46
| 0.76378
| 19
| 127
| 4.947368
| 0.578947
| 0.234043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018349
| 0.141732
| 127
| 6
| 47
| 21.166667
| 0.844037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f7913b6a0547d5bc4707951e327ce11fe4eb28e2
| 1,620
|
py
|
Python
|
test/test_parse.py
|
terrykong/pyvirgo
|
9c6cb8d791446881265a4a0e3f601376c618dadc
|
[
"MIT"
] | 4
|
2021-05-23T21:07:44.000Z
|
2021-08-11T00:04:54.000Z
|
test/test_parse.py
|
terrykong/pyvirgo
|
9c6cb8d791446881265a4a0e3f601376c618dadc
|
[
"MIT"
] | 9
|
2021-02-22T02:04:36.000Z
|
2021-05-24T04:53:54.000Z
|
test/test_parse.py
|
terrykong/pyvirgo
|
9c6cb8d791446881265a4a0e3f601376c618dadc
|
[
"MIT"
] | 1
|
2021-05-24T05:00:32.000Z
|
2021-05-24T05:00:32.000Z
|
from virgo.parse import parse
def test_parse_simple_edge():
    """A single edge yields node "a" whose only successor is "b"."""
    graph = parse("a -> b")
    assert graph is not None
    assert "a" in graph.nodes
    assert list(graph.direct_successors_of("a")) == ["b"]
    assert list(graph.direct_successors_of("b")) == []
def test_parse_simple_edge_with_newline():
    """A trailing newline after an edge must not break parsing."""
    graph = parse("a -> b\n")
    assert graph is not None
def test_parse_simple_node_description():
    """A backtick assignment becomes a node carrying that description."""
    graph = parse("parser = `goyacc parser.y`")
    assert graph is not None
    assert "parser" in graph.nodes
    assert graph.nodes["parser"] == "goyacc parser.y"
def test_parse_simple_node_description_with_blank_line():
    """One leading blank line before the assignment is tolerated."""
    graph = parse("\nparser = `goyacc parser.y`")
    assert graph is not None
    assert "parser" in graph.nodes
    assert graph.nodes["parser"] == "goyacc parser.y"
def test_parse_simple_node_description_with_blank_lines():
    """Multiple leading blank lines are tolerated as well."""
    graph = parse("\n\nparser = `goyacc parser.y`")
    assert graph is not None
    assert "parser" in graph.nodes
    assert graph.nodes["parser"] == "goyacc parser.y"
def test_parse_simple_node_description_with_line_continuation():
    """A `|` continuation before the description joins the lines."""
    graph = parse("\n\nparser = |\n`goyacc parser.y`")
    assert graph is not None
    assert "parser" in graph.nodes
    assert graph.nodes["parser"] == "goyacc parser.y"
def test_parse_simple_node_description_with_line_continuation_in_desc():
    """A `|` continuation inside the backticks joins the description."""
    graph = parse("\n\nparser = `goyacc |\nparser.y`")
    assert graph is not None
    assert "parser" in graph.nodes
    assert graph.nodes["parser"] == "goyacc parser.y"
| 28.421053
| 72
| 0.683333
| 233
| 1,620
| 4.553648
| 0.154506
| 0.135721
| 0.110273
| 0.118756
| 0.889727
| 0.848256
| 0.753063
| 0.753063
| 0.72196
| 0.685203
| 0
| 0
| 0.198148
| 1,620
| 56
| 73
| 28.928571
| 0.816782
| 0
| 0
| 0.571429
| 0
| 0
| 0.187037
| 0
| 0
| 0
| 0
| 0
| 0.47619
| 1
| 0.166667
| false
| 0
| 0.02381
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e391fbc5f16ae9b6c4b4bf9b65cf2d527ce78ab8
| 37,538
|
py
|
Python
|
db_functions.py
|
atakangol/PLADAT_db
|
35c05c3fabfa6f26253065ea0e77bc3754acf266
|
[
"MIT"
] | null | null | null |
db_functions.py
|
atakangol/PLADAT_db
|
35c05c3fabfa6f26253065ea0e77bc3754acf266
|
[
"MIT"
] | null | null | null |
db_functions.py
|
atakangol/PLADAT_db
|
35c05c3fabfa6f26253065ea0e77bc3754acf266
|
[
"MIT"
] | null | null | null |
import psycopg2 as db
import sys
import json
url = "dbname='PlaDat' user='postgres' host='localhost' password='45581222'"
def print_psycopg2_exception(err):
    """Print detailed diagnostics for a psycopg2 error to stdout.

    Must be called from inside an ``except`` block so that
    ``sys.exc_info()`` still refers to the active exception.
    """
    exc_type, _exc_value, tb = sys.exc_info()
    # Line number at which the exception was raised.
    lineno = tb.tb_lineno
    print ("\npsycopg2 ERROR:", err, "on line number:", lineno)
    print ("psycopg2 traceback:", tb, "-- type:", exc_type)
    # psycopg2 extensions.Diagnostics object attribute
    print ("\nextensions.Diagnostics:", err.diag)
    # Raw PostgreSQL error string and SQLSTATE code.
    print ("pgerror:", err.pgerror)
    print ("pgcode:", err.pgcode, "\n")
def test(t):
    """Overwrite the module-level connection URL with *t* and return it.

    Prints the URL before and after the swap for manual inspection.
    """
    global url
    print(url)  # value before the swap
    url = t
    print(url)  # value after the swap
    return url
def create_tables():
    """Create the schema by running every statement in PlaDat.sql.

    The file's lines are joined (space-separated, as before) into one
    script and executed in a single transaction.  Fix: the cursor and
    connection are now closed even when execution raises — the original
    leaked both on any error.
    """
    with open("PlaDat.sql", "r") as sql:
        # Same concatenation as before: each line followed by one space.
        statement = "".join(line + " " for line in sql)
    print(statement)
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(statement)
        connection.commit()
    finally:
        cursor.close()
        connection.close()
def drop_tables():
    """Drop the schema by running every statement in DROP.sql.

    Fix: the cursor and connection are now closed even when execution
    raises — the original leaked both on any error.
    """
    with open("DROP.sql", "r") as sql:
        # Same concatenation as before: each line followed by one space.
        statement = "".join(line + " " for line in sql)
    print(statement)
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(statement)
        connection.commit()
    finally:
        cursor.close()
        connection.close()
def skill_format(l):
    """Convert "id:name:desc" strings into dicts with id/name/desc keys.

    Fix: split on at most two colons so a description that itself
    contains ':' stays intact (the unbounded split silently truncated
    such descriptions).  Raises IndexError, as before, when an entry has
    fewer than three fields.

    :param l: iterable of "id:name:description" strings.
    :return: list of {"id", "name", "desc"} dicts, in input order.
    """
    formatted = []
    for entry in l:
        parts = entry.split(":", 2)  # maxsplit=2 keeps colons in the description
        formatted.append({
            "id": parts[0],
            "name": parts[1],
            "desc": parts[2]
        })
    return formatted
#city
def insert_city(country, name):
    """Insert a title-cased city row; return (success, new_id).

    Returns (True, id) on success and (False, -1) on failure.  Fixes:
    the query is parameterized instead of ``str.format`` (SQL
    injection), and flag/id are initialized so an unexpected error no
    longer raises NameError at the return statement.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    statement = """INSERT INTO public."CITIES"( "COUNTRY", "NAME") VALUES (%s, %s) RETURNING "ID";"""
    flag, id = False, -1
    try:
        cursor.execute(statement, (country.title(), name.title()))
        id = int(cursor.fetchone()[0])
        connection.commit()
        flag = True
    except Exception as err:
        if getattr(err, "pgcode", None) == "23505":  # unique_violation
            print("this city already exists")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def search_city(term):
    '''searches in both the name and the country

    Case-insensitive substring match.  Fixes: the term is bound as a
    query parameter (the original interpolated it into the SQL, which
    was injectable) and the connection is closed before returning (the
    original leaked it).
    '''
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        statement = """ SELECT "ID", "COUNTRY", "NAME"
        FROM public."CITIES"
        where "NAME" ilike %s or "COUNTRY" ilike %s;"""
        pattern = "%{}%".format(term.lower())
        cursor.execute(statement, (pattern, pattern))
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def get_all_cities():
    """Return every row of CITIES.

    Fix: the cursor and connection are closed before returning — the
    original leaked one connection per call.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(""" SELECT *
        FROM public."CITIES" """)
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def get_city(city_id):
    """Fetch one CITIES row by id; returns None when there is no match.

    Fixes: parameterized query (was string-formatted) and the
    connection is closed before returning (was leaked).
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(""" SELECT *
        FROM public."CITIES"
        where "ID"=%s; """, (city_id,))
        return cursor.fetchone()
    finally:
        cursor.close()
        connection.close()
#student profile
def student_signup(email, name, password):
    """Create a student account; return (success, new_id).

    Returns (True, id) on success and (False, -1) on failure (e.g. the
    email is already registered).  Fixes: parameterized insert (SQL
    injection) and flag/id initialized so an unexpected error no longer
    raises NameError at the return statement.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    statement = """INSERT INTO public."STUDENTS"(
    "EMAIL", "PASSWORD", "NAME")
    VALUES (%s, %s, %s) RETURNING "ID";"""
    flag, id = False, -1
    try:
        cursor.execute(statement, (email, password, name))
        id = int(cursor.fetchone()[0])
        connection.commit()
        flag = True
    except Exception as err:
        if getattr(err, "pgcode", None) == "23505":  # unique_violation
            print("this email is already in use")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def student_login(email, pw):
    """Check student credentials; return (ok, student_id).

    (True, id) on a match; (False, id) for a known email with a wrong
    password; (False, -1) for an unknown email.  Fixes: the second,
    email-only query was built but never executed in the original, so
    the wrong-password branch was unreachable; queries are now
    parameterized and the connection is closed.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute("""SELECT "ID" FROM public."STUDENTS" where "EMAIL" = %s and "PASSWORD"=%s;""", (email, pw))
        result = cursor.fetchone()
        if result:
            return (True, int(result[0]))
        cursor.execute("""SELECT "ID" FROM public."STUDENTS" where "EMAIL" = %s ;""", (email,))
        result = cursor.fetchone()
        if result:
            print('wrong password')
            return (False, int(result[0]))
        print("user doesn't exist")
        return(False,-1)
    finally:
        cursor.close()
        connection.close()
def update_student_city(student_id, city_id):
    """Point a student at a city; return (success, id).

    (True, id) when a row was updated; (False, student_id) when no
    student matched; (False, -1) on a foreign-key violation.  Fixes:
    parameterized query, flag/id initialized (an unexpected error used
    to raise NameError at the return), and the dead commented-out block
    removed.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    flag, id = False, -1
    try:
        cursor.execute("""UPDATE public."STUDENTS"
        SET "CITY"=%s
        WHERE "ID"=%s returning "ID";""", (city_id, student_id))
        res = cursor.fetchall()
        connection.commit()
        if res:
            flag, id = True, res[0][0]
        else:
            # No row matched: report failure but echo the requested id.
            flag, id = False, student_id
    except Exception as err:
        if getattr(err, "pgcode", None) == "23503":  # foreign_key_violation
            print("no city with this id found")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def update_student_department(student_id, department_id):
    """Point a student at a department; return (success, id).

    (True, id) when a row was updated; (False, student_id) when no
    student matched; (False, -1) on a database error (diagnostics are
    printed, as before).  Fixes: parameterized query and flag/id
    initialized so an unexpected error no longer raises NameError.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    flag, id = False, -1
    try:
        cursor.execute("""UPDATE public."STUDENTS"
        SET "DEPARTMENT"=%s
        WHERE "ID"=%s returning "ID";""", (department_id, student_id))
        res = cursor.fetchall()
        connection.commit()
        if res:
            flag, id = True, res[0][0]
        else:
            # No row matched: report failure but echo the requested id.
            flag, id = False, student_id
    except Exception as err:
        print_psycopg2_exception(err)
        if getattr(err, "pgcode", None) == "23503":  # foreign_key_violation
            print("no department with this id found")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def update_student_university(student_id, uni_id):
    """Point a student at a university; return (success, id).

    (True, id) when a row was updated; (False, student_id) when no
    student matched; (False, -1) on a database error (diagnostics are
    printed, as before).  Fixes: parameterized query and flag/id
    initialized so an unexpected error no longer raises NameError.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    flag, id = False, -1
    try:
        cursor.execute("""UPDATE public."STUDENTS"
        SET "UNIVERSITY"=%s
        WHERE "ID"=%s returning "ID";""", (uni_id, student_id))
        res = cursor.fetchall()
        connection.commit()
        if res:
            flag, id = True, res[0][0]
        else:
            # No row matched: report failure but echo the requested id.
            flag, id = False, student_id
    except Exception as err:
        print_psycopg2_exception(err)
        if getattr(err, "pgcode", None) == "23503":  # foreign_key_violation
            print("no uni with this id found")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def get_student_details(user_id):
    """Return one student's profile row, or None if the id is unknown.

    Row layout: (id, name, university, department, faculty, city,
    preferred employment, grade, age, list of "skill:name:description"
    strings — entries may be empty when the student has no skills).
    Fixes: the id is bound as a parameter (was string-formatted) and
    the connection is closed (was leaked).
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        statement = """select
        S."ID" as id,
        S."NAME" as name,
        U."NAME" as university,
        D."NAME" as department,
        D."FACULTY" as faculty,
        C."NAME" as student_city,
        S."EMP_PREF" as preferred_emp,
        S."GRADE" as grade,
        S."AGE" as age,
        ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list
        from "STUDENTS" S
        left join "STUDENT_SKILL" SS on S."ID" = SS."STU_ID"
        left join "SKILLS" SK on SS."SKILL_ID" = SK."ID"
        left join "UNIVERSITIES" U on U."ID"=S."UNIVERSITY"
        left join "DEPARTMENTS" D on D."ID"=S."DEPARTMENT"
        left join "CITIES" C on C."ID"=S."CITY"
        where S."ID" = %s
        GROUP BY S."ID",S."NAME",U."NAME",D."NAME",D."FACULTY",C."NAME",S."EMP_PREF"
        """
        cursor.execute(statement, (user_id,))
        return cursor.fetchone()
    finally:
        cursor.close()
        connection.close()
def update_student_pref(stu_id, pref):
    """Set a student's employment preference; return (True, stu_id).

    NOTE: like the original, this always reports success — the
    original's except branch wrapped only ``flag = True`` and could
    never fire, so the "no student" path was dead code.  Fixes:
    parameterized query and the cursor/connection are closed even when
    execution raises.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute("""UPDATE public."STUDENTS"
        SET "EMP_PREF"= %s
        WHERE "ID"=%s returning "ID";""", (pref, stu_id))
        cursor.fetchall()
        connection.commit()
    finally:
        cursor.close()
        connection.close()
    return (True, stu_id)
def update_student_age(stu_id, age):
    """Set a student's age; return (True, stu_id).

    NOTE: like the original, this always reports success — the
    original's except branch could never fire.  Fixes: parameterized
    query and guaranteed cleanup of cursor/connection.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute("""UPDATE public."STUDENTS"
        SET "AGE"= %s
        WHERE "ID"=%s returning "ID";""", (age, stu_id))
        cursor.fetchall()
        connection.commit()
    finally:
        cursor.close()
        connection.close()
    return (True, stu_id)
def update_student_grade(stu_id, grade):
    """Set a student's grade; return (True, stu_id).

    NOTE: like the original, this always reports success — the
    original's except branch could never fire.  Fixes: parameterized
    query and guaranteed cleanup of cursor/connection.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute("""UPDATE public."STUDENTS"
        SET "GRADE"= %s
        WHERE "ID"=%s returning "ID";""", (grade, stu_id))
        cursor.fetchall()
        connection.commit()
    finally:
        cursor.close()
        connection.close()
    return (True, stu_id)
#departments and universities
def add_department(name, faculty):
    """Insert a title-cased department; return (success, new_id).

    Returns (True, id) on success and (False, -1) on failure.  Fixes:
    parameterized insert (SQL injection) and flag/id initialized so an
    unexpected error no longer raises NameError at the return.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    statement = """INSERT INTO public."DEPARTMENTS"(
    "FACULTY", "NAME")
    VALUES ( %s,%s) RETURNING "ID";"""
    flag, id = False, -1
    try:
        cursor.execute(statement, (faculty.title(), name.title()))
        id = cursor.fetchone()[0]
        connection.commit()
        flag = True
    except Exception as err:
        if getattr(err, "pgcode", None) == "23505":  # unique_violation
            print("this department already exists")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def get_all_departments():
    """Return every row of DEPARTMENTS; closes the connection (was leaked)."""
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(""" SELECT *
        FROM public."DEPARTMENTS" """)
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def get_unique_faculties():
    """Return the distinct faculty names; closes the connection (was leaked)."""
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(""" SELECT DISTINCT "FACULTY"
        FROM public."DEPARTMENTS" """)
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def get_unique_departments():
    """Return the distinct department names; closes the connection (was leaked)."""
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(""" SELECT DISTINCT "NAME"
        FROM public."DEPARTMENTS" """)
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def search_department(term):
    '''searches in both the name and the faculty

    Case-insensitive substring match.  Fixes: parameterized query (was
    injectable via str.format) and the connection is closed (was
    leaked).
    '''
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        statement = """ SELECT "ID", "FACULTY", "NAME"
        FROM public."DEPARTMENTS"
        where "NAME" ilike %s or "FACULTY" ilike %s;"""
        pattern = "%{}%".format(term.lower())
        cursor.execute(statement, (pattern, pattern))
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def add_university(name, city_id):
    """Insert a title-cased university; return (success, new_id).

    Returns (True, id) on success and (False, -1) on failure (duplicate
    name or unknown city).  Fixes: parameterized insert, flag/id
    initialized (an unexpected error used to raise NameError), and the
    redundant duplicated ``flag=False`` removed.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    statement = """INSERT INTO public."UNIVERSITIES"(
    "NAME", "CITY")
    VALUES ( %s,%s) RETURNING "ID";"""
    flag, id = False, -1
    try:
        cursor.execute(statement, (name.title(), city_id))
        id = cursor.fetchone()[0]
        connection.commit()
        flag = True
    except Exception as err:
        code = getattr(err, "pgcode", None)
        if code == "23505":  # unique_violation
            print("this university already exists")
        elif code == "23503":  # foreign_key_violation
            print("no city with this id found")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def search_university(term):
    '''searches in the name

    Case-insensitive substring match on the university name, joined
    with the city row.  Fixes: parameterized query (was injectable) and
    the connection is closed (was leaked).
    '''
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        statement = """ select "UNIVERSITIES"."ID","UNIVERSITIES"."NAME","CITIES"."COUNTRY","CITIES"."NAME"
        from "UNIVERSITIES" inner join "CITIES" ON ("UNIVERSITIES"."CITY" = "CITIES"."ID")
        where "UNIVERSITIES"."NAME" ilike %s;"""
        cursor.execute(statement, ("%{}%".format(term.lower()),))
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def all_unis():
    """List all universities with their city name and country.

    Fix: the cursor and connection are closed (were leaked).
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(""" SELECT
        "UNIVERSITIES"."ID",
        "UNIVERSITIES"."NAME",
        "CITIES"."NAME",
        "CITIES"."COUNTRY"
        FROM public."UNIVERSITIES"
        inner join "CITIES" ON "UNIVERSITIES"."CITY" = "CITIES"."ID" """)
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
#skills
def get_all_skills():
    """Return every row of SKILLS; closes the connection (was leaked)."""
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute(""" SELECT *
        FROM public."SKILLS" """)
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def add_skill(name, desc=None):
    """Insert a skill (description optional); return (success, new_id).

    Returns (True, id) on success and (False, -1) on failure.  Fixes:
    parameterized insert (SQL injection) and id initialized — in the
    original, a failure on the insert left ``id`` unbound and the
    return raised NameError.  As before, only the description is
    title-cased, not the name — NOTE(review): possibly unintended,
    confirm.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    if desc:
        statement = """INSERT INTO public."SKILLS"(
        "NAME", "DESCRIPTION")
        VALUES ( %s,%s) RETURNING "ID";"""
        params = (name, desc.title())
    else:
        statement = """INSERT INTO public."SKILLS"(
        "NAME")
        VALUES ( %s) RETURNING "ID";"""
        params = (name,)
    flag, id = False, -1
    try:
        cursor.execute(statement, params)
        id = cursor.fetchone()[0]
        connection.commit()
        flag = True
    except Exception as err:
        print_psycopg2_exception(err)
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def search_skill(term):
    '''searches in both the name and the description

    Case-insensitive substring match.  Fixes: parameterized query (was
    injectable) and the connection is closed (was leaked).
    '''
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        statement = """ SELECT "ID", "NAME", "DESCRIPTION"
        FROM public."SKILLS"
        where "NAME" ilike %s or "DESCRIPTION" ilike %s;"""
        pattern = "%{}%".format(term.lower())
        cursor.execute(statement, (pattern, pattern))
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def add_student_skill(student_id, skill_id):
    """Link a student to a skill; return (success, id).

    (True, id) when the link was created; (False, student_id) when the
    insert returned no row; (False, -1) on a constraint violation.
    Fixes: parameterized insert, and the original's trailing
    unconditional ``id=-1; flag=False`` in the except block (which made
    the per-error-code assignments dead code) is removed — the prints
    are kept, and the overall (False, -1) outcome for errors is
    preserved.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    flag, id = False, -1
    try:
        cursor.execute("""INSERT INTO public."STUDENT_SKILL"(
        "STU_ID", "SKILL_ID")
        VALUES (%s, %s) RETURNING "ID";""", (student_id, skill_id))
        res = cursor.fetchall()
        connection.commit()
        if res:
            flag, id = True, res[0][0]
        else:
            print("no student with this id")
            flag, id = False, student_id
    except Exception as err:
        code = getattr(err, "pgcode", None)
        if code == "23503":  # foreign_key_violation
            print("this skill or student doesnt exist")
        elif code == "23505":  # unique_violation
            print("this student already has this skill")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def remove_student_skill(student_id, skill_id):
    """Delete a student↔skill link; return True on success.

    Fixes: parameterized delete (was injectable via str.format).
    Diagnostics are printed on failure, as before.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    flag = False
    try:
        cursor.execute("""DELETE FROM public."STUDENT_SKILL"
        WHERE "STU_ID"=%s AND "SKILL_ID"=%s;""", (student_id, skill_id))
        connection.commit()
        flag = True
    except Exception as err:
        print_psycopg2_exception(err)
    finally:
        cursor.close()
        connection.close()
    return flag
def search_students_by_skill(term):
    '''Search skill names and descriptions; return matching students.

    Each row: (id, name, university, department, faculty, city,
    preferred employment, grade, age, list of "skill:name:description"
    strings — entries may be empty).  Fixes: the term is bound as a
    parameter (was interpolated into the SQL) and the connection is
    closed (was leaked).
    '''
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        statement = """select *
        from (select
        S."ID" as id,
        S."NAME" as name,
        U."NAME" as university,
        D."NAME" as department,
        D."FACULTY" as faculty,
        C."NAME" as student_city,
        S."EMP_PREF" as preferred_emp,
        S."GRADE" as grade,
        S."AGE" as age,
        ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list
        from "STUDENTS" S
        left join "STUDENT_SKILL" SS on S."ID" = SS."STU_ID"
        left join "SKILLS" SK on SS."SKILL_ID" = SK."ID"
        left join "UNIVERSITIES" U on U."ID"=S."UNIVERSITY"
        left join "DEPARTMENTS" D on D."ID"=S."DEPARTMENT"
        left join "CITIES" C on C."ID"=S."CITY"
        GROUP BY S."ID",S."NAME",U."NAME",D."NAME",D."FACULTY",C."NAME",S."EMP_PREF"
        ) as res
        where array_to_string(skill_list, ',') ilike %s
        """
        cursor.execute(statement, ("%{}%".format(term),))
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
def search_students_by_skill_ids(ids):
    '''Return students who hold at least one of the given skill ids.

    *ids* is a comma-separated string such as "1,2,3".  Each row is the
    same student profile as search_students_by_skill, plus the array of
    skill ids.  Fixes: the ids are bound as an integer-array parameter
    (psycopg2 adapts a Python list to a PostgreSQL ARRAY), replacing
    the hand-built ARRAY literal and its single-element special case.
    '''
    key = [int(part) for part in ids.split(",")]
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        statement = """select *
        from (select
        S."ID" as id,
        S."NAME" as name,
        U."NAME" as university,
        D."NAME" as department,
        D."FACULTY" as faculty,
        C."NAME" as student_city,
        S."EMP_PREF" as preferred_emp,
        S."GRADE" as grade,
        S."AGE" as age,
        ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list,
        ARRAY_AGG(SK."ID") as skill_ids
        from "STUDENTS" S
        left join "STUDENT_SKILL" SS on S."ID" = SS."STU_ID"
        left join "SKILLS" SK on SS."SKILL_ID" = SK."ID"
        left join "UNIVERSITIES" U on U."ID"=S."UNIVERSITY"
        left join "DEPARTMENTS" D on D."ID"=S."DEPARTMENT"
        left join "CITIES" C on C."ID"=S."CITY"
        GROUP BY S."ID",S."NAME",U."NAME",D."NAME",D."FACULTY",C."NAME",S."EMP_PREF"
        ) as res
        where %s::int[] && skill_ids
        """
        cursor.execute(statement, (key,))
        return cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
#company profile
def company_signup(email, name, password):
    """Create a company account; return (success, new_id).

    Returns (True, id) on success and (False, -1) on failure.  Fixes:
    parameterized insert (SQL injection) and flag/id initialized so an
    unexpected error no longer raises NameError at the return.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    statement = """INSERT INTO public."COMPANIES"(
    "EMAIL", "PASSWORD", "NAME")
    VALUES (%s, %s,%s) RETURNING "ID" ;"""
    flag, id = False, -1
    try:
        cursor.execute(statement, (email, password, name))
        id = int(cursor.fetchone()[0])
        connection.commit()
        flag = True
    except Exception as err:
        if getattr(err, "pgcode", None) == "23505":  # unique_violation
            print("this email is already in use")
    finally:
        cursor.close()
        connection.close()
    return (flag, id)
def is_verified_company(v):
    """Placeholder verification check: always True.

    An admin is supposed to mark companies as verified; until that is
    implemented, every company is treated as verified regardless of *v*.
    """
    return True
def company_login(email, pw):
    """Check company credentials; return (ok, company_id).

    (True, id) on a match; (False, id) for a known email with a wrong
    password; (False, -1) for an unknown email.  Fixes: the second,
    email-only query was built but never executed in the original, so
    the wrong-password branch was unreachable; queries are now
    parameterized and the connection is closed.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    try:
        cursor.execute("""SELECT "ID" FROM public."COMPANIES" where "EMAIL" = %s and "PASSWORD"=%s;""", (email, pw))
        result = cursor.fetchone()
        if result:
            return (True, int(result[0]))
        cursor.execute("""SELECT "ID" FROM public."COMPANIES" where "EMAIL" = %s ;""", (email,))
        result = cursor.fetchone()
        if result:
            print('wrong password')
            return (False, int(result[0]))
        print("Company doesn't exist")
        return(False,-1)
    finally:
        cursor.close()
        connection.close()
def update_company_city(company_id, city_id):
    """Point a company at a city; return True on success.

    Fixes: parameterized update (SQL injection) and ``flag``
    initialized — in the original, any error whose pgcode was not
    23503 left ``flag`` unbound and the return raised NameError.
    """
    connection = db.connect(url)
    cursor = connection.cursor()
    flag = False
    try:
        cursor.execute("""UPDATE public."COMPANIES"
        SET "CITY"=%s
        WHERE "ID"=%s returning "ID";""", (city_id, company_id))
        connection.commit()
        flag = True
    except Exception as err:
        if getattr(err, "pgcode", None) == "23503":  # foreign_key_violation
            print("No city with this id found")
    finally:
        cursor.close()
        connection.close()
    return flag
def update_company_profile(company_id, excid, excname, excdob):
connection = db.connect(url)
cursor = connection.cursor()
statement = """UPDATE public."COMPANIES"
SET "EXC_ID"={}, "EXC_NAME"='{}',"EXC_DOB"='{}'
WHERE "ID"={} returning "ID";""".format(excid, excname, excdob,company_id)
cursor.execute(statement)
res = cursor.fetchall()
connection.commit()
try:
flag= True
except:
print( "no company with this id" )
flag=False
cursor.close()
connection.close()
return (flag,company_id)
def get_company_details(company_id):
connection = db.connect(url)
cursor = connection.cursor()
statement ="""select
C."ID" as id,
C."EMAIL" as email,
C."NAME" as name,
C."CITY" as city,
C."EXC_ID" as excid,
C."EXC_NAME" as excname,
C."EXC_DOB" as excdob,
--ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list
ARRAY_AGG( JB."ID") as job_list
from "COMPANIES" C
left join "JOB_LISTINGS" as JB on JB."COMPANY" = C."ID"
--left join "JOB_REQ" AS JQ ON JQ."JOB_ID" = JB."ID"
--left join "SKILLS" AS SK ON JQ."REQ_ID" = SK."ID"
where C."ID" = {}
GROUP BY id,email,name,excid,excname,excdob
""".format(company_id)
cursor.execute(statement)
result = cursor.fetchone()
return(result)
#job listings
def add_job_listing(company_id,pref, description=None):
connection = db.connect(url)
cursor = connection.cursor()
if description:
statement = """INSERT INTO public."JOB_LISTINGS" ("COMPANY","DESCRIPTION","EMP_PREF")
VALUES ( '{}','{}','{}') RETURNING "ID";""".format(company_id, description,pref)
else:
statement = """INSERT INTO public."JOB_LISTINGS" ("COMPANY","EMP_PREF")
VALUES ( '{}','{}') RETURNING "ID";""".format(company_id,pref)
try:
cursor.execute(statement)
id = cursor.fetchone()
id = id[0]
connection.commit()
flag=True
cursor.close()
connection.close()
return (flag,id)
except Exception as err:
print_psycopg2_exception(err)
flag=False
cursor.close()
connection.close()
return (flag,-1)
finally:
cursor.close()
connection.close()
def add_job_req(job_id,skill_id):
connection = db.connect(url)
cursor = connection.cursor()
statement = """INSERT INTO public."JOB_REQ"(
"JOB_ID", "REQ_ID")
VALUES ({}, {}) RETURNING "ID";""".format(job_id,skill_id)
try:
cursor.execute(statement)
res = cursor.fetchall()
#print(len(res))
connection.commit()
flag = True
try:
id = res[0][0]
except:
print("no job with this id" )
#return (false,student_id)
id=job_id
flag=False
except Exception as err:
# pass exception to function
#print_psycopg2_exception(err)
if (err.pgcode == "23503"):
print("this skill or job doesnt exist")
id=-1
flag=False
if (err.pgcode == "23505"):
print("this job already has this skill")
id=-1
flag=False
id=-1
flag=False
#print(id)
finally:
cursor.close()
connection.close()
return (flag,id)
def remove_job_req(job_id,skill_id):
connection = db.connect(url)
cursor = connection.cursor()
statement = """DELETE FROM public."JOB_REQ"
WHERE "JOB_ID"={} AND "REQ_ID"={};""".format(job_id,skill_id)
try:
cursor.execute(statement)
connection.commit()
flag = True
except Exception as err:
# pass exception to function
print_psycopg2_exception(err)
flag=False
finally:
cursor.close()
connection.close()
return flag
def update_job(joblisting_id,city_id,new_desc,pref):
connection = db.connect(url)
cursor = connection.cursor()
statement = """UPDATE public."JOB_LISTINGS"
SET "LOCATION"={},"DESCRIPTION" = '{}' , "EMP_PREF"='{}'
WHERE "ID"={} returning "ID";""".format(city_id,new_desc,pref,joblisting_id)
try:
cursor.execute(statement)
res = cursor.fetchall()
connection.commit()
flag = True
except Exception as err:
flag = False
print_psycopg2_exception(err)
finally:
cursor.close()
connection.close()
return (flag)
def update_joblisting_location(job_id,city_id):
connection = db.connect(url)
cursor = connection.cursor()
statement = """UPDATE public."JOB_LISTINGS"
SET "LOCATION"={}
WHERE "ID"={} returning "ID";""".format(city_id,job_id)
try:
cursor.execute(statement)
res = cursor.fetchall()
connection.commit()
flag = True
except Exception as err:
flag = False
print_psycopg2_exception(err)
finally:
cursor.close()
connection.close()
return (flag)
def delete_job(joblisting_id):
statement = """ delete from "JOB_REQ" WHERE "JOB_ID" = {j};
DELETE FROM "APPLICATIONS" WHERE "JOB_ID" ={j} ;
DELETE FROM "JOB_LISTINGS" WHERE "ID" ={j} ;
""".format(j = joblisting_id)
connection = db.connect(url)
cursor = connection.cursor()
cursor.execute(statement)
connection.commit()
cursor.close()
connection.close()
return True
def get_all_jobs():
''' job_id,company_id,job_desc,company_name,city,country,skill_list '''
connection = db.connect(url)
cursor = connection.cursor()
statement = """ SELECT
JB."ID" ID,
JB."COMPANY" AS COMPANY_ID,
JB."DESCRIPTION" DESCRIPTION,
C."NAME" AS COMPANY_NAME,
CT."NAME" AS CITY,
CT."COUNTRY" AS COUNTRY,
JB."EMP_PREF" AS type,
ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list
FROM "JOB_LISTINGS" AS JB
INNER JOIN "COMPANIES" AS C ON C."ID"=JB."COMPANY"
INNER JOIN "CITIES" AS CT ON JB."LOCATION"=CT."ID"
LEFT JOIN "JOB_REQ" AS JQ ON JQ."JOB_ID"=JB."ID"
LEFT JOIN "SKILLS" AS SK ON SK."ID"=JQ."REQ_ID"
GROUP BY ID,COMPANY_ID,DESCRIPTION,COMPANY_NAME,CITY,COUNTRY """
cursor.execute(statement)
results = cursor.fetchall()
return results
def get_job_details(job_id):
''' job_id,company_id,job_desc,company_name,city,country,skill_list '''
connection = db.connect(url)
cursor = connection.cursor()
statement = """ SELECT
JB."ID" ID,
JB."COMPANY" AS COMPANY_ID,
JB."DESCRIPTION" DESCRIPTION,
C."NAME" AS COMPANY_NAME,
CT."NAME" AS CITY,
CT."COUNTRY" AS COUNTRY,
JB."EMP_PREF" AS type,
ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list
FROM "JOB_LISTINGS" AS JB
INNER JOIN "COMPANIES" AS C ON C."ID"=JB."COMPANY"
INNER JOIN "CITIES" AS CT ON JB."LOCATION"=CT."ID"
LEFT JOIN "JOB_REQ" AS JQ ON JQ."JOB_ID"=JB."ID"
LEFT JOIN "SKILLS" AS SK ON SK."ID"=JQ."REQ_ID"
where JB."ID"={}
GROUP BY ID,COMPANY_ID,DESCRIPTION,COMPANY_NAME,CITY,COUNTRY """.format(job_id)
cursor.execute(statement)
results = cursor.fetchone()
return results
def search_jobs_by_skill(term):
connection = db.connect(url)
cursor = connection.cursor()
statement = """select * from (
SELECT
JB."ID" ID,
JB."COMPANY" AS COMPANY_ID,
JB."DESCRIPTION" DESCRIPTION,
C."NAME" AS COMPANY_NAME,
CT."NAME" AS CITY,
CT."COUNTRY" AS COUNTRY,
JB."EMP_PREF" AS type,
ARRAY_AGG( concat(SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list,
ARRAY_AGG(SK."ID") as skill_ids
FROM "JOB_LISTINGS" AS JB
INNER JOIN "COMPANIES" AS C ON C."ID"=JB."COMPANY"
INNER JOIN "CITIES" AS CT ON JB."LOCATION"=CT."ID"
LEFT JOIN "JOB_REQ" AS JQ ON JQ."JOB_ID"=JB."ID"
LEFT JOIN "SKILLS" AS SK ON SK."ID"=JQ."REQ_ID"
GROUP BY ID,COMPANY_ID,DESCRIPTION,COMPANY_NAME,CITY,COUNTRY
) as res
where array_to_string(skill_list, ',') ilike '%{t}%';
""".format(t=term.lower())
cursor.execute(statement)
results = cursor.fetchall()
return(results)
def search_jobs_by_skill_ids(ids):
key = []
for ii in ids.split(","):
key.append(int(ii))
if len(key)==1:
key = "["+str(key[0]) + "]"
else:
key = list(key)
connection = db.connect(url)
cursor = connection.cursor()
statement = """select * from (
SELECT
JB."ID" ID,
JB."COMPANY" AS COMPANY_ID,
JB."DESCRIPTION" DESCRIPTION,
C."NAME" AS COMPANY_NAME,
CT."NAME" AS CITY,
CT."COUNTRY" AS COUNTRY,
JB."EMP_PREF" AS type,
ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list,
ARRAY_AGG(SK."ID") as skill_ids
FROM "JOB_LISTINGS" AS JB
INNER JOIN "COMPANIES" AS C ON C."ID"=JB."COMPANY"
INNER JOIN "CITIES" AS CT ON JB."LOCATION"=CT."ID"
LEFT JOIN "JOB_REQ" AS JQ ON JQ."JOB_ID"=JB."ID"
LEFT JOIN "SKILLS" AS SK ON SK."ID"=JQ."REQ_ID"
GROUP BY ID,COMPANY_ID,DESCRIPTION,COMPANY_NAME,CITY,COUNTRY
) as res
where ARRAY{} && skill_ids
""".format(key)
cursor.execute(statement)
results = cursor.fetchall()
return(results)
def search_jobs(term):
''' job_id,company_id,job_desc,company_name,city,country,skill_list '''
connection = db.connect(url)
cursor = connection.cursor()
statement = """ SELECT
JB."ID" ID,
JB."COMPANY" AS COMPANY_ID,
JB."DESCRIPTION" DESCRIPTION,
C."NAME" AS COMPANY_NAME,
CT."NAME" AS CITY,
CT."COUNTRY" AS COUNTRY,
JB."EMP_PREF" AS type,
ARRAY_AGG( concat(SK."ID",':',SK."NAME", ':' ,SK."DESCRIPTION")) as skill_list,
ARRAY_AGG(SK."ID") as skill_ids
FROM "JOB_LISTINGS" AS JB
INNER JOIN "COMPANIES" AS C ON C."ID"=JB."COMPANY"
INNER JOIN "CITIES" AS CT ON JB."LOCATION"=CT."ID"
LEFT JOIN "JOB_REQ" AS JQ ON JQ."JOB_ID"=JB."ID"
LEFT JOIN "SKILLS" AS SK ON SK."ID"=JQ."REQ_ID"
where JB."DESCRIPTION" ilike '%{}%'
GROUP BY ID,COMPANY_ID,DESCRIPTION,COMPANY_NAME,CITY,COUNTRY """.format(term.lower())
cursor.execute(statement)
results = cursor.fetchall()
return results
#applications
#direction true = job offer by company to student
#direction false = student application to company
def new_application(student_id,job_id,direction):
connection = db.connect(url)
cursor = connection.cursor()
statement = """INSERT INTO "APPLICATIONS" ("JOB_ID","STU_ID","DIRECTION","RESPONSE" )
VALUES ({},{},{},False) """.format(job_id,student_id,direction)
try:
cursor.execute(statement)
#print(len(res))
connection.commit()
flag = True
except Exception as err:
# pass exception to function
#print_psycopg2_exception(err)
if (err.pgcode == "23503"):
print("this JOB or student doesnt exist")
flag=False
if (err.pgcode == "23505"):
print("this student - JOB COMBO EXİSTS")
flag=False
flag=False
#print(id)
finally:
cursor.close()
connection.close()
return flag
def delete_application(student_id,job_id):
connection = db.connect(url)
cursor = connection.cursor()
statement = """DELETE FROM "APPLICATIONS"
WHERE "STU_ID"={} AND "JOB_ID"={};""".format(student_id,job_id)
try:
cursor.execute(statement)
connection.commit()
flag = True
except Exception as err:
# pass exception to function
print_psycopg2_exception(err)
flag=False
finally:
cursor.close()
connection.close()
return flag
def positive_response(student_id,job_id):
connection = db.connect(url)
cursor = connection.cursor()
statement = """UPDATE "APPLICATIONS"
SET "RESPONSE"=True
WHERE "STU_ID"={} AND "JOB_ID"={};""".format(student_id,job_id)
try:
cursor.execute(statement)
connection.commit()
flag = True
except Exception as err:
if (err.pgcode == "23503"):
print("No application with these ids found")
flag=False
finally:
cursor.close()
connection.close()
return flag
def get_applications_of_company(company_id):
connection = db.connect(url)
cursor = connection.cursor()
statement = """SELECT
"JOB_ID",
"STU_ID",
"DIRECTION",
"RESPONSE",
"COMPANY",
"DESCRIPTION",
"LOCATION" AS loc_id
FROM "APPLICATIONS" AS A
INNER JOIN "JOB_LISTINGS" AS JL ON JL."ID" = A."JOB_ID"
where "COMPANY"={} ; """.format(company_id)
cursor.execute(statement)
results = cursor.fetchall()
return results
def get_applications_of_student(stu_id):
connection = db.connect(url)
cursor = connection.cursor()
statement = """SELECT
"JOB_ID",
"STU_ID",
"DIRECTION",
"RESPONSE",
"COMPANY",
"DESCRIPTION",
"LOCATION" AS loc_id,
C."NAME" AS company_name
FROM "APPLICATIONS" AS A
INNER JOIN "JOB_LISTINGS" AS JL ON JL."ID" = A."JOB_ID"
INNER JOIN "COMPANIES" AS C ON C."ID"=JL."COMPANY"
where "STU_ID"={}; """.format(stu_id)
cursor.execute(statement)
results = cursor.fetchall()
return results
if __name__ == "__main__":
#print(add_university("university of paris",17) )
#print(student_login("example@mail.com","ataka"))
#print(update_student_city(1,2))
#student_signup("example@mail.com","asdas","45581222")
#print(search_university("ber"))
#print(update_student_university(4,1))
#print(remove_student_skill(10,12))
#add_skill("Excel3")
#add_student_skill(4,3)
#print(update_student_pref(10,"fulltime"))
#company_signup("AWS@mail.com","aws","password","dummy aws", "01/01/99")
#print(company_login("AWS@mail.com", "password"))
#print(company_login("yanlis@mail.com", "password"))
#print(update_company_city(1,3))
#add_job_listing(1, "a nice company :D")
#print( update_joblisting_location(1,3 ))
#print( search_students_by_skill_ids( 7 ) )
#print(get_user_details(4))
#print(search_jobs_by_skill_ids("1,4"))
#print(delete_application(8,2))
#print(new_application(8,2,False))
print(remove_job_req(4,1))
| 30.870066
| 143
| 0.582024
| 4,479
| 37,538
| 4.775843
| 0.060951
| 0.02861
| 0.045299
| 0.052452
| 0.842036
| 0.813613
| 0.80043
| 0.784769
| 0.764527
| 0.729279
| 0
| 0.008623
| 0.280169
| 37,538
| 1,215
| 144
| 30.895473
| 0.782984
| 0.087751
| 0
| 0.773604
| 0
| 0.008122
| 0.354006
| 0.047727
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055838
| false
| 0.011168
| 0.003046
| 0
| 0.108629
| 0.047716
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3a5d7e2ef66e84a4e97e7fc8850e47f4482e86f
| 2,566
|
py
|
Python
|
day01/tests.py
|
andrewyang96/AdventOfCode2017
|
665d7869fb8677f41c07ca2177b4fe3ea3356fec
|
[
"MIT"
] | null | null | null |
day01/tests.py
|
andrewyang96/AdventOfCode2017
|
665d7869fb8677f41c07ca2177b4fe3ea3356fec
|
[
"MIT"
] | null | null | null |
day01/tests.py
|
andrewyang96/AdventOfCode2017
|
665d7869fb8677f41c07ca2177b4fe3ea3356fec
|
[
"MIT"
] | null | null | null |
from solution import captcha
from solution import captcha2
num = '8231753674683997878179259195565332579493378483264978184143341284379682788518559178822225126625428318115396632681141871952894291898364781898929292614792884883249356728741993224889167928232261325123447569829932951268292953928766755779761837993812528527484487298117739869189415599461746944992651752768158611996715467871381527675219481185217357632445748912726487669881876129192932995282777848496561259839781188719233951619188388532698519298142112853776942545211859134185231768952888462471642851588368445761489225786919778983848113833773768236969923939838755997989537648222217996381757542964844337285428654375499359997792679256881378967852376848812795761118139288152799921176874256377615952758268844139579622754965461884862647423491918913628848748756595463191585555385849335742224855473769411212376446591654846168189278959857681336724221434846946124915271196433144335482787432683848594487648477532498952572515118864475621828118274911298396748213136426357769991314661642612786847135485969889237193822718111269561741563479116832364485724716242176288642371849569664594194674763319687735723517614962575592111286177553435651952853878775431234327919595595658641534765455489561934548474291254387229751472883423413196845162752716925199866591883313638846474321161569892518574346226751366315311145777448781862222126923449311838564685882695889397531413937666673233451216968414288135984394249684886554812761191289485457945866524228415191549168557957633386991931186773843869999284468773866221976873998168818944399661463963658784821796272987155278195355579386768156718813624559264574836134419725187881514665834441359644955768658663278765363789664721736533517774292478192143934318399418188298753351815388561359528533778996296279366394386455544446922653976725113889842749182361253582433319351193862788433113852782596161148992233558144692913791714859516653421917841295749163469751479835492713392861519993791967927773114713888458982796514977717987598165486967786989991998142488631168697963816156374216224386193941566
358543266646516247854435356941566492841213424915682394928959116411457967897614457497279472661229548612777155998358618945222326558176486944695689777438164612198225816646583996426313832539918'
assert captcha('1122') == 3
assert captcha('1111') == 4
assert captcha('1234') == 0
assert captcha('91212129') == 9
print(captcha(num))
assert captcha2('1212') == 6
assert captcha2('1221') == 0
assert captcha2('123425') == 4
assert captcha2('123123') == 12
assert captcha2('12131415') == 4
print(captcha2(num))
| 128.3
| 2,190
| 0.953624
| 52
| 2,566
| 47.057692
| 0.461538
| 0.028606
| 0.014712
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.894506
| 0.021044
| 2,566
| 19
| 2,191
| 135.052632
| 0.079618
| 0
| 0
| 0
| 0
| 0
| 0.869057
| 0.850351
| 0
| 1
| 0
| 0
| 0.642857
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.142857
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e3abf3f082cf648cb8965c1470ee35c80a89c553
| 95,465
|
py
|
Python
|
release/stubs.min/System/Collections/Generic.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/Collections/Generic.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/Collections/Generic.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module System.Collections.Generic calls itself Generic
# from mscorlib,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089,System,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089
# by generator 1.145
# no doc
# no important
# no functions
# classes
class Comparer(object):
# no doc
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return Comparer()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Compare(self,x,y):
"""
Compare(self: Comparer[T],x: T,y: T) -> int
When overridden in a derived class,performs a comparison of two objects of the same type and returns a value indicating whether one object is less than,equal to,or
greater than the other.
x: The first object to compare.
y: The second object to compare.
Returns: A signed integer that indicates the relative values of x and y,as shown in the following table.Value Meaning Less than zero x is less than y.Zero x equals y.Greater than
zero x is greater than y.
"""
pass
@staticmethod
def Create(comparison):
""" Create(comparison: Comparison[T]) -> Comparer[T] """
pass
def __cmp__(self,*args):
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
class Dictionary(object):
"""
Dictionary[TKey,TValue]()
Dictionary[TKey,TValue](capacity: int)
Dictionary[TKey,TValue](comparer: IEqualityComparer[TKey])
Dictionary[TKey,TValue](capacity: int,comparer: IEqualityComparer[TKey])
Dictionary[TKey,TValue](dictionary: IDictionary[TKey,TValue],comparer: IEqualityComparer[TKey])
Dictionary[TKey,TValue](dictionary: IDictionary[TKey,TValue])
"""
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return Dictionary()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Add(self,key,value):
"""
Add(self: Dictionary[TKey,TValue],key: TKey,value: TValue)
Adds the specified key and value to the dictionary.
key: The key of the element to add.
value: The value of the element to add. The value can be null for reference types.
"""
pass
def Clear(self):
"""
Clear(self: Dictionary[TKey,TValue])
Removes all keys and values from the System.Collections.Generic.Dictionary.
"""
pass
def ContainsKey(self,key):
"""
ContainsKey(self: Dictionary[TKey,TValue],key: TKey) -> bool
Determines whether the System.Collections.Generic.Dictionary contains the specified key.
key: The key to locate in the System.Collections.Generic.Dictionary.
Returns: true if the System.Collections.Generic.Dictionary contains an element with the specified key; otherwise,false.
"""
pass
def ContainsValue(self,value):
"""
ContainsValue(self: Dictionary[TKey,TValue],value: TValue) -> bool
Determines whether the System.Collections.Generic.Dictionary contains a specific value.
value: The value to locate in the System.Collections.Generic.Dictionary. The value can be null for reference types.
Returns: true if the System.Collections.Generic.Dictionary contains an element with the specified value; otherwise,false.
"""
pass
def GetEnumerator(self):
"""
GetEnumerator(self: Dictionary[TKey,TValue]) -> Enumerator
Returns an enumerator that iterates through the System.Collections.Generic.Dictionary.
Returns: A System.Collections.Generic.Dictionary structure for the System.Collections.Generic.Dictionary.
"""
pass
def GetObjectData(self,info,context):
"""
GetObjectData(self: Dictionary[TKey,TValue],info: SerializationInfo,context: StreamingContext)
Implements the System.Runtime.Serialization.ISerializable interface and returns the data needed to serialize the System.Collections.Generic.Dictionary instance.
info: A System.Runtime.Serialization.SerializationInfo object that contains the information required to serialize the System.Collections.Generic.Dictionary instance.
context: A System.Runtime.Serialization.StreamingContext structure that contains the source and destination of the serialized stream associated with the
System.Collections.Generic.Dictionary instance.
"""
pass
def OnDeserialization(self,sender):
"""
OnDeserialization(self: Dictionary[TKey,TValue],sender: object)
Implements the System.Runtime.Serialization.ISerializable interface and raises the deserialization event when the deserialization is complete.
sender: The source of the deserialization event.
"""
pass
def Remove(self,key):
"""
Remove(self: Dictionary[TKey,TValue],key: TKey) -> bool
Removes the value with the specified key from the System.Collections.Generic.Dictionary.
key: The key of the element to remove.
Returns: true if the element is successfully found and removed; otherwise,false. This method returns false if key is not found in the System.Collections.Generic.Dictionary.
"""
pass
def TryGetValue(self,key,value):
""" TryGetValue(self: Dictionary[TKey,TValue],key: TKey) -> (bool,TValue) """
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __contains__(self,*args):
"""
__contains__(self: IDictionary[TKey,TValue],key: TKey) -> bool
Determines whether the System.Collections.Generic.IDictionary contains an element with the specified key.
key: The key to locate in the System.Collections.Generic.IDictionary.
Returns: true if the System.Collections.Generic.IDictionary contains an element with the key; otherwise,false.
__contains__(self: IDictionary,key: object) -> bool
Determines whether the System.Collections.IDictionary object contains an element with the specified key.
key: The key to locate in the System.Collections.IDictionary object.
Returns: true if the System.Collections.IDictionary contains an element with the key; otherwise,false.
"""
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self,*args):
""" x.__len__() <==> len(x) """
pass
@staticmethod
def __new__(self,*__args):
"""
__new__(cls: type)
__new__(cls: type,capacity: int)
__new__(cls: type,comparer: IEqualityComparer[TKey])
__new__(cls: type,capacity: int,comparer: IEqualityComparer[TKey])
__new__(cls: type,dictionary: IDictionary[TKey,TValue])
__new__(cls: type,dictionary: IDictionary[TKey,TValue],comparer: IEqualityComparer[TKey])
__new__(cls: type,info: SerializationInfo,context: StreamingContext)
"""
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
"""
__repr__(self: Dictionary[TKey,TValue]) -> str
__repr__(self: Dictionary[K,V]) -> str
"""
pass
def __setitem__(self,*args):
""" x.__setitem__(i,y) <==> x[i]= """
pass
Comparer=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the System.Collections.Generic.IEqualityComparer that is used to determine equality of keys for the dictionary.
Get: Comparer(self: Dictionary[TKey,TValue]) -> IEqualityComparer[TKey]
"""
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of key/value pairs contained in the System.Collections.Generic.Dictionary.
Get: Count(self: Dictionary[TKey,TValue]) -> int
"""
Keys=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection containing the keys in the System.Collections.Generic.Dictionary.
Get: Keys(self: Dictionary[TKey,TValue]) -> KeyCollection
"""
Values=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection containing the values in the System.Collections.Generic.Dictionary.
Get: Values(self: Dictionary[TKey,TValue]) -> ValueCollection
"""
Enumerator=None
KeyCollection=None
ValueCollection=None
class EqualityComparer(object):
# no doc
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return EqualityComparer()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Equals(self,*__args):
"""
Equals(self: EqualityComparer[T],x: T,y: T) -> bool
When overridden in a derived class,determines whether two objects of type T are equal.
x: The first object to compare.
y: The second object to compare.
Returns: true if the specified objects are equal; otherwise,false.
"""
pass
def GetHashCode(self,obj=None):
"""
GetHashCode(self: EqualityComparer[T],obj: T) -> int
When overridden in a derived class,serves as a hash function for the specified object for hashing algorithms and data structures,such as a hash table.
obj: The object for which to get a hash code.
Returns: A hash code for the specified object.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
class ICollection:
# no doc
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return ICollection()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Add(self,item):
"""
Add(self: ICollection[T],item: T)
Adds an item to the System.Collections.Generic.ICollection.
item: The object to add to the System.Collections.Generic.ICollection.
"""
pass
def Clear(self):
"""
Clear(self: ICollection[T])
Removes all items from the System.Collections.Generic.ICollection.
"""
pass
def Contains(self,item):
"""
Contains(self: ICollection[T],item: T) -> bool
Determines whether the System.Collections.Generic.ICollection contains a specific value.
item: The object to locate in the System.Collections.Generic.ICollection.
Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
"""
pass
def CopyTo(self,array,arrayIndex):
""" CopyTo(self: ICollection[T],array: Array[T],arrayIndex: int) """
pass
def Remove(self,item):
"""
Remove(self: ICollection[T],item: T) -> bool
Removes the first occurrence of a specific object from the System.Collections.Generic.ICollection.
item: The object to remove from the System.Collections.Generic.ICollection.
Returns: true if item was successfully removed from the System.Collections.Generic.ICollection; otherwise,false. This method also returns false if item is not found in the original
System.Collections.Generic.ICollection.
"""
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __contains__(self,*args):
""" __contains__[T](enumerable: IEnumerable[T],value: T) -> bool """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of elements contained in the System.Collections.Generic.ICollection.
Get: Count(self: ICollection[T]) -> int
"""
IsReadOnly=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the System.Collections.Generic.ICollection is read-only.
Get: IsReadOnly(self: ICollection[T]) -> bool
"""
class IComparer:
# no doc
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return IComparer()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Compare(self,x,y):
"""
Compare(self: IComparer[T],x: T,y: T) -> int
Compares two objects and returns a value indicating whether one is less than,equal to,or greater than the other.
x: The first object to compare.
y: The second object to compare.
Returns: A signed integer that indicates the relative values of x and y,as shown in the following table.Value Meaning Less than zerox is less than y.Zerox equals y.Greater than
zerox is greater than y.
"""
pass
def __cmp__(self,*args):
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
class IDictionary:
# no doc
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return IDictionary()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Add(self,key,value):
"""
Add(self: IDictionary[TKey,TValue],key: TKey,value: TValue)
Adds an element with the provided key and value to the System.Collections.Generic.IDictionary.
key: The object to use as the key of the element to add.
value: The object to use as the value of the element to add.
"""
pass
def ContainsKey(self,key):
"""
ContainsKey(self: IDictionary[TKey,TValue],key: TKey) -> bool
Determines whether the System.Collections.Generic.IDictionary contains an element with the specified key.
key: The key to locate in the System.Collections.Generic.IDictionary.
Returns: true if the System.Collections.Generic.IDictionary contains an element with the key; otherwise,false.
"""
pass
def Remove(self,key):
"""
Remove(self: IDictionary[TKey,TValue],key: TKey) -> bool
Removes the element with the specified key from the System.Collections.Generic.IDictionary.
key: The key of the element to remove.
Returns: true if the element is successfully removed; otherwise,false. This method also returns false if key was not found in the original System.Collections.Generic.IDictionary.
"""
pass
def TryGetValue(self,key,value):
""" TryGetValue(self: IDictionary[TKey,TValue],key: TKey) -> (bool,TValue) """
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __contains__(self,*args):
""" __contains__(self: ICollection[KeyValuePair[TKey,TValue]],item: KeyValuePair[TKey,TValue]) -> bool """
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self,*args):
""" x.__len__() <==> len(x) """
pass
def __setitem__(self,*args):
""" x.__setitem__(i,y) <==> x[i]= """
pass
Keys=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets an System.Collections.Generic.ICollection containing the keys of the System.Collections.Generic.IDictionary.
Get: Keys(self: IDictionary[TKey,TValue]) -> ICollection[TKey]
"""
Values=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets an System.Collections.Generic.ICollection containing the values in the System.Collections.Generic.IDictionary.
Get: Values(self: IDictionary[TKey,TValue]) -> ICollection[TValue]
"""
class IEnumerable:
 """Stub of the .NET System.Collections.Generic.IEnumerable[T] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return IEnumerable()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def GetEnumerator(self):
  """
  GetEnumerator(self: IEnumerable[T]) -> IEnumerator[T]
  Returns an enumerator that iterates through the collection.
  Returns: A System.Collections.Generic.IEnumerator that can be used to iterate through the collection.
  """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
class IEnumerator:
 """Stub of the .NET System.Collections.Generic.IEnumerator[T] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return IEnumerator()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def next(self,*args):
  """ next(self: object) -> object """
  pass
 def __enter__(self,*args):
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self,*args):
  """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __iter__(self,*args):
  """ __iter__[T](self: IEnumerator[T]) -> object """
  pass
 Current=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the element in the collection at the current position of the enumerator.
 Get: Current(self: IEnumerator[T]) -> T
 """
class IEqualityComparer:
 """Stub of the .NET System.Collections.Generic.IEqualityComparer[T] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return IEqualityComparer()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def Equals(self,x,y):
  """
  Equals(self: IEqualityComparer[T],x: T,y: T) -> bool
  Determines whether the specified objects are equal.
  x: The first object of type T to compare.
  y: The second object of type T to compare.
  Returns: true if the specified objects are equal; otherwise,false.
  """
  pass
 def GetHashCode(self,obj):
  """
  GetHashCode(self: IEqualityComparer[T],obj: T) -> int
  Returns a hash code for the specified object.
  obj: The System.Object for which a hash code is to be returned.
  Returns: A hash code for the specified object.
  """
  pass
 def __eq__(self,*args):
  """ x.__eq__(y) <==> x==y """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
class IList:
 """Stub of the .NET System.Collections.Generic.IList[T] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return IList()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def IndexOf(self,item):
  """
  IndexOf(self: IList[T],item: T) -> int
  Determines the index of a specific item in the System.Collections.Generic.IList.
  item: The object to locate in the System.Collections.Generic.IList.
  Returns: The index of item if found in the list; otherwise,-1.
  """
  pass
 def Insert(self,index,item):
  """
  Insert(self: IList[T],index: int,item: T)
  Inserts an item to the System.Collections.Generic.IList at the specified index.
  index: The zero-based index at which item should be inserted.
  item: The object to insert into the System.Collections.Generic.IList.
  """
  pass
 def RemoveAt(self,index):
  """
  RemoveAt(self: IList[T],index: int)
  Removes the System.Collections.Generic.IList item at the specified index.
  index: The zero-based index of the item to remove.
  """
  pass
 def __contains__(self,*args):
  """
  __contains__(self: ICollection[T],item: T) -> bool
  Determines whether the System.Collections.Generic.ICollection contains a specific value.
  item: The object to locate in the System.Collections.Generic.ICollection.
  Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
  """
  pass
 def __getitem__(self,*args):
  """ x.__getitem__(y) <==> x[y] """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __iter__(self,*args):
  """ __iter__(self: IEnumerable) -> object """
  pass
 def __len__(self,*args):
  """ x.__len__() <==> len(x) """
  pass
 def __setitem__(self,*args):
  """ x.__setitem__(i,y) <==> x[i]= """
  pass
class IReadOnlyCollection:
 """Stub of the .NET System.Collections.Generic.IReadOnlyCollection[T] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return IReadOnlyCollection()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def __contains__(self,*args):
  """ __contains__[T](enumerable: IEnumerable[T],value: T) -> bool """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __iter__(self,*args):
  """ __iter__(self: IEnumerable) -> object """
  pass
 Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: Count(self: IReadOnlyCollection[T]) -> int
 """
class IReadOnlyDictionary:
 """Stub of the .NET System.Collections.Generic.IReadOnlyDictionary[TKey,TValue] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return IReadOnlyDictionary()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def ContainsKey(self,key):
  """ ContainsKey(self: IReadOnlyDictionary[TKey,TValue],key: TKey) -> bool """
  pass
 def TryGetValue(self,key,value):
  """ TryGetValue(self: IReadOnlyDictionary[TKey,TValue],key: TKey) -> (bool,TValue) """
  pass
 def __contains__(self,*args):
  """ __contains__[KeyValuePair`2](enumerable: IEnumerable[KeyValuePair[TKey,TValue]],value: KeyValuePair[TKey,TValue]) -> bool """
  pass
 def __getitem__(self,*args):
  """ x.__getitem__(y) <==> x[y] """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __iter__(self,*args):
  """ __iter__(self: IEnumerable) -> object """
  pass
 Keys=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: Keys(self: IReadOnlyDictionary[TKey,TValue]) -> IEnumerable[TKey]
 """
 Values=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: Values(self: IReadOnlyDictionary[TKey,TValue]) -> IEnumerable[TValue]
 """
class IReadOnlyList:
 """Stub of the .NET System.Collections.Generic.IReadOnlyList[T] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return IReadOnlyList()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def __contains__(self,*args):
  """ __contains__[T](enumerable: IEnumerable[T],value: T) -> bool """
  pass
 def __getitem__(self,*args):
  """ x.__getitem__(y) <==> x[y] """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __iter__(self,*args):
  """ __iter__(self: IEnumerable) -> object """
  pass
class ISet:
 """Stub of the .NET System.Collections.Generic.ISet[T] interface (autogenerated; parse-only)."""
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return ISet()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def Add(self,item):
  """
  Add(self: ISet[T],item: T) -> bool
  Adds an element to the current set and returns a value to indicate if the element was successfully added.
  item: The element to add to the set.
  Returns: true if the element is added to the set; false if the element is already in the set.
  """
  pass
 def ExceptWith(self,other):
  """
  ExceptWith(self: ISet[T],other: IEnumerable[T])
  Removes all elements in the specified collection from the current set.
  other: The collection of items to remove from the set.
  """
  pass
 def IntersectWith(self,other):
  """
  IntersectWith(self: ISet[T],other: IEnumerable[T])
  Modifies the current set so that it contains only elements that are also in a specified collection.
  other: The collection to compare to the current set.
  """
  pass
 def IsProperSubsetOf(self,other):
  """
  IsProperSubsetOf(self: ISet[T],other: IEnumerable[T]) -> bool
  Determines whether the current set is a proper (strict) subset of a specified collection.
  other: The collection to compare to the current set.
  Returns: true if the current set is a proper subset of other; otherwise,false.
  """
  pass
 def IsProperSupersetOf(self,other):
  """
  IsProperSupersetOf(self: ISet[T],other: IEnumerable[T]) -> bool
  Determines whether the current set is a proper (strict) superset of a specified collection.
  other: The collection to compare to the current set.
  Returns: true if the current set is a proper superset of other; otherwise,false.
  """
  pass
 def IsSubsetOf(self,other):
  """
  IsSubsetOf(self: ISet[T],other: IEnumerable[T]) -> bool
  Determines whether a set is a subset of a specified collection.
  other: The collection to compare to the current set.
  Returns: true if the current set is a subset of other; otherwise,false.
  """
  pass
 def IsSupersetOf(self,other):
  """
  IsSupersetOf(self: ISet[T],other: IEnumerable[T]) -> bool
  Determines whether the current set is a superset of a specified collection.
  other: The collection to compare to the current set.
  Returns: true if the current set is a superset of other; otherwise,false.
  """
  pass
 def Overlaps(self,other):
  """
  Overlaps(self: ISet[T],other: IEnumerable[T]) -> bool
  Determines whether the current set overlaps with the specified collection.
  other: The collection to compare to the current set.
  Returns: true if the current set and other share at least one common element; otherwise,false.
  """
  pass
 def SetEquals(self,other):
  """
  SetEquals(self: ISet[T],other: IEnumerable[T]) -> bool
  Determines whether the current set and the specified collection contain the same elements.
  other: The collection to compare to the current set.
  Returns: true if the current set is equal to other; otherwise,false.
  """
  pass
 def SymmetricExceptWith(self,other):
  """
  SymmetricExceptWith(self: ISet[T],other: IEnumerable[T])
  Modifies the current set so that it contains only elements that are present either in the current set or in the specified collection,but not both.
  other: The collection to compare to the current set.
  """
  pass
 def UnionWith(self,other):
  """
  UnionWith(self: ISet[T],other: IEnumerable[T])
  Modifies the current set so that it contains all elements that are present in both the current set and in the specified collection.
  other: The collection to compare to the current set.
  """
  pass
 def __add__(self,*args):
  """ x.__add__(y) <==> x+y """
  pass
 def __contains__(self,*args):
  """
  __contains__(self: ICollection[T],item: T) -> bool
  Determines whether the System.Collections.Generic.ICollection contains a specific value.
  item: The object to locate in the System.Collections.Generic.ICollection.
  Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
  """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __iter__(self,*args):
  """ __iter__(self: IEnumerable) -> object """
  pass
 def __len__(self,*args):
  """ x.__len__() <==> len(x) """
  pass
class KeyNotFoundException(SystemException):
 """
 The exception that is thrown when the key specified for accessing an element in a collection does not match any key in the collection.
 KeyNotFoundException()
 KeyNotFoundException(message: str)
 KeyNotFoundException(message: str,innerException: Exception)
 """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return KeyNotFoundException()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 @staticmethod
 def __new__(self,message=None,innerException=None):
  """
  __new__(cls: type)
  __new__(cls: type,message: str)
  __new__(cls: type,message: str,innerException: Exception)
  __new__(cls: type,info: SerializationInfo,context: StreamingContext)
  """
  pass
 # Stub overrides below mirror the .NET exception surface; no documented behavior.
 def __reduce_ex__(self,*args):
  pass
 def __str__(self,*args):
  pass
 # Placeholder for the .NET SerializeObjectState event slot.
 SerializeObjectState=None
class KeyValuePair(object):
 """ KeyValuePair[TKey,TValue](key: TKey,value: TValue) """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return KeyValuePair()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def ToString(self):
  """
  ToString(self: KeyValuePair[TKey,TValue]) -> str
  Returns a string representation of the System.Collections.Generic.KeyValuePair,using the string representations of the key and value.
  Returns: A string representation of the System.Collections.Generic.KeyValuePair,which includes the string representations of the key and value.
  """
  pass
 @staticmethod
 def __new__(self,key,value):
  """
  __new__[KeyValuePair`2]() -> KeyValuePair[TKey,TValue]
  __new__(cls: type,key: TKey,value: TValue)
  """
  pass
 Key=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the key in the key/value pair.
 Get: Key(self: KeyValuePair[TKey,TValue]) -> TKey
 """
 Value=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the value in the key/value pair.
 Get: Value(self: KeyValuePair[TKey,TValue]) -> TValue
 """
class LinkedList(object):
 """
 LinkedList[T]()
 LinkedList[T](collection: IEnumerable[T])
 """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return LinkedList()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def AddAfter(self,node,*__args):
  """
  AddAfter(self: LinkedList[T],node: LinkedListNode[T],newNode: LinkedListNode[T])
  Adds the specified new node after the specified existing node in the System.Collections.Generic.LinkedList.
  node: The System.Collections.Generic.LinkedListNode after which to insert newNode.
  newNode: The new System.Collections.Generic.LinkedListNode to add to the System.Collections.Generic.LinkedList.
  AddAfter(self: LinkedList[T],node: LinkedListNode[T],value: T) -> LinkedListNode[T]
  Adds a new node containing the specified value after the specified existing node in the System.Collections.Generic.LinkedList.
  node: The System.Collections.Generic.LinkedListNode after which to insert a new System.Collections.Generic.LinkedListNode containing value.
  value: The value to add to the System.Collections.Generic.LinkedList.
  Returns: The new System.Collections.Generic.LinkedListNode containing value.
  """
  pass
 def AddBefore(self,node,*__args):
  """
  AddBefore(self: LinkedList[T],node: LinkedListNode[T],value: T) -> LinkedListNode[T]
  Adds a new node containing the specified value before the specified existing node in the System.Collections.Generic.LinkedList.
  node: The System.Collections.Generic.LinkedListNode before which to insert a new System.Collections.Generic.LinkedListNode containing value.
  value: The value to add to the System.Collections.Generic.LinkedList.
  Returns: The new System.Collections.Generic.LinkedListNode containing value.
  AddBefore(self: LinkedList[T],node: LinkedListNode[T],newNode: LinkedListNode[T])
  Adds the specified new node before the specified existing node in the System.Collections.Generic.LinkedList.
  node: The System.Collections.Generic.LinkedListNode before which to insert newNode.
  newNode: The new System.Collections.Generic.LinkedListNode to add to the System.Collections.Generic.LinkedList.
  """
  pass
 def AddFirst(self,*__args):
  """
  AddFirst(self: LinkedList[T],value: T) -> LinkedListNode[T]
  Adds a new node containing the specified value at the start of the System.Collections.Generic.LinkedList.
  value: The value to add at the start of the System.Collections.Generic.LinkedList.
  Returns: The new System.Collections.Generic.LinkedListNode containing value.
  AddFirst(self: LinkedList[T],node: LinkedListNode[T])
  Adds the specified new node at the start of the System.Collections.Generic.LinkedList.
  node: The new System.Collections.Generic.LinkedListNode to add at the start of the System.Collections.Generic.LinkedList.
  """
  pass
 def AddLast(self,*__args):
  """
  AddLast(self: LinkedList[T],value: T) -> LinkedListNode[T]
  Adds a new node containing the specified value at the end of the System.Collections.Generic.LinkedList.
  value: The value to add at the end of the System.Collections.Generic.LinkedList.
  Returns: The new System.Collections.Generic.LinkedListNode containing value.
  AddLast(self: LinkedList[T],node: LinkedListNode[T])
  Adds the specified new node at the end of the System.Collections.Generic.LinkedList.
  node: The new System.Collections.Generic.LinkedListNode to add at the end of the System.Collections.Generic.LinkedList.
  """
  pass
 def Clear(self):
  """
  Clear(self: LinkedList[T])
  Removes all nodes from the System.Collections.Generic.LinkedList.
  """
  pass
 def Contains(self,value):
  """
  Contains(self: LinkedList[T],value: T) -> bool
  Determines whether a value is in the System.Collections.Generic.LinkedList.
  value: The value to locate in the System.Collections.Generic.LinkedList. The value can be null for reference types.
  Returns: true if value is found in the System.Collections.Generic.LinkedList; otherwise,false.
  """
  pass
 def CopyTo(self,array,index):
  """ CopyTo(self: LinkedList[T],array: Array[T],index: int) """
  pass
 def Find(self,value):
  """
  Find(self: LinkedList[T],value: T) -> LinkedListNode[T]
  Finds the first node that contains the specified value.
  value: The value to locate in the System.Collections.Generic.LinkedList.
  Returns: The first System.Collections.Generic.LinkedListNode that contains the specified value,if found; otherwise,null.
  """
  pass
 def FindLast(self,value):
  """
  FindLast(self: LinkedList[T],value: T) -> LinkedListNode[T]
  Finds the last node that contains the specified value.
  value: The value to locate in the System.Collections.Generic.LinkedList.
  Returns: The last System.Collections.Generic.LinkedListNode that contains the specified value,if found; otherwise,null.
  """
  pass
 def GetEnumerator(self):
  """
  GetEnumerator(self: LinkedList[T]) -> Enumerator
  Returns an enumerator that iterates through the System.Collections.Generic.LinkedList.
  Returns: An System.Collections.Generic.LinkedList for the System.Collections.Generic.LinkedList.
  """
  pass
 def GetObjectData(self,info,context):
  """
  GetObjectData(self: LinkedList[T],info: SerializationInfo,context: StreamingContext)
  Implements the System.Runtime.Serialization.ISerializable interface and returns the data needed to serialize the System.Collections.Generic.LinkedList instance.
  info: A System.Runtime.Serialization.SerializationInfo object that contains the information required to serialize the System.Collections.Generic.LinkedList instance.
  context: A System.Runtime.Serialization.StreamingContext object that contains the source and destination of the serialized stream associated with the
  System.Collections.Generic.LinkedList instance.
  """
  pass
 def OnDeserialization(self,sender):
  """
  OnDeserialization(self: LinkedList[T],sender: object)
  Implements the System.Runtime.Serialization.ISerializable interface and raises the deserialization event when the deserialization is complete.
  sender: The source of the deserialization event.
  """
  pass
 def Remove(self,*__args):
  """
  Remove(self: LinkedList[T],value: T) -> bool
  Removes the first occurrence of the specified value from the System.Collections.Generic.LinkedList.
  value: The value to remove from the System.Collections.Generic.LinkedList.
  Returns: true if the element containing value is successfully removed; otherwise,false. This method also returns false if value was not found in the original
  System.Collections.Generic.LinkedList.
  Remove(self: LinkedList[T],node: LinkedListNode[T])
  Removes the specified node from the System.Collections.Generic.LinkedList.
  node: The System.Collections.Generic.LinkedListNode to remove from the System.Collections.Generic.LinkedList.
  """
  pass
 def RemoveFirst(self):
  """
  RemoveFirst(self: LinkedList[T])
  Removes the node at the start of the System.Collections.Generic.LinkedList.
  """
  pass
 def RemoveLast(self):
  """
  RemoveLast(self: LinkedList[T])
  Removes the node at the end of the System.Collections.Generic.LinkedList.
  """
  pass
 def __contains__(self,*args):
  """
  __contains__(self: ICollection[T],item: T) -> bool
  Determines whether the System.Collections.Generic.ICollection contains a specific value.
  item: The object to locate in the System.Collections.Generic.ICollection.
  Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
  """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __iter__(self,*args):
  """ __iter__(self: IEnumerable) -> object """
  pass
 def __len__(self,*args):
  """ x.__len__() <==> len(x) """
  pass
 @staticmethod
 def __new__(self,collection=None):
  """
  __new__(cls: type)
  __new__(cls: type,collection: IEnumerable[T])
  __new__(cls: type,info: SerializationInfo,context: StreamingContext)
  """
  pass
 # Stub override mirroring the .NET serialization surface; no documented behavior.
 def __reduce_ex__(self,*args):
  pass
 def __repr__(self,*args):
  """ __repr__(self: object) -> str """
  pass
 Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the number of nodes actually contained in the System.Collections.Generic.LinkedList.
 Get: Count(self: LinkedList[T]) -> int
 """
 First=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the first node of the System.Collections.Generic.LinkedList.
 Get: First(self: LinkedList[T]) -> LinkedListNode[T]
 """
 Last=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the last node of the System.Collections.Generic.LinkedList.
 Get: Last(self: LinkedList[T]) -> LinkedListNode[T]
 """
 # Placeholder for the nested LinkedList[T].Enumerator value type.
 Enumerator=None
class LinkedListNode(object):
 """ LinkedListNode[T](value: T) """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return LinkedListNode()
 # NOTE(review): ZZZ() is called without its 'self' argument; this would raise
 # TypeError if the module were executed — these stubs appear to be parse-only.
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 @staticmethod
 def __new__(self,value):
  """ __new__(cls: type,value: T) """
  pass
 List=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the System.Collections.Generic.LinkedList that the System.Collections.Generic.LinkedListNode belongs to.
 Get: List(self: LinkedListNode[T]) -> LinkedList[T]
 """
 Next=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the next node in the System.Collections.Generic.LinkedList.
 Get: Next(self: LinkedListNode[T]) -> LinkedListNode[T]
 """
 Previous=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the previous node in the System.Collections.Generic.LinkedList.
 Get: Previous(self: LinkedListNode[T]) -> LinkedListNode[T]
 """
 Value=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the value contained in the node.
 Get: Value(self: LinkedListNode[T]) -> T
 Set: Value(self: LinkedListNode[T])=value
 """
class List(object):
"""
List[T]()
List[T](capacity: int)
List[T](collection: IEnumerable[T])
"""
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return List()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Add(self,item):
"""
Add(self: List[T],item: T)
Adds an object to the end of the System.Collections.Generic.List.
item: The object to be added to the end of the System.Collections.Generic.List. The value can be null for reference types.
"""
pass
def AddRange(self,collection):
"""
AddRange(self: List[T],collection: IEnumerable[T])
Adds the elements of the specified collection to the end of the System.Collections.Generic.List.
collection: The collection whose elements should be added to the end of the System.Collections.Generic.List. The collection itself cannot be null,but it can contain elements that are
null,if type T is a reference type.
"""
pass
def AsReadOnly(self):
"""
AsReadOnly(self: List[T]) -> ReadOnlyCollection[T]
Returns a read-only System.Collections.Generic.IList wrapper for the current collection.
Returns: A System.Collections.ObjectModel.ReadOnlyCollection that acts as a read-only wrapper around the current System.Collections.Generic.List.
"""
pass
def BinarySearch(self,*__args):
"""
BinarySearch(self: List[T],index: int,count: int,item: T,comparer: IComparer[T]) -> int
Searches a range of elements in the sorted System.Collections.Generic.List for an element using the specified comparer and returns the zero-based index of the element.
index: The zero-based starting index of the range to search.
count: The length of the range to search.
item: The object to locate. The value can be null for reference types.
comparer: The System.Collections.Generic.IComparer implementation to use when comparing elements,or null to use the default comparer System.Collections.Generic.Comparer.
Returns: The zero-based index of item in the sorted System.Collections.Generic.List,if item is found; otherwise,a negative number that is the bitwise complement of the index of
the next element that is larger than item or,if there is no larger element,the bitwise complement of System.Collections.Generic.List.
BinarySearch(self: List[T],item: T) -> int
Searches the entire sorted System.Collections.Generic.List for an element using the default comparer and returns the zero-based index of the element.
item: The object to locate. The value can be null for reference types.
Returns: The zero-based index of item in the sorted System.Collections.Generic.List,if item is found; otherwise,a negative number that is the bitwise complement of the index of
the next element that is larger than item or,if there is no larger element,the bitwise complement of System.Collections.Generic.List.
BinarySearch(self: List[T],item: T,comparer: IComparer[T]) -> int
Searches the entire sorted System.Collections.Generic.List for an element using the specified comparer and returns the zero-based index of the element.
item: The object to locate. The value can be null for reference types.
comparer: The System.Collections.Generic.IComparer implementation to use when comparing elements.-or-null to use the default comparer System.Collections.Generic.Comparer.
Returns: The zero-based index of item in the sorted System.Collections.Generic.List,if item is found; otherwise,a negative number that is the bitwise complement of the index of
the next element that is larger than item or,if there is no larger element,the bitwise complement of System.Collections.Generic.List.
"""
pass
def Clear(self):
"""
Clear(self: List[T])
Removes all elements from the System.Collections.Generic.List.
"""
pass
def Contains(self,item):
"""
Contains(self: List[T],item: T) -> bool
Determines whether an element is in the System.Collections.Generic.List.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
Returns: true if item is found in the System.Collections.Generic.List; otherwise,false.
"""
pass
def ConvertAll(self,converter):
""" ConvertAll[TOutput](self: List[T],converter: Converter[T,TOutput]) -> List[TOutput] """
pass
def CopyTo(self,*__args):
""" CopyTo(self: List[T],index: int,array: Array[T],arrayIndex: int,count: int)CopyTo(self: List[T],array: Array[T])CopyTo(self: List[T],array: Array[T],arrayIndex: int) """
pass
def Exists(self,match):
"""
Exists(self: List[T],match: Predicate[T]) -> bool
Determines whether the System.Collections.Generic.List contains elements that match the conditions defined by the specified predicate.
match: The System.Predicate delegate that defines the conditions of the elements to search for.
Returns: true if the System.Collections.Generic.List contains one or more elements that match the conditions defined by the specified predicate; otherwise,false.
"""
pass
def Find(self,match):
"""
Find(self: List[T],match: Predicate[T]) -> T
Searches for an element that matches the conditions defined by the specified predicate,and returns the first occurrence within the entire System.Collections.Generic.List.
match: The System.Predicate delegate that defines the conditions of the element to search for.
Returns: The first element that matches the conditions defined by the specified predicate,if found; otherwise,the default value for type T.
"""
pass
def FindAll(self,match):
"""
FindAll(self: List[T],match: Predicate[T]) -> List[T]
Retrieves all the elements that match the conditions defined by the specified predicate.
match: The System.Predicate delegate that defines the conditions of the elements to search for.
Returns: A System.Collections.Generic.List containing all the elements that match the conditions defined by the specified predicate,if found; otherwise,an empty
System.Collections.Generic.List.
"""
pass
def FindIndex(self,*__args):
"""
FindIndex(self: List[T],match: Predicate[T]) -> int
Searches for an element that matches the conditions defined by the specified predicate,and returns the zero-based index of the first occurrence within the entire
System.Collections.Generic.List.
match: The System.Predicate delegate that defines the conditions of the element to search for.
FindIndex(self: List[T],startIndex: int,match: Predicate[T]) -> int
Searches for an element that matches the conditions defined by the specified predicate,and returns the zero-based index of the first occurrence within the range of
elements in the System.Collections.Generic.List that extends from the specified index to the last element.
startIndex: The zero-based starting index of the search.
match: The System.Predicate delegate that defines the conditions of the element to search for.
FindIndex(self: List[T],startIndex: int,count: int,match: Predicate[T]) -> int
Searches for an element that matches the conditions defined by the specified predicate,and returns the zero-based index of the first occurrence within the range of
elements in the System.Collections.Generic.List that starts at the specified index and contains the specified number of elements.
startIndex: The zero-based starting index of the search.
count: The number of elements in the section to search.
match: The System.Predicate delegate that defines the conditions of the element to search for.
"""
pass
def FindLast(self,match):
"""
FindLast(self: List[T],match: Predicate[T]) -> T
Searches for an element that matches the conditions defined by the specified predicate,and returns the last occurrence within the entire System.Collections.Generic.List.
match: The System.Predicate delegate that defines the conditions of the element to search for.
Returns: The last element that matches the conditions defined by the specified predicate,if found; otherwise,the default value for type T.
"""
pass
def FindLastIndex(self,*__args):
"""
FindLastIndex(self: List[T],match: Predicate[T]) -> int
Searches for an element that matches the conditions defined by the specified predicate,and returns the zero-based index of the last occurrence within the entire
System.Collections.Generic.List.
match: The System.Predicate delegate that defines the conditions of the element to search for.
FindLastIndex(self: List[T],startIndex: int,match: Predicate[T]) -> int
Searches for an element that matches the conditions defined by the specified predicate,and returns the zero-based index of the last occurrence within the range of elements
in the System.Collections.Generic.List that extends from the first element to the specified index.
startIndex: The zero-based starting index of the backward search.
match: The System.Predicate delegate that defines the conditions of the element to search for.
FindLastIndex(self: List[T],startIndex: int,count: int,match: Predicate[T]) -> int
Searches for an element that matches the conditions defined by the specified predicate,and returns the zero-based index of the last occurrence within the range of elements
in the System.Collections.Generic.List that contains the specified number of elements and ends at the specified index.
startIndex: The zero-based starting index of the backward search.
count: The number of elements in the section to search.
match: The System.Predicate delegate that defines the conditions of the element to search for.
"""
pass
def ForEach(self,action):
"""
ForEach(self: List[T],action: Action[T])
Performs the specified action on each element of the System.Collections.Generic.List.
action: The System.Action delegate to perform on each element of the System.Collections.Generic.List.
"""
pass
def GetEnumerator(self):
"""
GetEnumerator(self: List[T]) -> Enumerator
Returns an enumerator that iterates through the System.Collections.Generic.List.
Returns: A System.Collections.Generic.List for the System.Collections.Generic.List.
"""
pass
def GetRange(self,index,count):
"""
GetRange(self: List[T],index: int,count: int) -> List[T]
Creates a shallow copy of a range of elements in the source System.Collections.Generic.List.
index: The zero-based System.Collections.Generic.List index at which the range starts.
count: The number of elements in the range.
Returns: A shallow copy of a range of elements in the source System.Collections.Generic.List.
"""
pass
def IndexOf(self,item,index=None,count=None):
"""
IndexOf(self: List[T],item: T) -> int
Searches for the specified object and returns the zero-based index of the first occurrence within the entire System.Collections.Generic.List.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
IndexOf(self: List[T],item: T,index: int) -> int
Searches for the specified object and returns the zero-based index of the first occurrence within the range of elements in the System.Collections.Generic.List that extends
from the specified index to the last element.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
index: The zero-based starting index of the search. 0 (zero) is valid in an empty list.
Returns: The zero-based index of the first occurrence of item within the range of elements in the System.Collections.Generic.List that extends from index to the last element,if
IndexOf(self: List[T],item: T,index: int,count: int) -> int
Searches for the specified object and returns the zero-based index of the first occurrence within the range of elements in the System.Collections.Generic.List that starts
at the specified index and contains the specified number of elements.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
index: The zero-based starting index of the search. 0 (zero) is valid in an empty list.
count: The number of elements in the section to search.
Returns: The zero-based index of the first occurrence of item within the range of elements in the System.Collections.Generic.List that starts at index and contains count number of
"""
pass
def Insert(self,index,item):
"""
Insert(self: List[T],index: int,item: T)
Inserts an element into the System.Collections.Generic.List at the specified index.
index: The zero-based index at which item should be inserted.
item: The object to insert. The value can be null for reference types.
"""
pass
def InsertRange(self,index,collection):
"""
InsertRange(self: List[T],index: int,collection: IEnumerable[T])
Inserts the elements of a collection into the System.Collections.Generic.List at the specified index.
index: The zero-based index at which the new elements should be inserted.
collection: The collection whose elements should be inserted into the System.Collections.Generic.List. The collection itself cannot be null,but it can contain elements that are null,
if type T is a reference type.
"""
pass
def LastIndexOf(self,item,index=None,count=None):
"""
LastIndexOf(self: List[T],item: T) -> int
Searches for the specified object and returns the zero-based index of the last occurrence within the entire System.Collections.Generic.List.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
LastIndexOf(self: List[T],item: T,index: int) -> int
Searches for the specified object and returns the zero-based index of the last occurrence within the range of elements in the System.Collections.Generic.List that extends
from the first element to the specified index.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
index: The zero-based starting index of the backward search.
Returns: The zero-based index of the last occurrence of item within the range of elements in the System.Collections.Generic.List that extends from the first element to index,if
LastIndexOf(self: List[T],item: T,index: int,count: int) -> int
Searches for the specified object and returns the zero-based index of the last occurrence within the range of elements in the System.Collections.Generic.List that contains
the specified number of elements and ends at the specified index.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
index: The zero-based starting index of the backward search.
count: The number of elements in the section to search.
Returns: The zero-based index of the last occurrence of item within the range of elements in the System.Collections.Generic.List that contains count number of elements and ends at
"""
pass
def Remove(self,item):
"""
Remove(self: List[T],item: T) -> bool
Removes the first occurrence of a specific object from the System.Collections.Generic.List.
item: The object to remove from the System.Collections.Generic.List. The value can be null for reference types.
Returns: true if item is successfully removed; otherwise,false. This method also returns false if item was not found in the System.Collections.Generic.List.
"""
pass
def RemoveAll(self,match):
"""
RemoveAll(self: List[T],match: Predicate[T]) -> int
Removes all the elements that match the conditions defined by the specified predicate.
match: The System.Predicate delegate that defines the conditions of the elements to remove.
Returns: The number of elements removed from the System.Collections.Generic.List .
"""
pass
def RemoveAt(self,index):
"""
RemoveAt(self: List[T],index: int)
Removes the element at the specified index of the System.Collections.Generic.List.
index: The zero-based index of the element to remove.
"""
pass
def RemoveRange(self,index,count):
"""
RemoveRange(self: List[T],index: int,count: int)
Removes a range of elements from the System.Collections.Generic.List.
index: The zero-based starting index of the range of elements to remove.
count: The number of elements to remove.
"""
pass
def Reverse(self,index=None,count=None):
"""
Reverse(self: List[T],index: int,count: int)
Reverses the order of the elements in the specified range.
index: The zero-based starting index of the range to reverse.
count: The number of elements in the range to reverse.
Reverse(self: List[T])
Reverses the order of the elements in the entire System.Collections.Generic.List.
"""
pass
def Sort(self,*__args):
"""
Sort(self: List[T])
Sorts the elements in the entire System.Collections.Generic.List using the default comparer.
Sort(self: List[T],comparer: IComparer[T])
Sorts the elements in the entire System.Collections.Generic.List using the specified comparer.
comparer: The System.Collections.Generic.IComparer implementation to use when comparing elements,or null to use the default comparer System.Collections.Generic.Comparer.
Sort(self: List[T],index: int,count: int,comparer: IComparer[T])
Sorts the elements in a range of elements in System.Collections.Generic.List using the specified comparer.
index: The zero-based starting index of the range to sort.
count: The length of the range to sort.
comparer: The System.Collections.Generic.IComparer implementation to use when comparing elements,or null to use the default comparer System.Collections.Generic.Comparer.
Sort(self: List[T],comparison: Comparison[T])
Sorts the elements in the entire System.Collections.Generic.List using the specified System.Comparison.
comparison: The System.Comparison to use when comparing elements.
"""
pass
def ToArray(self):
"""
ToArray(self: List[T]) -> Array[T]
Copies the elements of the System.Collections.Generic.List to a new array.
Returns: An array containing copies of the elements of the System.Collections.Generic.List.
"""
pass
def TrimExcess(self):
"""
TrimExcess(self: List[T])
Sets the capacity to the actual number of elements in the System.Collections.Generic.List,if that number is less than a threshold value.
"""
pass
def TrueForAll(self,match):
"""
TrueForAll(self: List[T],match: Predicate[T]) -> bool
Determines whether every element in the System.Collections.Generic.List matches the conditions defined by the specified predicate.
match: The System.Predicate delegate that defines the conditions to check against the elements.
Returns: true if every element in the System.Collections.Generic.List matches the conditions defined by the specified predicate; otherwise,false. If the list has no elements,the
return value is true.
"""
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __contains__(self,*args):
"""
__contains__(self: ICollection[T],item: T) -> bool
Determines whether the System.Collections.Generic.ICollection contains a specific value.
item: The object to locate in the System.Collections.Generic.ICollection.
Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
__contains__(self: IList,value: object) -> bool
Determines whether the System.Collections.IList contains a specific value.
value: The object to locate in the System.Collections.IList.
Returns: true if the System.Object is found in the System.Collections.IList; otherwise,false.
"""
pass
def __delitem__(self,*args):
""" x.__delitem__(y) <==> del x[y]x.__delitem__(y) <==> del x[y]x.__delitem__(y) <==> del x[y] """
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __getslice__(self,*args):
"""
__getslice__(self: List[T],x: int,y: int) -> List[T]
__getslice__(self: List[T],x: int,y: int) -> List[T]
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self,*args):
""" x.__len__() <==> len(x) """
pass
@staticmethod
def __new__(self,*__args):
"""
__new__(cls: type)
__new__(cls: type,capacity: int)
__new__(cls: type,collection: IEnumerable[T])
"""
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
"""
__repr__(self: List[T]) -> str
__repr__(self: List[T]) -> str
"""
pass
def __setitem__(self,*args):
""" x.__setitem__(i,y) <==> x[i]= """
pass
Capacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the total number of elements the internal data structure can hold without resizing.
Get: Capacity(self: List[T]) -> int
Set: Capacity(self: List[T])=value
"""
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of elements actually contained in the System.Collections.Generic.List.
Get: Count(self: List[T]) -> int
"""
Enumerator=None
class Queue(object):
 """
 Stub of System.Collections.Generic.Queue[T], a first-in first-out
 collection of objects.

 Constructor overloads:
  Queue[T]()
  Queue[T](capacity: int)
  Queue[T](collection: IEnumerable[T])
 """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return Queue()
 def Clear(self):
  """Clear(self: Queue[T])
  Removes all objects from the queue."""
  pass
 def Contains(self,item):
  """Contains(self: Queue[T],item: T) -> bool
  True if `item` (null allowed for reference types) is in the queue."""
  pass
 def CopyTo(self,array,arrayIndex):
  """CopyTo(self: Queue[T],array: Array[T],arrayIndex: int)"""
  pass
 def Dequeue(self):
  """Dequeue(self: Queue[T]) -> T
  Removes and returns the object at the beginning of the queue."""
  pass
 def Enqueue(self,item):
  """Enqueue(self: Queue[T],item: T)
  Adds an object (null allowed for reference types) to the end of the queue."""
  pass
 def GetEnumerator(self):
  """GetEnumerator(self: Queue[T]) -> Enumerator
  Returns an enumerator that iterates through the queue."""
  pass
 def Peek(self):
  """Peek(self: Queue[T]) -> T
  Returns the object at the beginning of the queue without removing it."""
  pass
 def ToArray(self):
  """ToArray(self: Queue[T]) -> Array[T]
  Copies the queue's elements to a new array."""
  pass
 def TrimExcess(self):
  """TrimExcess(self: Queue[T])
  Shrinks capacity to the element count when below 90 percent of capacity."""
  pass
 def __contains__(self,*args):
  """__contains__[T](enumerable: IEnumerable[T],value: T) -> bool"""
  pass
 def __init__(self,*args):
  """Initializer stub; see x.__class__.__doc__ for constructor signatures."""
  pass
 def __iter__(self,*args):
  """__iter__(self: IEnumerable) -> object"""
  pass
 def __len__(self,*args):
  """x.__len__() <==> len(x)"""
  pass
 @staticmethod
 def __new__(self,*__args):
  """
  __new__(cls: type)
  __new__(cls: type,capacity: int)
  __new__(cls: type,collection: IEnumerable[T])
  """
  pass
 def __reduce_ex__(self,*args):
  """Pickling helper stub; no behaviour implemented."""
  pass
 def __repr__(self,*args):
  """__repr__(self: object) -> str"""
  pass
 # Stub property mirroring Queue[T].Count (number of elements contained).
 Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Placeholder for the nested Queue[T].Enumerator value type.
 Enumerator=None

# Bug fix: the generated `instance=ZZZ()` line ran inside the class body, where
# ZZZ() raises TypeError (missing the `self` argument) and `Queue` itself is not
# yet bound, so importing the module crashed. Assign the mock instance after the
# class object exists instead; the value matches what ZZZ would have produced.
Queue.instance=Queue()
class SortedDictionary(object):
 """
 Stub of System.Collections.Generic.SortedDictionary[TKey,TValue], a
 collection of key/value pairs kept sorted on the key.

 Constructor overloads:
  SortedDictionary[TKey,TValue]()
  SortedDictionary[TKey,TValue](comparer: IComparer[TKey])
  SortedDictionary[TKey,TValue](dictionary: IDictionary[TKey,TValue])
  SortedDictionary[TKey,TValue](dictionary: IDictionary[TKey,TValue],comparer: IComparer[TKey])
 """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return SortedDictionary()
 def Add(self,key,value):
  """Add(self: SortedDictionary[TKey,TValue],key: TKey,value: TValue)
  Adds an element with the given key and value (value may be null for
  reference types)."""
  pass
 def Clear(self):
  """Clear(self: SortedDictionary[TKey,TValue])
  Removes all elements from the dictionary."""
  pass
 def ContainsKey(self,key):
  """ContainsKey(self: SortedDictionary[TKey,TValue],key: TKey) -> bool
  True if the dictionary contains an element with the specified key."""
  pass
 def ContainsValue(self,value):
  """ContainsValue(self: SortedDictionary[TKey,TValue],value: TValue) -> bool
  True if the dictionary contains an element with the specified value
  (null allowed for reference types)."""
  pass
 def CopyTo(self,array,index):
  """CopyTo(self: SortedDictionary[TKey,TValue],array: Array[KeyValuePair[TKey,TValue]],index: int)"""
  pass
 def GetEnumerator(self):
  """GetEnumerator(self: SortedDictionary[TKey,TValue]) -> Enumerator
  Returns an enumerator over the dictionary's key/value pairs."""
  pass
 def Remove(self,key):
  """Remove(self: SortedDictionary[TKey,TValue],key: TKey) -> bool
  Removes the element with the given key; False when the key is absent."""
  pass
 def TryGetValue(self,key,value):
  """TryGetValue(self: SortedDictionary[TKey,TValue],key: TKey) -> (bool,TValue)"""
  pass
 def __add__(self,*args):
  """x.__add__(y) <==> x+y"""
  pass
 def __contains__(self,*args):
  """Membership test stub backing the `in` operator.
  __contains__(self: IDictionary[TKey,TValue],key: TKey) -> bool
  __contains__(self: IDictionary,key: object) -> bool
  True when the dictionary contains an element with the given key."""
  pass
 def __getitem__(self,*args):
  """x.__getitem__(y) <==> x[y]"""
  pass
 def __init__(self,*args):
  """Initializer stub; see x.__class__.__doc__ for constructor signatures."""
  pass
 def __iter__(self,*args):
  """__iter__(self: IEnumerable) -> object"""
  pass
 def __len__(self,*args):
  """x.__len__() <==> len(x)"""
  pass
 @staticmethod
 def __new__(self,*__args):
  """
  __new__(cls: type)
  __new__(cls: type,dictionary: IDictionary[TKey,TValue])
  __new__(cls: type,dictionary: IDictionary[TKey,TValue],comparer: IComparer[TKey])
  __new__(cls: type,comparer: IComparer[TKey])
  """
  pass
 def __reduce_ex__(self,*args):
  """Pickling helper stub; no behaviour implemented."""
  pass
 def __repr__(self,*args):
  """__repr__(self: object) -> str"""
  pass
 def __setitem__(self,*args):
  """x.__setitem__(i,y) <==> x[i]=y"""
  pass
 # Stub property: IComparer[TKey] used to order the elements (read-only).
 Comparer=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Stub property: number of key/value pairs contained (read-only).
 Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Stub property: KeyCollection containing the keys (read-only).
 Keys=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Stub property: ValueCollection containing the values (read-only).
 Values=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Placeholders for nested stub types.
 Enumerator=None
 KeyCollection=None
 ValueCollection=None

# Bug fix: the generated `instance=ZZZ()` line ran inside the class body, where
# ZZZ() raises TypeError (missing the `self` argument) and `SortedDictionary` is
# not yet bound, so importing the module crashed. Assign the mock instance after
# the class object exists; the value matches what ZZZ would have produced.
SortedDictionary.instance=SortedDictionary()
class SortedList(object):
 """
 Stub of System.Collections.Generic.SortedList[TKey,TValue], a collection
 of key/value pairs sorted by key and accessible by key or by index.

 Constructor overloads:
  SortedList[TKey,TValue]()
  SortedList[TKey,TValue](capacity: int)
  SortedList[TKey,TValue](comparer: IComparer[TKey])
  SortedList[TKey,TValue](capacity: int,comparer: IComparer[TKey])
  SortedList[TKey,TValue](dictionary: IDictionary[TKey,TValue])
  SortedList[TKey,TValue](dictionary: IDictionary[TKey,TValue],comparer: IComparer[TKey])
 """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return SortedList()
 def Add(self,key,value):
  """Add(self: SortedList[TKey,TValue],key: TKey,value: TValue)
  Adds an element with the given key and value (value may be null for
  reference types)."""
  pass
 def Clear(self):
  """Clear(self: SortedList[TKey,TValue])
  Removes all elements from the list."""
  pass
 def ContainsKey(self,key):
  """ContainsKey(self: SortedList[TKey,TValue],key: TKey) -> bool
  True if the list contains an element with the specified key."""
  pass
 def ContainsValue(self,value):
  """ContainsValue(self: SortedList[TKey,TValue],value: TValue) -> bool
  True if the list contains an element with the specified value (null
  allowed for reference types)."""
  pass
 def GetEnumerator(self):
  """GetEnumerator(self: SortedList[TKey,TValue]) -> IEnumerator[KeyValuePair[TKey,TValue]]
  Returns an enumerator over the list's key/value pairs."""
  pass
 def IndexOfKey(self,key):
  """IndexOfKey(self: SortedList[TKey,TValue],key: TKey) -> int
  Zero-based index of the key, or -1 when it is not found."""
  pass
 def IndexOfValue(self,value):
  """IndexOfValue(self: SortedList[TKey,TValue],value: TValue) -> int
  Zero-based index of the first occurrence of the value (null allowed for
  reference types), or -1 when it is not found."""
  pass
 def Remove(self,key):
  """Remove(self: SortedList[TKey,TValue],key: TKey) -> bool
  Removes the element with the given key; False when the key is absent."""
  pass
 def RemoveAt(self,index):
  """RemoveAt(self: SortedList[TKey,TValue],index: int)
  Removes the element at the zero-based position `index`."""
  pass
 def TrimExcess(self):
  """TrimExcess(self: SortedList[TKey,TValue])
  Shrinks capacity to the element count when below 90 percent of capacity."""
  pass
 def TryGetValue(self,key,value):
  """TryGetValue(self: SortedList[TKey,TValue],key: TKey) -> (bool,TValue)"""
  pass
 def __add__(self,*args):
  """x.__add__(y) <==> x+y"""
  pass
 def __contains__(self,*args):
  """Membership test stub backing the `in` operator.
  __contains__(self: IDictionary[TKey,TValue],key: TKey) -> bool
  __contains__(self: IDictionary,key: object) -> bool
  True when the collection contains an element with the given key."""
  pass
 def __getitem__(self,*args):
  """x.__getitem__(y) <==> x[y]"""
  pass
 def __init__(self,*args):
  """Initializer stub; see x.__class__.__doc__ for constructor signatures."""
  pass
 def __iter__(self,*args):
  """__iter__(self: IEnumerable) -> object"""
  pass
 def __len__(self,*args):
  """x.__len__() <==> len(x)"""
  pass
 @staticmethod
 def __new__(self,*__args):
  """
  __new__(cls: type)
  __new__(cls: type,capacity: int)
  __new__(cls: type,comparer: IComparer[TKey])
  __new__(cls: type,capacity: int,comparer: IComparer[TKey])
  __new__(cls: type,dictionary: IDictionary[TKey,TValue])
  __new__(cls: type,dictionary: IDictionary[TKey,TValue],comparer: IComparer[TKey])
  """
  pass
 def __reduce_ex__(self,*args):
  """Pickling helper stub; no behaviour implemented."""
  pass
 def __repr__(self,*args):
  """__repr__(self: object) -> str"""
  pass
 def __setitem__(self,*args):
  """x.__setitem__(i,y) <==> x[i]=y"""
  pass
 # Stub property: number of elements the list can contain (read/write).
 Capacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Stub property: IComparer[TKey] for the sorted list (read-only).
 Comparer=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Stub property: number of key/value pairs contained (read-only).
 Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Stub property: IList[TKey] containing the keys (read-only).
 Keys=property(lambda self: object(),lambda self,v: None,lambda self: None)
 # Stub property: IList[TValue] containing the values (read-only).
 Values=property(lambda self: object(),lambda self,v: None,lambda self: None)

# Bug fix: the generated `instance=ZZZ()` line ran inside the class body, where
# ZZZ() raises TypeError (missing the `self` argument) and `SortedList` is not
# yet bound, so importing the module crashed. Assign the mock instance after the
# class object exists; the value matches what ZZZ would have produced.
SortedList.instance=SortedList()
class SortedSet(object):
"""
SortedSet[T]()
SortedSet[T](collection: IEnumerable[T])
SortedSet[T](collection: IEnumerable[T],comparer: IComparer[T])
SortedSet[T](comparer: IComparer[T])
"""
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return SortedSet()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Add(self,item):
"""
Add(self: SortedSet[T],item: T) -> bool
Adds an element to the set and returns a value that indicates if it was successfully added.
item: The element to add to the set.
Returns: true if item is added to the set; otherwise,false.
"""
pass
def Clear(self):
"""
Clear(self: SortedSet[T])
Removes all elements from the set.
"""
pass
def Contains(self,item):
"""
Contains(self: SortedSet[T],item: T) -> bool
Determines whether the set contains a specific element.
item: The element to locate in the set.
Returns: true if the set contains item; otherwise,false.
"""
pass
def CopyTo(self,array,index=None,count=None):
""" CopyTo(self: SortedSet[T],array: Array[T])CopyTo(self: SortedSet[T],array: Array[T],index: int)CopyTo(self: SortedSet[T],array: Array[T],index: int,count: int) """
pass
@staticmethod
def CreateSetComparer(memberEqualityComparer=None):
"""
CreateSetComparer() -> IEqualityComparer[SortedSet[T]]
Returns an System.Collections.IEqualityComparer object that can be used to create a collection that contains individual sets.
Returns: A comparer for creating a collection of sets.
CreateSetComparer(memberEqualityComparer: IEqualityComparer[T]) -> IEqualityComparer[SortedSet[T]]
Returns an System.Collections.IEqualityComparer object,according to a specified comparer,that can be used to create a collection that contains individual sets.
memberEqualityComparer: The comparer to use for creating the returned comparer.
Returns: A comparer for creating a collection of sets.
"""
pass
def ExceptWith(self,other):
"""
ExceptWith(self: SortedSet[T],other: IEnumerable[T])
Removes all elements that are in a specified collection from the current System.Collections.Generic.SortedSet object.
other: The collection of items to remove from the System.Collections.Generic.SortedSet object.
"""
pass
def GetEnumerator(self):
"""
GetEnumerator(self: SortedSet[T]) -> Enumerator
Returns an enumerator that iterates through the System.Collections.Generic.SortedSet.
Returns: An enumerator that iterates through the System.Collections.Generic.SortedSet.
"""
pass
def GetObjectData(self,*args):
"""
GetObjectData(self: SortedSet[T],info: SerializationInfo,context: StreamingContext)
Implements the System.Runtime.Serialization.ISerializable interface and returns the data that you must have to serialize a System.Collections.Generic.SortedSet object.
info: A System.Runtime.Serialization.SerializationInfo object that contains the information that is required to serialize the System.Collections.Generic.SortedSet object.
context: A System.Runtime.Serialization.StreamingContext structure that contains the source and destination of the serialized stream associated with the
System.Collections.Generic.SortedSet object.
"""
pass
def GetViewBetween(self,lowerValue,upperValue):
"""
GetViewBetween(self: SortedSet[T],lowerValue: T,upperValue: T) -> SortedSet[T]
Returns a view of a subset in a System.Collections.Generic.SortedSet.
lowerValue: The lowest desired value in the view.
upperValue: The highest desired value in the view.
Returns: A subset view that contains only the values in the specified range.
"""
pass
def IntersectWith(self,other):
"""
IntersectWith(self: SortedSet[T],other: IEnumerable[T])
Modifies the current System.Collections.Generic.SortedSet object so that it contains only elements that are also in a specified collection.
other: The collection to compare to the current System.Collections.Generic.SortedSet object.
"""
pass
def IsProperSubsetOf(self,other):
"""
IsProperSubsetOf(self: SortedSet[T],other: IEnumerable[T]) -> bool
Determines whether a System.Collections.Generic.SortedSet object is a proper subset of the specified collection.
other: The collection to compare to the current System.Collections.Generic.SortedSet object.
Returns: true if the System.Collections.Generic.SortedSet object is a proper subset of other; otherwise,false.
"""
pass
def IsProperSupersetOf(self,other):
"""
IsProperSupersetOf(self: SortedSet[T],other: IEnumerable[T]) -> bool
Determines whether a System.Collections.Generic.SortedSet object is a proper superset of the specified collection.
other: The collection to compare to the current System.Collections.Generic.SortedSet object.
Returns: true if the System.Collections.Generic.SortedSet object is a proper superset of other; otherwise,false.
"""
pass
def IsSubsetOf(self,other):
"""
IsSubsetOf(self: SortedSet[T],other: IEnumerable[T]) -> bool
Determines whether a System.Collections.Generic.SortedSet object is a subset of the specified collection.
other: The collection to compare to the current System.Collections.Generic.SortedSet object.
Returns: true if the current System.Collections.Generic.SortedSet object is a subset of other; otherwise,false.
"""
pass
def IsSupersetOf(self,other):
"""
IsSupersetOf(self: SortedSet[T],other: IEnumerable[T]) -> bool
Determines whether a System.Collections.Generic.SortedSet object is a superset of the specified collection.
other: The collection to compare to the current System.Collections.Generic.SortedSet object.
Returns: true if the System.Collections.Generic.SortedSet object is a superset of other; otherwise,false.
"""
pass
def OnDeserialization(self,*args):
"""
OnDeserialization(self: SortedSet[T],sender: object)
Implements the System.Runtime.Serialization.ISerializable interface,and raises the deserialization event when the deserialization is completed.
sender: The source of the deserialization event.
"""
pass
def Overlaps(self, other):
    """Overlaps(self: SortedSet[T], other: IEnumerable[T]) -> bool

    Determine whether this SortedSet and *other* share common elements.

    other: the collection to compare with the current SortedSet.
    Returns: True if the set and other share at least one common element,
    otherwise False.
    """
    pass  # auto-generated stub; no Python-side behavior
def Remove(self, item):
    """Remove(self: SortedSet[T], item: T) -> bool

    Remove the specified item from the SortedSet.

    item: the element to remove.
    Returns: True if the element is found and successfully removed,
    otherwise False.
    """
    pass  # auto-generated stub; no Python-side behavior
def RemoveWhere(self, match):
    """RemoveWhere(self: SortedSet[T], match: Predicate[T]) -> int

    Remove every element that satisfies the given predicate.

    match: the delegate defining the conditions of the elements to remove.
    Returns: the number of elements removed from the SortedSet collection.
    """
    pass  # auto-generated stub; no Python-side behavior
def Reverse(self):
    """Reverse(self: SortedSet[T]) -> IEnumerable[T]

    Produce an IEnumerable that iterates over the SortedSet in reverse
    order.

    Returns: an enumerator that walks the SortedSet in reverse order.
    """
    pass  # auto-generated stub; no Python-side behavior
def SetEquals(self, other):
    """SetEquals(self: SortedSet[T], other: IEnumerable[T]) -> bool

    Determine whether this SortedSet and *other* contain exactly the same
    elements.

    other: the collection to compare with the current SortedSet.
    Returns: True if the set is equal to other, otherwise False.
    """
    pass  # auto-generated stub; no Python-side behavior
def SymmetricExceptWith(self, other):
    """SymmetricExceptWith(self: SortedSet[T], other: IEnumerable[T])

    Mutate this SortedSet so that it contains only elements present in
    either the current set or *other*, but not in both (symmetric
    difference).

    other: the collection to compare with the current SortedSet.
    """
    pass  # auto-generated stub; no Python-side behavior
def TryGetValue(self, equalValue, actualValue):
    """TryGetValue(self: SortedSet[T], equalValue: T) -> (bool, T)

    Search for a value equal to *equalValue*; per the documented call
    shape the underlying call yields a (found, actual_value) pair.

    NOTE(review): the generated signature carries an extra ``actualValue``
    parameter that the documented call shape omits — presumably an
    artifact of the .NET ``out`` parameter; confirm before relying on it.
    """
    pass  # auto-generated stub; no Python-side behavior
def UnionWith(self, other):
    """UnionWith(self: SortedSet[T], other: IEnumerable[T])

    Mutate this SortedSet so that it contains all elements that are
    present in the current object, in the specified collection, or in
    both (set union).

    other: the collection to compare with the current SortedSet.
    """
    # Fix: the generated docstring described intersection semantics
    # ("present in both ... and ..."); SortedSet<T>.UnionWith is a union.
    pass  # auto-generated stub; no Python-side behavior
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __contains__(self,*args):
"""
__contains__(self: ICollection[T],item: T) -> bool
Determines whether the System.Collections.Generic.ICollection contains a specific value.
item: The object to locate in the System.Collections.Generic.ICollection.
Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self,*args):
""" x.__len__() <==> len(x) """
pass
@staticmethod
def __new__(self,*__args):
"""
__new__(cls: type)
__new__(cls: type,comparer: IComparer[T])
__new__(cls: type,collection: IEnumerable[T])
__new__(cls: type,collection: IEnumerable[T],comparer: IComparer[T])
__new__(cls: type,info: SerializationInfo,context: StreamingContext)
"""
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
# Stub placeholders for the read-only .NET SortedSet[T] members: each
# property triple returns a fresh object() from the getter and ignores
# writes/deletes; they exist for introspection, not real behavior.
Comparer = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Gets the System.Collections.Generic.IEqualityComparer object that is used to determine equality for the values in the System.Collections.Generic.SortedSet.
Get: Comparer(self: SortedSet[T]) -> IComparer[T]
"""
# NOTE(review): the prose above says IEqualityComparer while the Get line
# says IComparer[T]; the mismatch looks like a stub-generator slip --
# confirm against the .NET SortedSet<T>.Comparer docs before relying on it.
Count = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Gets the number of elements in the System.Collections.Generic.SortedSet.
Get: Count(self: SortedSet[T]) -> int
"""
Max = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Gets the maximum value in the System.Collections.Generic.SortedSet,as defined by the comparer.
Get: Max(self: SortedSet[T]) -> T
"""
Min = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Gets the minimum value in the System.Collections.Generic.SortedSet,as defined by the comparer.
Get: Min(self: SortedSet[T]) -> T
"""
# Placeholder for the nested SortedSet[T].Enumerator type.
Enumerator = None
class Stack(object):
    """
    Stack[T]()
    Stack[T](capacity: int)
    Stack[T](collection: IEnumerable[T])
    """
    def ZZZ(self):
        """hardcoded/mock instance of the class"""
        return Stack()
    # NOTE(review): ZZZ is invoked below without its required ``self``
    # argument, so executing this class body raises TypeError; presumably
    # the file is meant for static introspection only -- confirm.
    instance = ZZZ()
    """hardcoded/returns an instance of the class"""
    def Clear(self):
        """Clear(self: Stack[T])

        Remove all objects from the Stack.
        """
        pass  # auto-generated stub; no Python-side behavior
    def Contains(self, item):
        """Contains(self: Stack[T], item: T) -> bool

        Determine whether an element is in the Stack.

        item: the object to locate in the Stack; may be null for
        reference types.
        Returns: True if item is found in the Stack, otherwise False.
        """
        pass  # auto-generated stub; no Python-side behavior
    def CopyTo(self, array, arrayIndex):
        """CopyTo(self: Stack[T], array: Array[T], arrayIndex: int)"""
        pass  # auto-generated stub; no Python-side behavior
    def GetEnumerator(self):
        """GetEnumerator(self: Stack[T]) -> Enumerator

        Return an enumerator for the Stack.
        """
        pass  # auto-generated stub; no Python-side behavior
    def Peek(self):
        """Peek(self: Stack[T]) -> T

        Return the object at the top of the Stack without removing it.
        """
        pass  # auto-generated stub; no Python-side behavior
    def Pop(self):
        """Pop(self: Stack[T]) -> T

        Remove and return the object at the top of the Stack.
        """
        pass  # auto-generated stub; no Python-side behavior
    def Push(self, item):
        """Push(self: Stack[T], item: T)

        Insert an object at the top of the Stack.

        item: the object to push; may be null for reference types.
        """
        pass  # auto-generated stub; no Python-side behavior
    def ToArray(self):
        """ToArray(self: Stack[T]) -> Array[T]

        Copy the Stack into a new array.

        Returns: a new array containing copies of the Stack's elements.
        """
        pass  # auto-generated stub; no Python-side behavior
    def TrimExcess(self):
        """TrimExcess(self: Stack[T])

        Shrink capacity to the actual element count when that count is
        below 90 percent of the current capacity.
        """
        pass  # auto-generated stub; no Python-side behavior
    def __contains__(self, *args):
        """__contains__[T](enumerable: IEnumerable[T], value: T) -> bool"""
        pass  # auto-generated stub; no Python-side behavior
    def __init__(self, *args):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        pass
    def __iter__(self, *args):
        """__iter__(self: IEnumerable) -> object"""
        pass  # auto-generated stub; no Python-side behavior
    def __len__(self, *args):
        """x.__len__() <==> len(x)"""
        pass  # auto-generated stub; no Python-side behavior
    @staticmethod
    def __new__(self, *__args):
        """Overloaded constructor stub for Stack[T].

        __new__(cls: type)
        __new__(cls: type, capacity: int)
        __new__(cls: type, collection: IEnumerable[T])
        """
        pass  # auto-generated stub; no Python-side behavior
    def __reduce_ex__(self, *args):
        """Pickle-protocol hook (auto-generated stub; no Python-side body)."""
        pass
    def __repr__(self, *args):
        """__repr__(self: object) -> str"""
        pass  # auto-generated stub; no Python-side behavior
    # Stub placeholder: getter yields a fresh object(); writes are ignored.
    Count = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Gets the number of elements contained in the System.Collections.Generic.Stack.
    Get: Count(self: Stack[T]) -> int
    """
    # Placeholder for the nested Stack[T].Enumerator type.
    Enumerator = None
| 38.997141
| 215
| 0.704824
| 12,628
| 95,465
| 5.190054
| 0.036427
| 0.091562
| 0.123772
| 0.102991
| 0.881904
| 0.847803
| 0.810757
| 0.753387
| 0.718889
| 0.686817
| 0
| 0.000646
| 0.18919
| 95,465
| 2,447
| 216
| 39.013077
| 0.84608
| 0.691154
| 0
| 0.829817
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.421941
| false
| 0.388186
| 0
| 0
| 0.59353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
e3cfd2c29a38414d656baa5a9033ad174a5ed2d8
| 68,626
|
py
|
Python
|
benchmarks/SimResults/combinations_spec_locality/oldstuff/cmp_soplexmcfcalculixgcc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_locality/oldstuff/cmp_soplexmcfcalculixgcc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_locality/oldstuff/cmp_soplexmcfcalculixgcc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.271366,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.415832,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.46466,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.558538,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.967186,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.554708,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.08043,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.327539,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 8.15445,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.276705,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0202474,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.248042,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.149742,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.524747,
'Execution Unit/Register Files/Runtime Dynamic': 0.16999,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.673715,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.35691,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 4.42854,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00216522,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00216522,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00188591,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000730065,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00215106,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00836741,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.02076,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.143951,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.392347,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.488922,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.05435,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.100381,
'L2/Runtime Dynamic': 0.011069,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.15944,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.41608,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0945441,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.094544,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.60772,
'Load Store Unit/Runtime Dynamic': 1.97688,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.23313,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.466259,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0827384,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0841492,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.064606,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.701532,
'Memory Management Unit/Runtime Dynamic': 0.148755,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 27.0945,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.965362,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.040177,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.267353,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.27289,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 8.89248,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0678429,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.255975,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.36617,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.199851,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.322352,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.162713,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.684916,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.172433,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.8056,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0691773,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00838265,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0860248,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0619948,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.155202,
'Execution Unit/Register Files/Runtime Dynamic': 0.0703775,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.198195,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.429107,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.84575,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00173363,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00173363,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00155291,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000624627,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00089056,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00591073,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0150886,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0595972,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.79089,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.188024,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.202419,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.19339,
'Instruction Fetch Unit/Runtime Dynamic': 0.471039,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0433252,
'L2/Runtime Dynamic': 0.00726137,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.67987,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.702655,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0466765,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0466766,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.90029,
'Load Store Unit/Runtime Dynamic': 0.979524,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.115096,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.230193,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.040848,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0413981,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.235704,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0311221,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.461982,
'Memory Management Unit/Runtime Dynamic': 0.0725202,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.994,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.181974,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0112313,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0971536,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.290358,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.66646,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.00404705,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.205867,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0198051,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0950077,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.153244,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0773524,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.325604,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.105624,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.06551,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.00374161,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00398505,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0304153,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0294719,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0341569,
'Execution Unit/Register Files/Runtime Dynamic': 0.0334569,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0650885,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.161385,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.13169,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00111229,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00111229,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000979816,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000385326,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000423366,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00362776,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0102711,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0283321,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.80216,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0920219,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0962285,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.10814,
'Instruction Fetch Unit/Runtime Dynamic': 0.230481,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0587611,
'L2/Runtime Dynamic': 0.0164962,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.92158,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.353745,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0221438,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0221438,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.02614,
'Load Store Unit/Runtime Dynamic': 0.485094,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0546029,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.109205,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0193787,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0202595,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.112052,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0150908,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.30145,
'Memory Management Unit/Runtime Dynamic': 0.0353503,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.1495,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.00984261,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00440627,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.048212,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0624609,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.96157,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0180218,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.216843,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0972951,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.11939,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.192571,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0972035,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.409164,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.12163,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.23224,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0183811,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00500774,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0429603,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0370353,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0613414,
'Execution Unit/Register Files/Runtime Dynamic': 0.0420431,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0950117,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.243071,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.3165,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000554715,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000554715,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000487131,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00019075,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000532015,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00212858,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00517654,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.035603,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.26466,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0729272,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.120924,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.59308,
'Instruction Fetch Unit/Runtime Dynamic': 0.236759,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0399408,
'L2/Runtime Dynamic': 0.00872626,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.46254,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.601365,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0396453,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0396453,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.64975,
'Load Store Unit/Runtime Dynamic': 0.836528,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0977585,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.195517,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0346948,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0352937,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.140808,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0119584,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.356517,
'Memory Management Unit/Runtime Dynamic': 0.0472521,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.461,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0483525,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00597497,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0612752,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.115603,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.56137,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 3.8894059222839283,
'Runtime Dynamic': 3.8894059222839283,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.390553,
'Runtime Dynamic': 0.19349,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 75.0896,
'Peak Power': 108.202,
'Runtime Dynamic': 17.2754,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 74.699,
'Total Cores/Runtime Dynamic': 17.0819,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.390553,
'Total L3s/Runtime Dynamic': 0.19349,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.083151
| 124
| 0.682161
| 8,082
| 68,626
| 5.786439
| 0.067805
| 0.123509
| 0.112903
| 0.093401
| 0.938139
| 0.93012
| 0.917483
| 0.885686
| 0.862464
| 0.841872
| 0
| 0.132225
| 0.224274
| 68,626
| 914
| 125
| 75.083151
| 0.746257
| 0
| 0
| 0.642232
| 0
| 0
| 0.657249
| 0.048086
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5836f7ff98521a535fc00357778e857b31166a19
| 412
|
py
|
Python
|
OpenMatch/models/__init__.py
|
jindavid/OpenMatch
|
a40b9f72735ecfe439b4c0851861a86410331a5b
|
[
"MIT"
] | null | null | null |
OpenMatch/models/__init__.py
|
jindavid/OpenMatch
|
a40b9f72735ecfe439b4c0851861a86410331a5b
|
[
"MIT"
] | null | null | null |
OpenMatch/models/__init__.py
|
jindavid/OpenMatch
|
a40b9f72735ecfe439b4c0851861a86410331a5b
|
[
"MIT"
] | null | null | null |
from OpenMatch.models.bert import Bert
from OpenMatch.models.bert_maxp import BertMaxP
from OpenMatch.models.conv_knrm import ConvKNRM
from OpenMatch.models.knrm import KNRM
from OpenMatch.models.tk import TK
from OpenMatch.models.edrm import EDRM
from OpenMatch.models.bert_global import BertGlobal
from OpenMatch.models.bert_global2 import BertGlobal2
from OpenMatch.models.bert_global_cat import BertGlobalCat
| 45.777778
| 58
| 0.871359
| 60
| 412
| 5.883333
| 0.3
| 0.331445
| 0.484419
| 0.325779
| 0.164306
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005305
| 0.084951
| 412
| 9
| 58
| 45.777778
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
545f293d5878208828f4c2b364c4cca8dc91fcff
| 126
|
py
|
Python
|
simmod/modification/builtin/__init__.py
|
MoritzTaylor/simmod
|
76b2186c39940ce2d08aa36f3d06bfe3640d6c00
|
[
"MIT"
] | 2
|
2021-07-05T14:08:09.000Z
|
2021-10-01T09:48:37.000Z
|
simmod/modification/builtin/__init__.py
|
MoritzTaylor/simmod
|
76b2186c39940ce2d08aa36f3d06bfe3640d6c00
|
[
"MIT"
] | null | null | null |
simmod/modification/builtin/__init__.py
|
MoritzTaylor/simmod
|
76b2186c39940ce2d08aa36f3d06bfe3640d6c00
|
[
"MIT"
] | null | null | null |
from simmod.modification.builtin.builtin_modifier import ActionModifier, ObservationModifier, RewardModifier, BuiltInModifier
| 63
| 125
| 0.896825
| 11
| 126
| 10.181818
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 126
| 1
| 126
| 126
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
548bb9fb4caf74dda261c858c2101a771850fc3c
| 4,206
|
py
|
Python
|
mm/models/shared/audio.py
|
JonasRSV/Friday
|
f959eff95ba7b11525f97099c8f5ea0e325face7
|
[
"MIT"
] | 5
|
2020-11-30T11:58:08.000Z
|
2021-07-03T08:34:33.000Z
|
mm/models/shared/audio.py
|
JonasRSV/Friday
|
f959eff95ba7b11525f97099c8f5ea0e325face7
|
[
"MIT"
] | 3
|
2021-01-18T13:11:43.000Z
|
2021-05-16T08:44:38.000Z
|
mm/models/shared/audio.py
|
JonasRSV/Friday
|
f959eff95ba7b11525f97099c8f5ea0e325face7
|
[
"MIT"
] | 2
|
2021-01-29T10:36:12.000Z
|
2021-05-07T16:47:57.000Z
|
"""This module implements extraction of logmel features from a raw audio signal"""
import tensorflow as tf
def normalize_audio(signal: tf.Tensor):
    """Scale raw 16-bit PCM samples into the range [-1, 1] as float32.

    Args:
        signal: integer tensor of audio samples.

    Returns:
        float32 tensor; 32768.0 == 2**15, the magnitude of the int16 range.
    """
    as_float = tf.cast(signal, tf.float32)
    return as_float / 32768.0
def normalize_mfcc(mfcc: tf.Tensor) -> tf.Tensor:
    """Identity hook: returns the MFCC tensor unchanged.

    Kept as a named seam so a real normalization scheme can be dropped in
    later without touching the callers in mfcc_feature.
    """
    return mfcc
def mfcc_feature(signal: tf.Tensor, coefficients: int,
                 frame_length=1024, frame_step=256,
                 fft_length=1024,
                 sample_rate=44100,
                 lower_edge_hertz=80.0,
                 upper_edge_hertz=7600.0,
                 num_mel_bins=40):
    """Computes 'coefficient' MFCC coefficient from the audio signal

    Args:
        signal: a batch float tensor [batch_size, clip_length] with values in [-1, 1] representing the audio signal
        coefficients: The number of MFCC coefficients to extract
        frame_length: Length of short time FFT frame
        frame_step: Length of frame step
        fft_length: Length of FFT time
        sample_rate: Sample rate of audio signal
        lower_edge_hertz: Lower-bound of frequencies to include in signal
        upper_edge_hertz: Upper-bound of frequencies to include in signal
        num_mel_bins: Bands in the mel spectrum
    """
    # Magnitude spectrogram via short-time Fourier transform.
    stft_frames = tf.signal.stft(signal,
                                 frame_length=frame_length,
                                 frame_step=frame_step,
                                 fft_length=fft_length)
    magnitudes = tf.abs(stft_frames)

    # Linear-frequency bins -> mel-frequency bands.
    # NOTE(review): ".value" is TF1 Dimension API; under TF2 shape[-1] is
    # already an int — confirm the targeted TF version before changing.
    n_bins = stft_frames.shape[-1].value
    mel_matrix = tf.signal.linear_to_mel_weight_matrix(
        num_mel_bins=num_mel_bins, num_spectrogram_bins=n_bins,
        sample_rate=sample_rate,
        lower_edge_hertz=lower_edge_hertz,
        upper_edge_hertz=upper_edge_hertz)
    mel_energies = tf.tensordot(magnitudes, mel_matrix, 1)
    # tensordot drops static shape info; restore it for downstream layers.
    mel_energies.set_shape(
        magnitudes.shape[:-1].concatenate(mel_matrix.shape[-1:]))

    # Log compression; the epsilon avoids log(0) on silent frames.
    log_mel = tf.math.log(mel_energies + 1e-6)

    # Keep only the leading MFCC coefficients.
    return normalize_mfcc(
        tf.signal.mfccs_from_log_mel_spectrograms(log_mel)[..., :coefficients])
def mel_spectrogram_feature(signal: tf.Tensor,
                            frame_length=1024,
                            frame_step=256,
                            fft_length=1024,
                            sample_rate=8000,
                            lower_edge_hertz=80.0,
                            upper_edge_hertz=7600.0,
                            num_mel_bins=40):
    """Computes the mel spectrograms of an audio signal

    Args:
        signal: a batch float tensor [batch_size, clip_length] with values in [-1, 1] representing the audio signal
        coefficients: The number of MFCC coefficients to extract
        frame_length: Length of short time FFT frame
        frame_step: Length of frame step
        fft_length: Length of FFT time
        sample_rate: Sample rate of audio signal
        lower_edge_hertz: Lower-bound of frequencies to include in signal
        upper_edge_hertz: Upper-bound of frequencies to include in signal
        num_mel_bins: Bands in the mel spectrum
    """
    # Magnitude spectrogram via short-time Fourier transform.
    stft_frames = tf.signal.stft(signal,
                                 frame_length=frame_length,
                                 frame_step=frame_step,
                                 fft_length=fft_length)
    magnitudes = tf.abs(stft_frames)

    # Linear-frequency bins -> mel-frequency bands.
    # NOTE(review): ".value" is TF1 Dimension API; under TF2 shape[-1] is
    # already an int — confirm the targeted TF version before changing.
    n_bins = stft_frames.shape[-1].value
    mel_matrix = tf.signal.linear_to_mel_weight_matrix(
        num_mel_bins=num_mel_bins, num_spectrogram_bins=n_bins,
        sample_rate=sample_rate,
        lower_edge_hertz=lower_edge_hertz,
        upper_edge_hertz=upper_edge_hertz)
    mel_energies = tf.tensordot(magnitudes, mel_matrix, 1)
    # tensordot drops static shape info; restore it for downstream layers.
    mel_energies.set_shape(
        magnitudes.shape[:-1].concatenate(mel_matrix.shape[-1:]))
    return mel_energies
| 39.308411
| 115
| 0.659296
| 538
| 4,206
| 4.875465
| 0.182156
| 0.054899
| 0.042699
| 0.051849
| 0.804422
| 0.804422
| 0.804422
| 0.804422
| 0.804422
| 0.804422
| 0
| 0.024021
| 0.277461
| 4,206
| 106
| 116
| 39.679245
| 0.839092
| 0.347836
| 0
| 0.703704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.018519
| 0.037037
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5499eb808f2caff3d7ead892e5ba0349bb36b896
| 3,943
|
py
|
Python
|
2021/05/24/Using Async Functions Inside of Flask Routes/flaskasync/app.py
|
Ujjawal-Rajput/yes
|
d810cdd92651506c17eacbbeac099094bc30323e
|
[
"Unlicense"
] | 492
|
2019-06-25T12:54:31.000Z
|
2022-03-30T12:38:28.000Z
|
2021/05/24/Using Async Functions Inside of Flask Routes/flaskasync/app.py
|
imvickykumar999/youtube_video_code
|
c695b47148a16637c7875c68e6702e8f6037b982
|
[
"Unlicense"
] | 23
|
2019-10-01T01:36:08.000Z
|
2022-02-10T12:46:16.000Z
|
2021/05/24/Using Async Functions Inside of Flask Routes/flaskasync/app.py
|
imvickykumar999/youtube_video_code
|
c695b47148a16637c7875c68e6702e8f6037b982
|
[
"Unlicense"
] | 1,734
|
2019-06-03T06:25:13.000Z
|
2022-03-31T23:57:53.000Z
|
# Email validation: https://eva.pingutil.com/
# Address validation: https://www.lob.com
# Phone number validation: https://veriphone.io/
import asyncio
import httpx
import time
from auth import auth, APIKEY
from flask import Flask, render_template, request
from states import states
app = Flask(__name__)
@app.route('/sync', methods=['GET', 'POST'])
def sync_form():
    """Validate the submitted form with three sequential (blocking) HTTP calls.

    Checks required name fields, then the email, postal address, and phone
    number against external validation APIs, timing the whole POST pass.
    """
    errors = {}
    if request.method == 'POST':
        form = request.form
        started_at = time.time()

        # Required-field checks.
        for field, message in (('firstName', 'First name is required.'),
                               ('lastName', 'Last name is required.')):
            if not form[field]:
                errors[field] = message

        # Email deliverability.
        email_payload = httpx.get(
            f'https://api.eva.pingutil.com/email?email={request.form["email"]}',
            timeout=None).json()
        if 'data' in email_payload and not email_payload['data']['deliverable']:
            errors['email'] = 'Invalid email address'

        # Postal address deliverability.
        data = {
            "primary_line": request.form["address"],
            "secondary_line": request.form["address2"],
            "city": request.form["city"],
            "state": request.form["state"],
            "zip_code": request.form["zip"],
        }
        address_payload = httpx.post(f'https://api.lob.com/v1/us_verifications',
                                     auth=auth, data=data).json()
        if 'deliverability' in address_payload and not address_payload['deliverability'] == 'deliverable':
            errors.update({
                'address': 'Invalid address',
                'address2': 'Invalid address',
                'city': 'Invalid city',
                'state': 'Invalid state',
                'zip': 'Invalid zip',
            })

        # Phone number validity.
        phone_payload = httpx.get(
            f'https://api.veriphone.io/v2/verify?key={APIKEY}&phone={request.form["phoneNumber"]}&default_country=US').json()
        if 'phone_valid' in phone_payload and not phone_payload['phone_valid']:
            errors['phoneNumber'] = 'Invalid phone number'

        # Wall-clock time for the three serialized requests.
        print(time.time() - started_at)
    return render_template('form.html', errors=errors, form=request.form, states=states)
@app.route('/async', methods=['GET', 'POST'])
def async_form_route():
    pass
@app.route('/async', methods=['GET', 'POST']) if False else (lambda f: f)
async def async_form():
    """Validate the submitted form with three concurrent HTTP calls.

    Same checks as sync_form, but the email/address/phone requests are
    issued concurrently with asyncio.gather, timing the whole POST pass.
    """
    errors = {}
    if request.method == 'POST':
        started_at = time.time()

        # Required-field checks.
        for field, message in (('firstName', 'First name is required.'),
                               ('lastName', 'Last name is required.')):
            if not request.form[field]:
                errors[field] = message

        data = {
            "primary_line": request.form["address"],
            "secondary_line": request.form["address2"],
            "city": request.form["city"],
            "state": request.form["state"],
            "zip_code": request.form["zip"],
        }

        # Fire all three validations concurrently.
        async with httpx.AsyncClient() as client:
            email_res, address_res, phone_res = await asyncio.gather(
                client.get(f'https://api.eva.pingutil.com/email?email={request.form["email"]}', timeout=None),
                client.post(f'https://api.lob.com/v1/us_verifications', auth=auth, data=data),
                client.get(f'https://api.veriphone.io/v2/verify?key={APIKEY}&phone={request.form["phoneNumber"]}&default_country=US')
            )

        if 'data' in email_res.json() and not email_res.json()['data']['deliverable']:
            errors['email'] = 'Invalid email address'
        if 'deliverability' in address_res.json() and not address_res.json()['deliverability'] == 'deliverable':
            errors.update({
                'address': 'Invalid address',
                'address2': 'Invalid address',
                'city': 'Invalid city',
                'state': 'Invalid state',
                'zip': 'Invalid zip',
            })
        if 'phone_valid' in phone_res.json() and not phone_res.json()['phone_valid']:
            errors['phoneNumber'] = 'Invalid phone number'

        # Wall-clock time for the three overlapped requests.
        print(time.time() - started_at)
    return render_template('form.html', errors=errors, form=request.form, states=states)
| 39.039604
| 136
| 0.59929
| 461
| 3,943
| 5.036876
| 0.186551
| 0.094746
| 0.023256
| 0.033592
| 0.820844
| 0.815676
| 0.808786
| 0.808786
| 0.808786
| 0.808786
| 0
| 0.002683
| 0.243723
| 3,943
| 101
| 137
| 39.039604
| 0.775989
| 0.03297
| 0
| 0.702703
| 0
| 0.027027
| 0.319948
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013514
| false
| 0
| 0.081081
| 0
| 0.121622
| 0.027027
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49a6a995c752c129376a8dc96a712fdf0d077cce
| 2,212
|
py
|
Python
|
app/game.py
|
jonathanmendoza-tx/tic-tac-toe
|
4d44235cf9efd89730c9046638cfe878dc9f35db
|
[
"MIT"
] | 1
|
2020-11-23T06:29:40.000Z
|
2020-11-23T06:29:40.000Z
|
app/game.py
|
jonathanmendoza-tx/tic-tac-toe
|
4d44235cf9efd89730c9046638cfe878dc9f35db
|
[
"MIT"
] | null | null | null |
app/game.py
|
jonathanmendoza-tx/tic-tac-toe
|
4d44235cf9efd89730c9046638cfe878dc9f35db
|
[
"MIT"
] | null | null | null |
from game_mechanics import *
# Global game state for the interactive loop below.
turn_counter = 0  # total moves played this game
board = clear_board()
# NOTE(review): player_first is never reassigned at module scope (check_player/
# check_ai only rebind it locally), so with this initial value the AI always
# opens — confirm that is intended.
player_first = False
cont = True
def check_player(board, turn_counter, player_first, cont):
    """After the player's move, detect a finished game and offer a rematch.

    Args:
        board: current board state.
        turn_counter: total moves played so far; a game cannot end before 6.
        player_first: whether the human moved first this game.
        cont: continue flag from the main loop.

    Returns:
        (board, cont): a fresh board (if the game ended) and the possibly
        cleared continue flag.

    NOTE(review): the original also rebound turn_counter and player_first
    locally, but those rebindings never reach the caller because only
    (board, cont) are returned — so the turn counter is never reset and the
    starting player never alternates. Fixing that requires returning the
    extra values (an interface change the caller must adopt), so it is only
    flagged here.
    """
    def _finish_round(message, keep_playing):
        # Announce the result, offer a rematch, and hand back a clean board.
        print(message)
        answer = input('Would you like to continue? ([y]es, [n]o): ')
        # Guard against empty input: the original indexed answer[0] and
        # crashed with IndexError on a bare Enter.
        if answer.strip().lower().startswith('n'):
            keep_playing = False
        return clear_board(), keep_playing

    if turn_counter > 5:
        win, draw = check_for_win(board)
        if win:
            board, cont = _finish_round('You win!', cont)
        if draw:
            board, cont = _finish_round('Draw!', cont)
    return board, cont
def check_ai(board, turn_counter, player_first, cont):
    """After the AI's move, detect a finished game and offer a rematch.

    Args:
        board: current board state.
        turn_counter: total moves played so far; a game cannot end before 6.
        player_first: whether the human moved first this game.
        cont: continue flag from the main loop.

    Returns:
        (board, cont): a fresh board (if the game ended) and the possibly
        cleared continue flag.

    NOTE(review): as with check_player, the original rebound turn_counter and
    player_first locally with no effect on the caller; fixing that requires
    widening the return value, so it is only flagged here.
    """
    def _finish_round(message, keep_playing):
        # Announce the result, offer a rematch, and hand back a clean board.
        print(message)
        answer = input('Would you like to continue? ([y]es, [n]o): ')
        # Guard against empty input: the original indexed answer[0] and
        # crashed with IndexError on a bare Enter.
        if answer.strip().lower().startswith('n'):
            keep_playing = False
        return clear_board(), keep_playing

    if turn_counter > 5:
        win, draw = check_for_win(board)
        if win:
            board, cont = _finish_round('You lose :(', cont)
        if draw:
            board, cont = _finish_round('Draw!', cont)
    return board, cont
# Main interactive loop: alternate player and AI moves until the user quits.
# NOTE(review): player_first is never toggled here (check_player/check_ai do
# not return it), so the same branch runs every game; turn_counter is likewise
# never reset between games, so end-of-game prompts fire every turn after the
# sixth move of the first game — confirm against intended behavior.
while cont:
    if player_first:
        display_board(board)
        display_visual_aid()
        move = input('Make your move (0-8): ')
        board = player_move(board, int(move))
        print(f'\n\n')
        turn_counter += 1
        board, cont = check_player(board, turn_counter, player_first, cont)
        # AI responds after the player's move has been checked.
        board = ai_move(board)
        print(f'\n\n')
        turn_counter += 1
        board, cont = check_ai(board, turn_counter, player_first, cont)
    else:
        # AI opens; note this branch does not call display_board before the
        # player's input, unlike the branch above.
        board = ai_move(board)
        print(f'\n\n')
        turn_counter += 1
        board, cont = check_ai(board, turn_counter, player_first, cont)
        display_visual_aid()
        move = input('Make your move (0-8): ')
        board = player_move(board, int(move))
        print(f'\n\n')
        turn_counter += 1
        board, cont = check_player(board, turn_counter, player_first, cont)
| 19.75
| 69
| 0.641953
| 330
| 2,212
| 4.115152
| 0.148485
| 0.162003
| 0.070692
| 0.097202
| 0.924153
| 0.924153
| 0.904271
| 0.904271
| 0.891016
| 0.891016
| 0
| 0.011079
| 0.224684
| 2,212
| 111
| 70
| 19.927928
| 0.780758
| 0
| 0
| 0.901235
| 0
| 0
| 0.119855
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024691
| false
| 0
| 0.012346
| 0
| 0.061728
| 0.098765
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49ad074dfe1d00b7a81d206d93c67787936e32b3
| 6,213
|
py
|
Python
|
tests/test_standard_convolution.py
|
meder411/spherical-package
|
73d51a25da5891d12e4c04d8ad2e6f1854ffa121
|
[
"BSD-3-Clause"
] | 8
|
2020-06-13T19:49:06.000Z
|
2022-02-24T07:16:02.000Z
|
tests/test_standard_convolution.py
|
meder411/spherical-package
|
73d51a25da5891d12e4c04d8ad2e6f1854ffa121
|
[
"BSD-3-Clause"
] | 4
|
2020-07-03T08:44:13.000Z
|
2021-09-17T12:18:57.000Z
|
tests/test_standard_convolution.py
|
meder411/spherical-package
|
73d51a25da5891d12e4c04d8ad2e6f1854ffa121
|
[
"BSD-3-Clause"
] | 3
|
2020-06-10T23:30:20.000Z
|
2020-12-29T13:50:01.000Z
|
import torch
import torch.nn as nn
import torch.testing as testing
from torch.autograd import gradcheck
import pytest
from spherical_distortion.nn import Convolution, TransposedConvolution
import utils
import parameters as params
# Default test dimensions. NOTE(review): some tests below mix these
# module-level values with params.* — confirm they match parameters.py,
# otherwise the layer/input channel counts can disagree.
bs = 3
in_channels = 2
out_channels = 3
def test_standard_conv_cpu():
    '''Simply compares our result to PyTorch's implementation'''
    input = torch.ones(params.bs, params.in_channels, 8, 8).double()
    input.requires_grad = True
    # Consistency fix: the input tensor above is built from params.in_channels,
    # so the layers must be too. The original used the module-level
    # in_channels/out_channels here, which breaks if params.* differs; the
    # sibling transposed-conv test already uses params.* throughout.
    pytorch_layer = torch.nn.Conv2d(params.in_channels,
                                    params.out_channels,
                                    kernel_size=3,
                                    stride=2,
                                    padding=1,
                                    dilation=2).double()
    my_layer = Convolution(params.in_channels,
                           params.out_channels,
                           kernel_size=3,
                           stride=2,
                           padding=1,
                           dilation=2).double()
    # Set the layer weights to identical constants so any output mismatch
    # comes from the convolution itself.
    pytorch_layer.weight.data.fill_(1)
    pytorch_layer.bias.data.fill_(2)
    my_layer.weight.data.fill_(1)
    my_layer.bias.data.fill_(2)
    # Run a forward pass
    pytorch_output = pytorch_layer(input)
    my_output = my_layer(input)
    # Run a numerical gradient check
    gradcheck_res = gradcheck(my_layer, (input))
    # Ensure our implementation passes gradcheck
    assert gradcheck_res
    # Make sure the forward results match PyTorch's reference
    testing.assert_allclose(my_output, pytorch_output)
def test_standard_transposed_conv_cpu():
    '''Simply compares our result to PyTorch's implementation'''
    x = torch.ones(params.bs, params.in_channels, 5, 5).double()
    x.requires_grad = True
    # Same hyperparameters for both implementations.
    conv_kwargs = dict(kernel_size=3, stride=2, padding=1, dilation=2)
    pytorch_layer = torch.nn.ConvTranspose2d(params.in_channels,
                                             params.out_channels,
                                             **conv_kwargs).double()
    my_layer = TransposedConvolution(params.in_channels,
                                     params.out_channels,
                                     **conv_kwargs).double()
    # Identical constant weights so any mismatch comes from the op itself.
    for layer in (pytorch_layer, my_layer):
        layer.weight.data.fill_(1)
        layer.bias.data.fill_(2)
    # Forward both implementations.
    reference = pytorch_layer(x)
    ours = my_layer(x)
    # Our implementation must pass a numerical gradient check...
    assert gradcheck(my_layer, (x,))
    # ...and agree with PyTorch's forward result.
    testing.assert_allclose(ours, reference)
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# GPU TESTS
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
@pytest.mark.skipif(not torch.cuda.is_available(),
                    reason='CUDA not detected on system')
def test_standard_conv_cuda():
    '''Simply compares our result to PyTorch's implementation'''
    input = torch.ones(params.bs, params.in_channels, 8, 8).double().cuda()
    input.requires_grad = True
    # Consistency fix: the input tensor above is built from params.in_channels,
    # so the layers must be too. The original used the module-level
    # in_channels/out_channels here, which breaks if params.* differs; the
    # sibling transposed-conv test already uses params.* throughout.
    pytorch_layer = torch.nn.Conv2d(params.in_channels,
                                    params.out_channels,
                                    kernel_size=3,
                                    stride=2,
                                    padding=1,
                                    dilation=2).double().cuda()
    my_layer = Convolution(params.in_channels,
                           params.out_channels,
                           kernel_size=3,
                           stride=2,
                           padding=1,
                           dilation=2).double().cuda()
    # Set the layer weights to identical constants so any output mismatch
    # comes from the convolution itself.
    pytorch_layer.weight.data.fill_(1)
    pytorch_layer.bias.data.fill_(2)
    my_layer.weight.data.fill_(1)
    my_layer.bias.data.fill_(2)
    # Run a forward pass
    pytorch_output = pytorch_layer(input)
    my_output = my_layer(input)
    # Run a numerical gradient check
    gradcheck_res = gradcheck(my_layer, (input))
    # Ensure our implementation passes gradcheck
    assert gradcheck_res
    # Make sure the forward results match PyTorch's reference
    testing.assert_allclose(my_output, pytorch_output)
@pytest.mark.skipif(not torch.cuda.is_available(),
                    reason='CUDA not detected on system')
def test_standard_transposed_conv_cuda():
    '''Simply compares our result to PyTorch's implementation'''
    x = torch.ones(params.bs, params.in_channels, 5, 5).double().cuda()
    x.requires_grad = True
    # Same hyperparameters for both implementations.
    conv_kwargs = dict(kernel_size=3, stride=2, padding=1, dilation=2)
    pytorch_layer = torch.nn.ConvTranspose2d(params.in_channels,
                                             params.out_channels,
                                             **conv_kwargs).double().cuda()
    my_layer = TransposedConvolution(params.in_channels,
                                     params.out_channels,
                                     **conv_kwargs).double().cuda()
    # Identical constant weights so any mismatch comes from the op itself.
    for layer in (pytorch_layer, my_layer):
        layer.weight.data.fill_(1)
        layer.bias.data.fill_(2)
    # Forward both implementations.
    reference = pytorch_layer(x)
    ours = my_layer(x)
    # Our implementation must pass a numerical gradient check...
    assert gradcheck(my_layer, (x,))
    # ...and agree with PyTorch's forward result.
    testing.assert_allclose(ours, reference)
| 34.516667
| 78
| 0.522453
| 630
| 6,213
| 4.944444
| 0.142857
| 0.044944
| 0.041091
| 0.053933
| 0.921027
| 0.909791
| 0.909791
| 0.909791
| 0.909791
| 0.909791
| 0
| 0.015738
| 0.355706
| 6,213
| 180
| 79
| 34.516667
| 0.762428
| 0.174151
| 0
| 0.828829
| 0
| 0
| 0.010615
| 0
| 0
| 0
| 0
| 0
| 0.072072
| 1
| 0.036036
| false
| 0
| 0.072072
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3fc3213a0b6cfb10aa3a51aaa466386a6d497b99
| 42
|
py
|
Python
|
app/routes/__init__.py
|
izlatin/WEB-project
|
0804f7b87a5c1698f601484444c93e132bb8d6e6
|
[
"MIT"
] | null | null | null |
app/routes/__init__.py
|
izlatin/WEB-project
|
0804f7b87a5c1698f601484444c93e132bb8d6e6
|
[
"MIT"
] | null | null | null |
app/routes/__init__.py
|
izlatin/WEB-project
|
0804f7b87a5c1698f601484444c93e132bb8d6e6
|
[
"MIT"
] | null | null | null |
from . import auth
from . import main_page
| 21
| 23
| 0.785714
| 7
| 42
| 4.571429
| 0.714286
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 2
| 23
| 21
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3fde806b82ba3dfc0d59b0a39e37669e68ceebf6
| 82,437
|
py
|
Python
|
tests/test_polyaxonfile/test_polyaxonfile.py
|
granularai/polyaxon-schemas
|
017ae74701f21f12f0b25e75379681ea5d8baa9e
|
[
"MIT"
] | null | null | null |
tests/test_polyaxonfile/test_polyaxonfile.py
|
granularai/polyaxon-schemas
|
017ae74701f21f12f0b25e75379681ea5d8baa9e
|
[
"MIT"
] | null | null | null |
tests/test_polyaxonfile/test_polyaxonfile.py
|
granularai/polyaxon-schemas
|
017ae74701f21f12f0b25e75379681ea5d8baa9e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import os
from unittest import TestCase
from flaky import flaky
from polyaxon_schemas.exceptions import PolyaxonfileError
from polyaxon_schemas.ops.build_job import BuildConfig
from polyaxon_schemas.ops.environments.pods import EnvironmentConfig
from polyaxon_schemas.ops.environments.resources import K8SResourcesConfig, PodResourcesConfig
from polyaxon_schemas.ops.experiment.backends import ExperimentBackend
from polyaxon_schemas.ops.experiment.frameworks import ExperimentFramework
from polyaxon_schemas.ops.group.early_stopping_policies import EarlyStoppingConfig
from polyaxon_schemas.ops.group.hptuning import HPTuningConfig, SearchAlgorithms
from polyaxon_schemas.ops.group.matrix import MatrixConfig
from polyaxon_schemas.ops.logging import LoggingConfig
from polyaxon_schemas.ops.run import RunConfig
from polyaxon_schemas.polyaxonfile import PolyaxonFile
from polyaxon_schemas.specs import ExperimentSpecification
from polyaxon_schemas.specs.frameworks import (
HorovodSpecification,
MPISpecification,
MXNetSpecification,
PytorchSpecification,
TensorflowSpecification
)
from polyaxon_schemas.utils import TaskType
class TestPolyaxonfile(TestCase):
def test_missing_version_raises(self):
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath('tests/fixtures/plain/missing_version.yml'))
def test_non_supported_version_raises(self):
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath(
'tests/fixtures/plain/non_supported_file.yml.yml'))
def test_missing_kind_raises(self):
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath('tests/fixtures/plain/missing_kind.yml'))
    def test_simple_file_passes(self):
        """A minimal valid polyaxonfile parses into a single-master experiment spec."""
        plxfile = PolyaxonFile(os.path.abspath('tests/fixtures/plain/simple_file.yml'), )
        spec = plxfile.specification
        spec.apply_context()
        assert spec.version == 1
        assert spec.logging is None
        assert spec.tags is None
        assert spec.params is None
        assert spec.build.image == 'my_image'
        assert spec.run.cmd == 'video_prediction_train --model=DNA --num_masks=1'
        assert spec.environment is None
        assert spec.framework is None
        assert spec.is_experiment
        # Single non-distributed master task.
        assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
        # NOTE(review): duplicate of the is_experiment assert above.
        assert spec.is_experiment is True
    def test_passing_params_overrides_polyaxonfiles(self):
        """Params passed to the constructor override/populate spec.params."""
        plxfile = PolyaxonFile(os.path.abspath('tests/fixtures/plain/simple_file.yml'),
                               params={'foo': 'bar', 'value': 1.1})
        spec = plxfile.specification
        spec.apply_context()
        assert spec.version == 1
        assert spec.logging is None
        assert spec.tags is None
        # The constructor-supplied params land on the spec verbatim.
        assert spec.params == {'foo': 'bar', 'value': 1.1}
        assert spec.build.image == 'my_image'
        assert spec.run.cmd == 'video_prediction_train --model=DNA --num_masks=1'
        assert spec.environment is None
        assert spec.framework is None
        assert spec.is_experiment
        assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
        # NOTE(review): duplicate of the is_experiment assert above.
        assert spec.is_experiment is True
def test_passing_wrong_params_raises(self):
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath('tests/fixtures/plain/simple_file.yml'), params='foo')
    def test_passing_debug_ttl_overrides_polyaxonfiles(self):
        """debug_ttl replaces the run command with a sleep of that many seconds."""
        plxfile = PolyaxonFile(os.path.abspath('tests/fixtures/plain/simple_file.yml'),
                               debug_ttl=100)
        spec = plxfile.specification
        spec.apply_context()
        assert spec.version == 1
        assert spec.logging is None
        assert spec.tags is None
        assert spec.build.image == 'my_image'
        # The fixture's original cmd is overridden by the debug sleep.
        assert spec.run.cmd == 'sleep 100'
        assert spec.environment is None
        assert spec.framework is None
        assert spec.is_experiment
        assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
        # NOTE(review): duplicate of the is_experiment assert above.
        assert spec.is_experiment is True
def test_passing_wrong_debug_ttl_raises(self):
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath('tests/fixtures/plain/simple_file.yml'), debug_ttl='foo')
def test_passing_wrong_kind_with_debug_ttl_raises(self):
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath('tests/fixtures/plain/matrix_file.yml'), debug_ttl=120)
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath(
'tests/fixtures/plain/build_with_context_and_dockerfile.yml'), debug_ttl=120)
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath(
'tests/fixtures/plain/tensorboard_with_custom_environment.yml'), debug_ttl=120)
with self.assertRaises(PolyaxonfileError):
PolyaxonFile(os.path.abspath(
'tests/fixtures/plain/notebook_with_custom_environment.yml'), debug_ttl=120)
    def test_simple_file_framework_passes(self):
        """A simple polyaxonfile with a framework and GPU resources parses."""
        plxfile = PolyaxonFile(os.path.abspath('tests/fixtures/plain/simple_file_framework.yml'))
        spec = plxfile.specification
        spec.apply_context()
        assert spec.version == 1
        assert spec.logging is None
        assert spec.tags is None
        assert spec.build.dockerfile == 'Dockerfile'
        assert spec.run.cmd == 'video_prediction_train --model=DNA --num_masks=1'
        # Unlike the plain simple file, this fixture declares an environment
        # with GPU resources and a framework.
        assert spec.environment is not None
        assert spec.environment.resources.gpu.to_dict() == {'requests': 1, 'limits': 1}
        assert spec.framework is not None
        assert spec.is_experiment is True
    def test_advanced_file_passes(self):
        """A distributed-TensorFlow polyaxonfile parses with the right cluster layout."""
        plxfile = PolyaxonFile(os.path.abspath('tests/fixtures/plain/advanced_file.yml'))
        spec = plxfile.specification
        spec.apply_context()
        assert spec.version == 1
        assert isinstance(spec.logging, LoggingConfig)
        assert spec.is_experiment
        assert isinstance(spec.environment, EnvironmentConfig)
        assert spec.framework == ExperimentFramework.TENSORFLOW
        assert spec.config.tensorflow.n_workers == 5
        assert spec.config.tensorflow.n_ps == 10
        # check properties for returning worker configs and resources
        assert spec.config.tensorflow.worker_resources == {}
        assert spec.config.tensorflow.ps_resources == {}
        cluster, is_distributed = spec.cluster_def
        # No per-task resources were declared, so both lookups are empty.
        assert TensorflowSpecification.get_worker_resources(
            environment=spec.config.tensorflow,
            cluster=cluster,
            is_distributed=is_distributed
        ) == {}
        assert TensorflowSpecification.get_ps_resources(
            environment=spec.config.tensorflow,
            cluster=cluster,
            is_distributed=is_distributed
        ) == {}
        # 1 master + 5 workers + 10 parameter servers, distributed.
        assert spec.cluster_def == ({TaskType.MASTER: 1,
                                     TaskType.WORKER: 5,
                                     TaskType.PS: 10}, True)
def test_advanced_file_with_custom_configs_and_resources_passes(self):
    """Per-replica selectors/labels/annotations/resources parse and aggregate."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/advanced_file_with_custom_configs_and_resources.yml'
    )).specification
    spec.apply_context()
    assert spec.version == 1
    assert isinstance(spec.logging, LoggingConfig)
    assert spec.is_experiment
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.framework == ExperimentFramework.TENSORFLOW
    assert spec.artifact_refs == ['outputs1']
    assert spec.data_refs == ['data1', 'data2']
    assert spec.secret_refs == ['secret1', 'secret2']
    assert spec.config_map_refs == ['config_map1', 'config_map2']
    tensorflow = spec.config.tensorflow
    assert tensorflow.n_workers == 5
    assert tensorflow.n_ps == 10
    master_resources = spec.environment.resources
    assert isinstance(master_resources, PodResourcesConfig)
    assert isinstance(master_resources.cpu, K8SResourcesConfig)
    assert master_resources.cpu.requests == 1
    assert master_resources.cpu.limits == 2
    assert tensorflow.default_worker_node_selector == {'foo': True}
    assert tensorflow.default_worker_labels == {'key1': 'val1'}
    assert tensorflow.default_worker_annotations == {'key1': 'val1'}
    assert tensorflow.worker_resources == {}
    assert tensorflow.worker_affinities == {}
    assert isinstance(tensorflow.worker_node_selectors[3], dict)
    assert tensorflow.worker_node_selectors[3] == {'foo': False}
    assert tensorflow.worker_labels[3] == {'key1': 'val2'}
    assert tensorflow.worker_annotations[3] == {'key1': 'val2'}
    assert isinstance(tensorflow.worker_tolerations[4], list)
    assert tensorflow.worker_tolerations[4] == [{
        'key': 'key',
        'operator': 'Exists',
        'effect': 'NoSchedule',
    }]
    assert isinstance(tensorflow.default_ps_resources, PodResourcesConfig)
    assert isinstance(tensorflow.default_ps_resources.cpu, K8SResourcesConfig)
    assert tensorflow.default_ps_resources.cpu.requests == 2
    assert tensorflow.default_ps_resources.cpu.limits == 4
    assert tensorflow.ps_node_selectors == {}
    assert isinstance(tensorflow.ps_tolerations[7], list)
    assert tensorflow.ps_tolerations[7] == [{
        'operator': 'Exists'
    }]
    assert isinstance(tensorflow.ps_affinities[7], dict)
    assert isinstance(tensorflow.ps_resources[9], PodResourcesConfig)
    assert isinstance(tensorflow.ps_resources[9].memory, K8SResourcesConfig)
    assert tensorflow.ps_resources[9].memory.requests == 512
    assert tensorflow.ps_resources[9].memory.limits == 1024
    # The class-level getters expand defaults + per-index overrides per replica.
    cluster, is_distributed = spec.cluster_def
    worker_node_selectors = TensorflowSpecification.get_worker_node_selectors(
        environment=tensorflow, cluster=cluster, is_distributed=is_distributed)
    worker_labels = TensorflowSpecification.get_worker_labels(
        environment=tensorflow, cluster=cluster, is_distributed=is_distributed)
    worker_annotations = TensorflowSpecification.get_worker_annotations(
        environment=tensorflow, cluster=cluster, is_distributed=is_distributed)
    assert len(worker_node_selectors) == tensorflow.n_workers
    assert len(worker_labels) == tensorflow.n_workers
    assert len(worker_annotations) == tensorflow.n_workers
    assert {selector['foo'] for selector in worker_node_selectors.values()} == {
        tensorflow.default_worker_node_selector['foo'],
        tensorflow.worker_node_selectors[3]['foo']}
    assert TensorflowSpecification.get_worker_resources(
        environment=tensorflow, cluster=cluster, is_distributed=is_distributed) == {}
    ps_resources = TensorflowSpecification.get_ps_resources(
        environment=tensorflow, cluster=cluster, is_distributed=is_distributed)
    assert len(ps_resources) == tensorflow.n_ps
    assert set(ps_resources.values()) == {
        tensorflow.default_ps_resources,
        tensorflow.ps_resources[9]}
    # Totals: 1 master cpu + 9 default-ps cpu + 1 overridden ps; memory comes
    # only from the single overridden ps replica.
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 2 * 9, 'limits': 2 + 4 * 9},
        'memory': {'requests': 512, 'limits': 1024},
    }
    assert spec.cluster_def == (
        {TaskType.MASTER: 1, TaskType.WORKER: 5, TaskType.PS: 10}, True)
def test_wrong_grid_matrix_file_passes(self):
    """A malformed grid-matrix file must be rejected at parse time."""
    with self.assertRaises(PolyaxonfileError):
        PolyaxonFile(
            os.path.abspath('tests/fixtures/plain/wrong_grid_matrix_file.yml'))
@flaky(max_runs=3)
def test_matrix_file_passes(self):
    """A grid-search matrix file builds a group and derives experiment specs."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/plain/matrix_file.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    hptuning = spec.hptuning
    assert isinstance(hptuning.matrix['lr'], MatrixConfig)
    assert isinstance(hptuning.matrix['loss'], MatrixConfig)
    assert hptuning.matrix['lr'].to_dict() == {
        'linspace': {'start': 0.01, 'stop': 0.1, 'num': 5}}
    assert hptuning.matrix['loss'].to_dict() == {
        'values': ['MeanSquaredError', 'AbsoluteDifference']}
    assert spec.matrix_space == 10
    assert isinstance(hptuning, HPTuningConfig)
    assert hptuning.concurrency == 2
    assert spec.search_algorithm == SearchAlgorithms.GRID
    assert hptuning.early_stopping is None
    assert spec.early_stopping == []
    assert spec.experiments_def == {
        'search_algorithm': SearchAlgorithms.GRID,
        'early_stopping': False,
        'concurrency': 2,
    }
    assert spec.build is None
    # Derive a concrete experiment spec from the group's test declaration.
    spec = spec.get_experiment_spec(matrix_declaration=spec.matrix_declaration_test)
    spec.apply_context()
    assert spec.environment is not None
    assert spec.artifact_refs == ['outputs1']
    assert spec.data_refs == ['data1', 'data2']
    # TODO
    # assert spec.outputs.jobs == [111]
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    assert spec.run.cmd == 'train --lr={lr} --loss={loss}'.format(**spec.params)
def test_matrix_file_passes_int_float_types(self):
    """Matrix values keep their int/float Python types through parsing."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/matrix_file_with_int_float_types.yml'
    )).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    hptuning = spec.hptuning
    assert isinstance(hptuning.matrix['param1'], MatrixConfig)
    assert isinstance(hptuning.matrix['param2'], MatrixConfig)
    assert hptuning.matrix['param1'].to_dict() == {'values': [1, 2]}
    assert hptuning.matrix['param2'].to_dict() == {'values': [3.3, 4.4]}
    assert spec.matrix_space == 4
    assert isinstance(hptuning, HPTuningConfig)
    assert hptuning.concurrency == 2
    assert spec.search_algorithm == SearchAlgorithms.GRID
    assert hptuning.early_stopping is None
    assert spec.early_stopping == []
    assert spec.experiments_def == {
        'search_algorithm': SearchAlgorithms.GRID,
        'early_stopping': False,
        'concurrency': 2,
    }
    assert spec.build is None
    # Derive a concrete experiment spec from the group's test declaration.
    spec = spec.get_experiment_spec(matrix_declaration=spec.matrix_declaration_test)
    spec.apply_context()
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    assert spec.run.cmd == 'train --param1={param1} --param2={param2}'.format(
        **spec.params)
@flaky(max_runs=3)
def test_matrix_early_stopping_file_passes(self):
    """A random-search matrix file with an early-stopping policy parses."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/matrix_file_early_stopping.yml')).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    hptuning = spec.hptuning
    assert isinstance(hptuning.matrix['lr'], MatrixConfig)
    assert isinstance(hptuning.matrix['loss'], MatrixConfig)
    assert hptuning.matrix['lr'].to_dict() == {
        'linspace': {'start': 0.01, 'stop': 0.1, 'num': 5}}
    assert hptuning.matrix['loss'].to_dict() == {
        'values': ['MeanSquaredError', 'AbsoluteDifference']}
    assert spec.matrix_space == 10
    assert isinstance(hptuning, HPTuningConfig)
    assert hptuning.concurrency == 2
    assert hptuning.random_search.n_experiments == 5
    assert spec.early_stopping == hptuning.early_stopping
    assert len(hptuning.early_stopping) == 1
    assert isinstance(hptuning.early_stopping[0], EarlyStoppingConfig)
    assert spec.experiments_def == {
        'search_algorithm': SearchAlgorithms.RANDOM,
        'early_stopping': True,
        'concurrency': 2,
        'n_experiments': 5
    }
    assert spec.build is None
    # Derive a concrete experiment spec from the group's test declaration.
    spec = spec.get_experiment_spec(matrix_declaration=spec.matrix_declaration_test)
    spec.apply_context()
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    assert spec.run.cmd == 'train --lr={lr} --loss={loss}'.format(**spec.params)
@flaky(max_runs=3)
def test_matrix_large_n_experiments_ignored_file_passes(self):
    """An n_experiments larger than the matrix space is kept as declared."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/plain/matrix_file_ignored_n_experiments.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    hptuning = spec.hptuning
    assert isinstance(hptuning.matrix['lr'], MatrixConfig)
    assert isinstance(hptuning.matrix['loss'], MatrixConfig)
    assert hptuning.matrix['lr'].to_dict() == {
        'linspace': {'start': 0.01, 'stop': 0.1, 'num': 5}}
    assert hptuning.matrix['loss'].to_dict() == {
        'values': ['MeanSquaredError', 'AbsoluteDifference']}
    assert spec.matrix_space == 10
    assert isinstance(hptuning, HPTuningConfig)
    assert hptuning.concurrency == 2
    assert spec.search_algorithm == SearchAlgorithms.RANDOM
    assert hptuning.random_search.n_experiments == 300
    assert spec.early_stopping == []
    assert spec.experiments_def == {
        'search_algorithm': SearchAlgorithms.RANDOM,
        'early_stopping': False,
        'concurrency': 2,
        'n_experiments': 300
    }
    assert spec.build is None
    # Derive a concrete experiment spec from the group's test declaration.
    spec = spec.get_experiment_spec(matrix_declaration=spec.matrix_declaration_test)
    spec.apply_context()
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    assert spec.run.cmd == 'train --lr={lr} --loss={loss}'.format(**spec.params)
@flaky(max_runs=3)
def test_one_matrix_file_passes(self):
    """A single-matrix file yields a group of 2 referencing an external build."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/plain/one_matrix_file.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    assert spec.hptuning is not None
    assert isinstance(spec.hptuning.matrix['loss'], MatrixConfig)
    assert spec.hptuning.matrix['loss'].to_dict() == {
        'values': ['MeanSquaredError', 'AbsoluteDifference']}
    assert spec.matrix_space == 2
    assert spec.build.ref == 1
    # Derive a concrete experiment spec from the group's test declaration.
    spec = spec.get_experiment_spec(matrix_declaration=spec.matrix_declaration_test)
    spec.apply_context()
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    assert spec.run.cmd == 'train --loss="{}"'.format(spec.params['loss'])
@flaky(max_runs=3)
def test_parallel_distributed_file_passes(self):
    """Smoke test: the typed parallel/distributed fixture parses and applies context.

    Renamed from a duplicated ``test_one_matrix_file_passes`` definition —
    the duplicate name silently shadowed the real test of that name above,
    so the shadowed test never ran.
    """
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/typing/test_parallel_distributed.yaml')
    ).specification
    spec.apply_context()
def test_run_cmd_simple_file_passes(self):
    """A simple run-cmd file parses into build/run configs with tags.

    Renamed from ``test_run_simple_file_passes_sdf`` — the ``_sdf`` suffix
    was accidental junk; the new name matches the fixture file.
    """
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/run_cmd_simple_file.yml')).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert spec.is_experiment
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.run, RunConfig)
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    run = spec.run
    assert isinstance(run, RunConfig)
    assert run.cmd == "video_prediction_train --num_masks=2"
def test_run_simple_file_with_cmds_passes(self):
    """A run section declared as a list of commands stays a list."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/run_cmd_simple_file_list_cmds.yml')).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert spec.is_experiment
    assert isinstance(spec.build, BuildConfig)
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    run = spec.run
    assert isinstance(run, RunConfig)
    assert run.cmd == [
        'video_prediction_train --model=DNA --num_masks=1',
        'video_prediction_train --model=DNA --num_masks=10',
    ]
def test_run_simple_file_with_build_env_passes(self):
    """The build section may carry its own environment (selector/resources)."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/plain/run_cmd_with_build_env.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert spec.is_experiment
    build = spec.build
    assert isinstance(build, BuildConfig)
    assert build.environment is not None
    assert build.environment.node_selector == {'polyaxon.com': 'node_for_build_jobs'}
    assert isinstance(build.environment.resources, PodResourcesConfig)
    assert isinstance(build.environment.affinity, dict)
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    run = spec.run
    assert isinstance(run, RunConfig)
    assert run.cmd == [
        'video_prediction_train --model=DNA --num_masks=1',
        'video_prediction_train --model=DNA --num_masks=10',
    ]
def test_run_matrix_file_passes(self):
    """A matrix run file parses as a group and formats the run command."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/plain/run_cmd_matrix_file.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    assert isinstance(spec.hptuning.matrix['model'], MatrixConfig)
    assert spec.hptuning.matrix['model'].to_dict() == {
        'values': ['CDNA', 'DNA', 'STP']}
    assert spec.matrix_space == 3
    assert isinstance(spec.hptuning, HPTuningConfig)
    params = spec.matrix_declaration_test
    build = spec.build
    assert isinstance(build, BuildConfig)
    assert build.image == 'my_image'
    spec = spec.get_experiment_spec(params)
    spec.apply_context()
    assert spec.environment is None
    assert spec.logging is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    run = spec.run
    assert isinstance(run, RunConfig)
    # params['num_masks'] = 1 if params['model'] == 'DNA' else 10
    params['num_masks'] = 10
    assert run.cmd == ('video_prediction_train --model="{model}" '
                       '--num_masks={num_masks}').format(**params)
def test_run_matrix_sampling_file_passes(self):
    """Sampling-based matrix distributions parse into their dict forms."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/run_cmd_matrix_sampling_file.yml')).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    assert isinstance(spec.build, BuildConfig)
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    matrix = spec.hptuning.matrix
    assert isinstance(matrix['model'], MatrixConfig)
    assert matrix['learning_rate'].to_dict() == {
        'normal': {'loc': 0, 'scale': 0.9}}
    assert matrix['dropout'].to_dict() == {
        'qloguniform': {'high': 0.8, 'low': 0, 'q': 0.1}}
    assert matrix['activation'].to_dict() == {
        'pvalues': [['relu', 0.1], ['sigmoid', 0.8]]}
    assert matrix['model'].to_dict() == {'values': ['CDNA', 'DNA', 'STP']}
    assert isinstance(spec.hptuning, HPTuningConfig)
    params = spec.matrix_declaration_test
    build = spec.build
    assert isinstance(build, BuildConfig)
    assert build.image == 'my_image'
    spec = spec.get_experiment_spec(params)
    spec.apply_context()
    assert spec.environment is None
    assert spec.logging is not None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    run = spec.run
    assert isinstance(run, RunConfig)
    # params['num_masks'] = 1 if params['model'] == 'DNA' else 10
    params['num_masks'] = 10
    assert run.cmd == ('video_prediction_train --model="{model}" '
                       '--num_masks={num_masks}').format(**params)
def test_distributed_tensorflow_passes(self):
    """Distributed TF file: per-replica tolerations/resources and totals."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_tensorflow_file.yml')).specification
    spec.apply_context()
    assert spec.version == 1
    assert isinstance(spec.logging, LoggingConfig)
    assert spec.is_experiment
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.environment.node_selector is None
    assert spec.master_node_selector is None
    assert spec.framework == ExperimentFramework.TENSORFLOW
    tensorflow = spec.config.tensorflow
    assert tensorflow.n_workers == 5
    assert tensorflow.n_ps == 10
    assert spec.environment.tolerations is None
    assert isinstance(spec.environment.affinity, dict)
    assert isinstance(spec.environment.resources, PodResourcesConfig)
    assert isinstance(spec.environment.resources.cpu, K8SResourcesConfig)
    assert spec.environment.resources.cpu.requests == 1
    assert spec.environment.resources.cpu.limits == 2
    assert tensorflow.default_worker_node_selector is None
    assert tensorflow.default_worker_affinity is None
    assert isinstance(tensorflow.default_worker_tolerations, list)
    default_worker = tensorflow.default_worker_resources
    assert isinstance(default_worker, PodResourcesConfig)
    assert isinstance(default_worker.cpu, K8SResourcesConfig)
    assert default_worker.cpu.requests == 3
    assert default_worker.cpu.limits == 3
    assert isinstance(default_worker.memory, K8SResourcesConfig)
    assert default_worker.memory.requests == 256
    assert default_worker.memory.limits == 256
    assert tensorflow.worker_tolerations[2] == [{'operator': 'Exists'}]
    assert isinstance(tensorflow.worker_resources[3], PodResourcesConfig)
    assert isinstance(tensorflow.worker_resources[3].memory, K8SResourcesConfig)
    assert tensorflow.worker_resources[3].memory.requests == 300
    assert tensorflow.worker_resources[3].memory.limits == 300
    assert tensorflow.default_ps_node_selector is None
    assert tensorflow.default_ps_affinity is None
    assert isinstance(tensorflow.default_ps_tolerations, list)
    default_ps = tensorflow.default_ps_resources
    assert isinstance(default_ps, PodResourcesConfig)
    assert isinstance(default_ps.cpu, K8SResourcesConfig)
    assert default_ps.cpu.requests == 2
    assert default_ps.cpu.limits == 4
    assert isinstance(tensorflow.ps_resources[9], PodResourcesConfig)
    assert isinstance(tensorflow.ps_resources[9].memory, K8SResourcesConfig)
    assert tensorflow.ps_resources[9].memory.requests == 512
    assert tensorflow.ps_resources[9].memory.limits == 1024
    # Expand per-replica configs through the class-level getters.
    cluster, is_distributed = spec.cluster_def
    getter_kwargs = dict(environment=tensorflow,
                         cluster=cluster,
                         is_distributed=is_distributed)
    worker_affinities = TensorflowSpecification.get_worker_affinities(**getter_kwargs)
    worker_tolerations = TensorflowSpecification.get_worker_tolerations(**getter_kwargs)
    worker_node_selectors = TensorflowSpecification.get_worker_node_selectors(
        **getter_kwargs)
    worker_resources = TensorflowSpecification.get_worker_resources(**getter_kwargs)
    assert worker_affinities == {}
    assert worker_node_selectors == {}
    assert len(worker_tolerations) == tensorflow.n_workers
    assert len(worker_resources) == tensorflow.n_workers
    assert set(worker_resources.values()) == {
        tensorflow.default_worker_resources,
        tensorflow.worker_resources[3]}
    ps_tolerations = TensorflowSpecification.get_ps_tolerations(**getter_kwargs)
    ps_affinities = TensorflowSpecification.get_ps_affinities(**getter_kwargs)
    ps_node_selectors = TensorflowSpecification.get_ps_node_selectors(**getter_kwargs)
    ps_resources = TensorflowSpecification.get_ps_resources(**getter_kwargs)
    assert ps_affinities == {}
    assert ps_node_selectors == {}
    assert len(ps_tolerations) == tensorflow.n_ps
    assert len(ps_resources) == tensorflow.n_ps
    assert set(ps_resources.values()) == {
        tensorflow.default_ps_resources,
        tensorflow.ps_resources[9]}
    # Totals: master + 4 default workers + 1 overridden worker
    # + 9 default ps + 1 overridden ps.
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4 + 2 * 9, 'limits': 2 + 3 * 4 + 4 * 9},
        'memory': {'requests': 300 + 256 * 4 + 512,
                   'limits': 300 + 256 * 4 + 1024},
    }
    assert spec.cluster_def == (
        {TaskType.MASTER: 1, TaskType.WORKER: 5, TaskType.PS: 10}, True)
def test_distributed_tensorflow_passes_with_node_selectors(self):
    """Distributed TF with node selectors on master, workers, and ps."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_tensorflow_with_node_selectors_file.yml'
    )).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.environment.node_selector == {'polyaxon.com': 'node_for_master_task'}
    assert spec.master_node_selector == {'polyaxon.com': 'node_for_master_task'}
    assert spec.framework == ExperimentFramework.TENSORFLOW
    tensorflow = spec.config.tensorflow
    assert tensorflow.n_workers == 5
    assert tensorflow.n_ps == 10
    assert isinstance(spec.environment.resources, PodResourcesConfig)
    assert isinstance(spec.environment.resources.cpu, K8SResourcesConfig)
    assert spec.environment.resources.cpu.requests == 1
    assert spec.environment.resources.cpu.limits == 2
    default_worker = tensorflow.default_worker_resources
    assert isinstance(default_worker, PodResourcesConfig)
    assert isinstance(default_worker.cpu, K8SResourcesConfig)
    assert default_worker.cpu.requests == 3
    assert default_worker.cpu.limits == 3
    assert isinstance(default_worker.memory, K8SResourcesConfig)
    assert default_worker.memory.requests == 256
    assert default_worker.memory.limits == 256
    assert isinstance(tensorflow.worker_resources[3], PodResourcesConfig)
    assert isinstance(tensorflow.worker_resources[3].memory, K8SResourcesConfig)
    assert tensorflow.worker_resources[3].memory.requests == 300
    assert tensorflow.worker_resources[3].memory.limits == 300
    default_ps = tensorflow.default_ps_resources
    assert isinstance(default_ps, PodResourcesConfig)
    assert isinstance(default_ps.cpu, K8SResourcesConfig)
    assert default_ps.cpu.requests == 2
    assert default_ps.cpu.limits == 4
    assert isinstance(tensorflow.ps_resources[9], PodResourcesConfig)
    assert isinstance(tensorflow.ps_resources[9].memory, K8SResourcesConfig)
    assert tensorflow.ps_resources[9].memory.requests == 512
    assert tensorflow.ps_resources[9].memory.limits == 1024
    # Expand per-replica resources through the class-level getters.
    cluster, is_distributed = spec.cluster_def
    getter_kwargs = dict(environment=tensorflow,
                         cluster=cluster,
                         is_distributed=is_distributed)
    worker_resources = TensorflowSpecification.get_worker_resources(**getter_kwargs)
    assert len(worker_resources) == tensorflow.n_workers
    assert set(worker_resources.values()) == {
        tensorflow.default_worker_resources,
        tensorflow.worker_resources[3]}
    ps_resources = TensorflowSpecification.get_ps_resources(**getter_kwargs)
    assert len(ps_resources) == tensorflow.n_ps
    assert set(ps_resources.values()) == {
        tensorflow.default_ps_resources,
        tensorflow.ps_resources[9]}
    # Totals: master + 4 default workers + 1 overridden worker
    # + 9 default ps + 1 overridden ps.
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4 + 2 * 9, 'limits': 2 + 3 * 4 + 4 * 9},
        'memory': {'requests': 300 + 256 * 4 + 512,
                   'limits': 300 + 256 * 4 + 1024},
    }
    assert spec.cluster_def == (
        {TaskType.MASTER: 1, TaskType.WORKER: 5, TaskType.PS: 10}, True)
    assert (tensorflow.default_worker.node_selector ==
            {'polyaxon.com': 'node_for_worker_tasks'})
    assert (tensorflow.worker_node_selectors[2] ==
            {'polyaxon.com': 'node_for_worker_task_2'})
    assert (tensorflow.default_ps.node_selector ==
            {'polyaxon.com': 'node_for_ps_tasks'})
    assert (tensorflow.ps_node_selectors[2] ==
            {'polyaxon.com': 'node_for_ps_task_2'})
    worker_node_selectors = TensorflowSpecification.get_worker_node_selectors(
        **getter_kwargs)
    assert len(worker_node_selectors) == tensorflow.n_workers
    assert {tuple(s.items()) for s in worker_node_selectors.values()} == {
        tuple(tensorflow.default_worker.node_selector.items()),
        tuple(tensorflow.worker_node_selectors[2].items())}
    ps_node_selectors = TensorflowSpecification.get_ps_node_selectors(**getter_kwargs)
    assert len(ps_node_selectors) == tensorflow.n_ps
    assert {tuple(s.items()) for s in ps_node_selectors.values()} == {
        tuple(tensorflow.default_ps.node_selector.items()),
        tuple(tensorflow.ps_node_selectors[2].items())}
def test_distributed_horovod_passes(self):
    """Distributed Horovod file: worker tolerations/resources and totals."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/plain/distributed_horovod_file.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.framework == ExperimentFramework.HOROVOD
    horovod = spec.config.horovod
    assert horovod.n_workers == 5
    assert isinstance(spec.environment.resources, PodResourcesConfig)
    assert isinstance(spec.environment.resources.cpu, K8SResourcesConfig)
    assert spec.environment.resources.cpu.requests == 1
    assert spec.environment.resources.cpu.limits == 2
    default_worker = horovod.default_worker_resources
    assert isinstance(default_worker, PodResourcesConfig)
    assert isinstance(default_worker.cpu, K8SResourcesConfig)
    assert default_worker.cpu.requests == 3
    assert default_worker.cpu.limits == 3
    assert isinstance(default_worker.memory, K8SResourcesConfig)
    assert default_worker.memory.requests == 256
    assert default_worker.memory.limits == 256
    assert isinstance(horovod.worker_resources[3], PodResourcesConfig)
    assert isinstance(horovod.worker_resources[3].memory, K8SResourcesConfig)
    assert horovod.worker_resources[3].memory.requests == 300
    assert horovod.worker_resources[3].memory.limits == 300
    assert isinstance(spec.environment.affinity, dict)
    assert horovod.worker_affinities == {}
    assert spec.environment.tolerations is None
    assert isinstance(horovod.default_worker_tolerations, list)
    assert isinstance(horovod.worker_tolerations[2], list)
    assert horovod.worker_tolerations[2] == [{'operator': 'Exists'}]
    # Expand per-replica configs through the class-level getters.
    cluster, is_distributed = spec.cluster_def
    getter_kwargs = dict(environment=horovod,
                         cluster=cluster,
                         is_distributed=is_distributed)
    worker_resources = HorovodSpecification.get_worker_resources(**getter_kwargs)
    worker_node_selectors = HorovodSpecification.get_worker_node_selectors(
        **getter_kwargs)
    worker_affinities = HorovodSpecification.get_worker_affinities(**getter_kwargs)
    worker_tolerations = HorovodSpecification.get_worker_tolerations(**getter_kwargs)
    assert worker_node_selectors == {}
    assert worker_affinities == {}
    assert len(worker_tolerations) == horovod.n_workers
    assert len(worker_resources) == horovod.n_workers
    assert set(worker_resources.values()) == {
        horovod.default_worker_resources,
        horovod.worker_resources[3]}
    # Totals: master + 4 default workers + 1 overridden worker.
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4, 'limits': 2 + 3 * 4},
        'memory': {'requests': 300 + 256 * 4, 'limits': 300 + 256 * 4},
    }
    assert spec.cluster_def == ({TaskType.MASTER: 1, TaskType.WORKER: 5}, True)
def test_distributed_horovod_with_node_selectors_passes(self):
    """Distributed Horovod with node selectors on master and workers."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_horovod_with_node_selectors_file.yml'
    )).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.environment.node_selector == {'polyaxon.com': 'node_for_master_task'}
    assert spec.master_node_selector == {'polyaxon.com': 'node_for_master_task'}
    assert spec.framework == ExperimentFramework.HOROVOD
    horovod = spec.config.horovod
    assert horovod.n_workers == 5
    assert isinstance(spec.environment.resources, PodResourcesConfig)
    assert isinstance(spec.environment.resources.cpu, K8SResourcesConfig)
    assert spec.environment.resources.cpu.requests == 1
    assert spec.environment.resources.cpu.limits == 2
    default_worker = horovod.default_worker_resources
    assert isinstance(default_worker, PodResourcesConfig)
    assert isinstance(default_worker.cpu, K8SResourcesConfig)
    assert default_worker.cpu.requests == 3
    assert default_worker.cpu.limits == 3
    assert isinstance(default_worker.memory, K8SResourcesConfig)
    assert default_worker.memory.requests == 256
    assert default_worker.memory.limits == 256
    assert isinstance(horovod.worker_resources[3], PodResourcesConfig)
    assert isinstance(horovod.worker_resources[3].memory, K8SResourcesConfig)
    assert horovod.worker_resources[3].memory.requests == 300
    assert horovod.worker_resources[3].memory.limits == 300
    # Expand per-replica resources through the class-level getters.
    cluster, is_distributed = spec.cluster_def
    worker_resources = HorovodSpecification.get_worker_resources(
        environment=horovod, cluster=cluster, is_distributed=is_distributed)
    assert len(worker_resources) == horovod.n_workers
    assert set(worker_resources.values()) == {
        horovod.default_worker_resources,
        horovod.worker_resources[3]}
    # Totals: master + 4 default workers + 1 overridden worker.
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4, 'limits': 2 + 3 * 4},
        'memory': {'requests': 300 + 256 * 4, 'limits': 300 + 256 * 4},
    }
    assert spec.cluster_def == ({TaskType.MASTER: 1, TaskType.WORKER: 5}, True)
    assert (horovod.default_worker.node_selector ==
            {'polyaxon.com': 'node_for_worker_tasks'})
    assert (horovod.worker_node_selectors[2] ==
            {'polyaxon.com': 'node_for_worker_task_2'})
    worker_node_selectors = HorovodSpecification.get_worker_node_selectors(
        environment=horovod, cluster=cluster, is_distributed=is_distributed)
    assert len(worker_node_selectors) == horovod.n_workers
    assert {tuple(s.items()) for s in worker_node_selectors.values()} == {
        tuple(horovod.default_worker.node_selector.items()),
        tuple(horovod.worker_node_selectors[2].items())}
def test_distributed_pytorch_passes(self):
    """Distributed pytorch file: worker defaults, per-worker overrides, totals."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_pytorch_file.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.framework == ExperimentFramework.PYTORCH
    pytorch = spec.config.pytorch
    assert pytorch.n_workers == 5
    env = spec.environment
    assert env.node_selector is None
    assert env.tolerations is None
    assert isinstance(env.affinity, dict)
    assert isinstance(env.resources, PodResourcesConfig)
    assert isinstance(env.resources.cpu, K8SResourcesConfig)
    assert env.resources.cpu.requests == 1
    assert env.resources.cpu.limits == 2
    assert pytorch.default_worker_node_selector is None
    assert pytorch.default_worker_affinity is None
    assert isinstance(pytorch.default_worker_tolerations, list)
    assert isinstance(pytorch.default_worker_tolerations[0], dict)
    default_res = pytorch.default_worker_resources
    assert isinstance(default_res, PodResourcesConfig)
    assert isinstance(default_res.cpu, K8SResourcesConfig)
    assert default_res.cpu.requests == 3
    assert default_res.cpu.limits == 3
    assert isinstance(default_res.memory, K8SResourcesConfig)
    assert default_res.memory.requests == 256
    assert default_res.memory.limits == 256
    assert pytorch.worker_tolerations[2] == [{'operator': 'Exists'}]
    override_res = pytorch.worker_resources[3]
    assert isinstance(override_res, PodResourcesConfig)
    assert isinstance(override_res.memory, K8SResourcesConfig)
    assert override_res.memory.requests == 300
    assert override_res.memory.limits == 300
    # The specification helpers should expand per-replica configs.
    cluster, is_distributed = spec.cluster_def
    worker_resources = PytorchSpecification.get_worker_resources(
        environment=pytorch, cluster=cluster, is_distributed=is_distributed)
    worker_tolerations = PytorchSpecification.get_worker_tolerations(
        environment=pytorch, cluster=cluster, is_distributed=is_distributed)
    worker_node_selectors = PytorchSpecification.get_worker_node_selectors(
        environment=pytorch, cluster=cluster, is_distributed=is_distributed)
    worker_affinities = PytorchSpecification.get_worker_affinities(
        environment=pytorch, cluster=cluster, is_distributed=is_distributed)
    assert worker_node_selectors == {}
    assert worker_affinities == {}
    assert len(worker_tolerations) == pytorch.n_workers
    assert len(worker_resources) == pytorch.n_workers
    assert set(worker_resources.values()) == {default_res, override_res}
    # Totals: master + 4 default workers + the single override worker.
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4, 'limits': 2 + 3 * 4},
        'memory': {'requests': 300 + 256 * 4, 'limits': 300 + 256 * 4},
    }
    assert spec.cluster_def == ({TaskType.MASTER: 1,
                                 TaskType.WORKER: 5}, True)
def test_distributed_pytorch_with_node_selectors_passes(self):
    """Pytorch file with node selectors for master and worker replicas."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_pytorch_with_node_selectors_file.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    master_selector = {'polyaxon.com': 'node_for_master_task'}
    assert spec.environment.node_selector == master_selector
    assert spec.master_node_selector == master_selector
    assert spec.framework == ExperimentFramework.PYTORCH
    pytorch = spec.config.pytorch
    assert pytorch.n_workers == 5
    assert isinstance(spec.environment.resources, PodResourcesConfig)
    assert isinstance(spec.environment.resources.cpu, K8SResourcesConfig)
    assert spec.environment.resources.cpu.requests == 1
    assert spec.environment.resources.cpu.limits == 2
    default_res = pytorch.default_worker_resources
    assert isinstance(default_res, PodResourcesConfig)
    assert isinstance(default_res.cpu, K8SResourcesConfig)
    assert default_res.cpu.requests == 3
    assert default_res.cpu.limits == 3
    assert isinstance(default_res.memory, K8SResourcesConfig)
    assert default_res.memory.requests == 256
    assert default_res.memory.limits == 256
    override_res = pytorch.worker_resources[3]
    assert isinstance(override_res, PodResourcesConfig)
    assert isinstance(override_res.memory, K8SResourcesConfig)
    assert override_res.memory.requests == 300
    assert override_res.memory.limits == 300
    # Expansion helpers should fan the configs out over all workers.
    cluster, is_distributed = spec.cluster_def
    worker_resources = PytorchSpecification.get_worker_resources(
        environment=pytorch, cluster=cluster, is_distributed=is_distributed)
    assert len(worker_resources) == pytorch.n_workers
    assert set(worker_resources.values()) == {default_res, override_res}
    # Totals: master + 4 default workers + the single override worker.
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4, 'limits': 2 + 3 * 4},
        'memory': {'requests': 300 + 256 * 4, 'limits': 300 + 256 * 4},
    }
    assert spec.cluster_def == ({TaskType.MASTER: 1,
                                 TaskType.WORKER: 5}, True)
    assert (pytorch.default_worker.node_selector ==
            {'polyaxon.com': 'node_for_worker_tasks'})
    assert (pytorch.worker_node_selectors[2] ==
            {'polyaxon.com': 'node_for_worker_task_2'})
    worker_node_selectors = PytorchSpecification.get_worker_node_selectors(
        environment=pytorch, cluster=cluster, is_distributed=is_distributed)
    assert len(worker_node_selectors) == pytorch.n_workers
    assert set(tuple(i.items()) for i in worker_node_selectors.values()) == {
        tuple(pytorch.default_worker.node_selector.items()),
        tuple(pytorch.worker_node_selectors[2].items())}
def test_distributed_mpi_passes(self):
    """Distributed MPI file: worker-only cluster driven by the default worker config.

    Bug fix: the per-replica expansion helpers were invoked on
    PytorchSpecification — a copy/paste leftover from the pytorch tests —
    instead of MPISpecification, the backend this test exercises.
    """
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_mpi_file.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.framework == ExperimentFramework.TENSORFLOW
    assert spec.backend == ExperimentBackend.MPI
    assert spec.config.mpi.n_workers == 8
    # The master environment defines nothing: everything comes from defaults.
    assert spec.environment.node_selector is None
    assert spec.environment.tolerations is None
    assert spec.environment.affinity is None
    assert spec.environment.resources is None
    assert spec.config.mpi.default_worker_node_selector is None
    assert spec.config.mpi.default_worker_affinity is None
    assert isinstance(spec.config.mpi.default_worker_tolerations, list)
    assert isinstance(spec.config.mpi.default_worker_tolerations[0], dict)
    assert isinstance(spec.config.mpi.default_worker_resources,
                      PodResourcesConfig)
    assert isinstance(spec.config.mpi.default_worker_resources.cpu,
                      K8SResourcesConfig)
    assert spec.config.mpi.default_worker_resources.cpu.requests == 3
    assert spec.config.mpi.default_worker_resources.cpu.limits == 3
    assert isinstance(spec.config.mpi.default_worker_resources.memory,
                      K8SResourcesConfig)
    assert spec.config.mpi.default_worker_resources.memory.requests == 256
    assert spec.config.mpi.default_worker_resources.memory.limits == 256
    assert isinstance(spec.config.mpi.default_worker_resources.gpu,
                      K8SResourcesConfig)
    assert spec.config.mpi.default_worker_resources.gpu.requests == 4
    assert spec.config.mpi.default_worker_resources.gpu.limits == 4
    assert spec.config.mpi.worker_tolerations == {}
    assert spec.config.mpi.worker_resources == {}
    # check that properties for return list of configs and resources is working
    cluster, is_distributed = spec.cluster_def
    worker_resources = MPISpecification.get_worker_resources(
        environment=spec.config.mpi,
        cluster=cluster,
        is_distributed=is_distributed
    )
    worker_tolerations = MPISpecification.get_worker_tolerations(
        environment=spec.config.mpi,
        cluster=cluster,
        is_distributed=is_distributed
    )
    worker_node_selectors = MPISpecification.get_worker_node_selectors(
        environment=spec.config.mpi,
        cluster=cluster,
        is_distributed=is_distributed
    )
    worker_affinities = MPISpecification.get_worker_affinities(
        environment=spec.config.mpi,
        cluster=cluster,
        is_distributed=is_distributed
    )
    assert worker_node_selectors == {}
    assert worker_affinities == {}
    assert len(worker_tolerations) == spec.config.mpi.n_workers
    assert len(worker_resources) == spec.config.mpi.n_workers
    assert set(worker_resources.values()) == {spec.config.mpi.default_worker_resources}
    # Check total resources: 8 identical default workers, no master pod.
    assert spec.total_resources == {
        'cpu': {'requests': 3 * 8, 'limits': 3 * 8},
        'memory': {'requests': 256 * 8, 'limits': 256 * 8},
        'gpu': {'requests': 4 * 8, 'limits': 4 * 8},
    }
    assert spec.cluster_def == ({TaskType.WORKER: 8}, True)
def test_distributed_mpi_with_node_selectors_passes(self):
    """MPI file with node selectors: the default worker selector covers all workers.

    Bug fix: get_worker_resources was called on PytorchSpecification (a
    copy/paste leftover); it now uses MPISpecification, consistent with the
    get_worker_node_selectors call below.
    """
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_mpi_with_node_selectors_file.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert spec.framework == ExperimentFramework.PYTORCH
    assert spec.backend == ExperimentBackend.MPI
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.environment.node_selector is None
    assert spec.master_node_selector is None
    assert spec.config.mpi.n_workers == 4
    assert spec.environment.resources is None
    assert isinstance(spec.config.mpi.default_worker_resources,
                      PodResourcesConfig)
    assert isinstance(spec.config.mpi.default_worker_resources.cpu,
                      K8SResourcesConfig)
    assert spec.config.mpi.default_worker_resources.cpu.requests == 3
    assert spec.config.mpi.default_worker_resources.cpu.limits == 3
    assert isinstance(spec.config.mpi.default_worker_resources.memory,
                      K8SResourcesConfig)
    assert spec.config.mpi.default_worker_resources.memory.requests == 256
    assert spec.config.mpi.default_worker_resources.memory.limits == 256
    assert isinstance(spec.config.mpi.default_worker_resources.gpu,
                      K8SResourcesConfig)
    assert spec.config.mpi.default_worker_resources.gpu.requests == 2
    assert spec.config.mpi.default_worker_resources.gpu.limits == 2
    assert spec.config.mpi.worker_resources == {}
    # check that properties for return list of configs and resources is working
    cluster, is_distributed = spec.cluster_def
    worker_resources = MPISpecification.get_worker_resources(
        environment=spec.config.mpi,
        cluster=cluster,
        is_distributed=is_distributed
    )
    assert len(worker_resources) == spec.config.mpi.n_workers
    assert set(worker_resources.values()) == {spec.config.mpi.default_worker_resources}
    # Check total resources: 4 workers, each 3 cpu / 256 memory / 2 gpu.
    assert spec.total_resources == {
        'cpu': {'requests': 3 * 4, 'limits': 3 * 4},
        'memory': {'requests': 256 * 4, 'limits': 256 * 4},
        'gpu': {'requests': 2 * 4, 'limits': 2 * 4},
    }
    assert spec.cluster_def == ({TaskType.WORKER: 4}, True)
    assert (spec.config.mpi.default_worker.node_selector ==
            {'polyaxon.com': 'node_for_worker_tasks'})
    worker_node_selectors = MPISpecification.get_worker_node_selectors(
        environment=spec.config.mpi,
        cluster=cluster,
        is_distributed=is_distributed
    )
    assert len(worker_node_selectors) == spec.config.mpi.n_workers
    assert set(tuple(i.items()) for i in worker_node_selectors.values()) == {
        tuple(spec.config.mpi.default_worker.node_selector.items())}
def test_distributed_mxnet_passes(self):
    """Distributed mxnet file: worker and ps defaults, overrides and totals."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_mxnet_file.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.framework == ExperimentFramework.MXNET
    mxnet = spec.config.mxnet
    assert mxnet.n_workers == 5
    assert mxnet.n_ps == 10
    env = spec.environment
    assert env.node_selector is None
    assert env.tolerations is None
    assert isinstance(env.affinity, dict)
    assert isinstance(env.resources, PodResourcesConfig)
    assert isinstance(env.resources.cpu, K8SResourcesConfig)
    assert env.resources.cpu.requests == 1
    assert env.resources.cpu.limits == 2
    assert mxnet.default_worker_node_selector is None
    assert mxnet.default_worker_affinity is None
    assert isinstance(mxnet.default_worker_tolerations, list)
    worker_default = mxnet.default_worker_resources
    assert isinstance(worker_default, PodResourcesConfig)
    assert isinstance(worker_default.cpu, K8SResourcesConfig)
    assert worker_default.cpu.requests == 3
    assert worker_default.cpu.limits == 3
    assert isinstance(worker_default.memory, K8SResourcesConfig)
    assert worker_default.memory.requests == 256
    assert worker_default.memory.limits == 256
    assert isinstance(mxnet.worker_tolerations[2], list)
    assert mxnet.worker_tolerations[2] == [{'operator': 'Exists'}]
    worker_override = mxnet.worker_resources[3]
    assert isinstance(worker_override, PodResourcesConfig)
    assert isinstance(worker_override.memory, K8SResourcesConfig)
    assert worker_override.memory.requests == 300
    assert worker_override.memory.limits == 300
    assert mxnet.default_ps_node_selector is None
    assert mxnet.default_ps_affinity is None
    assert isinstance(mxnet.default_ps_tolerations, list)
    ps_default = mxnet.default_ps_resources
    assert isinstance(ps_default, PodResourcesConfig)
    assert isinstance(ps_default.cpu, K8SResourcesConfig)
    assert ps_default.cpu.requests == 2
    assert ps_default.cpu.limits == 4
    ps_override = mxnet.ps_resources[9]
    assert isinstance(ps_override, PodResourcesConfig)
    assert isinstance(ps_override.memory, K8SResourcesConfig)
    assert ps_override.memory.requests == 512
    assert ps_override.memory.limits == 1024
    # Expansion helpers fan per-replica configs out for workers and servers.
    cluster, is_distributed = spec.cluster_def
    kwargs = dict(environment=mxnet, cluster=cluster,
                  is_distributed=is_distributed)
    worker_resources = MXNetSpecification.get_worker_resources(**kwargs)
    worker_node_selectors = MXNetSpecification.get_worker_node_selectors(**kwargs)
    worker_affinities = MXNetSpecification.get_worker_affinities(**kwargs)
    worker_tolerations = MXNetSpecification.get_worker_tolerations(**kwargs)
    assert worker_node_selectors == {}
    assert worker_affinities == {}
    assert len(worker_tolerations) == mxnet.n_workers
    assert len(worker_resources) == mxnet.n_workers
    assert set(worker_resources.values()) == {worker_default, worker_override}
    ps_resources = MXNetSpecification.get_ps_resources(**kwargs)
    ps_node_selectors = MXNetSpecification.get_ps_node_selectors(**kwargs)
    ps_affinities = MXNetSpecification.get_ps_affinities(**kwargs)
    ps_tolerations = MXNetSpecification.get_ps_tolerations(**kwargs)
    assert ps_node_selectors == {}
    assert ps_affinities == {}
    assert len(ps_tolerations) == mxnet.n_ps
    assert len(ps_resources) == mxnet.n_ps
    assert set(ps_resources.values()) == {ps_default, ps_override}
    # Totals: master + workers (4 default + 1 override) + ps (9 default + 1 override).
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4 + 2 * 9, 'limits': 2 + 3 * 4 + 4 * 9},
        'memory': {'requests': 300 + 256 * 4 + 512, 'limits': 300 + 256 * 4 + 1024},
    }
    assert spec.cluster_def == ({TaskType.MASTER: 1,
                                 TaskType.WORKER: 5,
                                 TaskType.SERVER: 10}, True)
def test_distributed_mxnet_with_node_selectors_passes(self):
    """Mxnet file with node selectors for master, workers and ps servers."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/distributed_mxnet_with_node_selectors_file.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_experiment
    assert isinstance(spec.logging, LoggingConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    master_selector = {'polyaxon.com': 'node_for_master_task'}
    assert spec.environment.node_selector == master_selector
    assert spec.master_node_selector == master_selector
    assert spec.framework == ExperimentFramework.MXNET
    mxnet = spec.config.mxnet
    assert mxnet.n_workers == 5
    assert mxnet.n_ps == 10
    assert isinstance(spec.environment.resources, PodResourcesConfig)
    assert isinstance(spec.environment.resources.cpu, K8SResourcesConfig)
    assert spec.environment.resources.cpu.requests == 1
    assert spec.environment.resources.cpu.limits == 2
    worker_default = mxnet.default_worker_resources
    assert isinstance(worker_default, PodResourcesConfig)
    assert isinstance(worker_default.cpu, K8SResourcesConfig)
    assert worker_default.cpu.requests == 3
    assert worker_default.cpu.limits == 3
    assert isinstance(worker_default.memory, K8SResourcesConfig)
    assert worker_default.memory.requests == 256
    assert worker_default.memory.limits == 256
    worker_override = mxnet.worker_resources[3]
    assert isinstance(worker_override, PodResourcesConfig)
    assert isinstance(worker_override.memory, K8SResourcesConfig)
    assert worker_override.memory.requests == 300
    assert worker_override.memory.limits == 300
    ps_default = mxnet.default_ps_resources
    assert isinstance(ps_default, PodResourcesConfig)
    assert isinstance(ps_default.cpu, K8SResourcesConfig)
    assert ps_default.cpu.requests == 2
    assert ps_default.cpu.limits == 4
    ps_override = mxnet.ps_resources[9]
    assert isinstance(ps_override, PodResourcesConfig)
    assert isinstance(ps_override.memory, K8SResourcesConfig)
    assert ps_override.memory.requests == 512
    assert ps_override.memory.limits == 1024
    # Expansion helpers should fan resources out over every replica.
    cluster, is_distributed = spec.cluster_def
    kwargs = dict(environment=mxnet, cluster=cluster,
                  is_distributed=is_distributed)
    worker_resources = MXNetSpecification.get_worker_resources(**kwargs)
    assert len(worker_resources) == mxnet.n_workers
    assert set(worker_resources.values()) == {worker_default, worker_override}
    ps_resources = MXNetSpecification.get_ps_resources(**kwargs)
    assert len(ps_resources) == mxnet.n_ps
    assert set(ps_resources.values()) == {ps_default, ps_override}
    # Totals: master + workers (4 default + 1 override) + ps (9 default + 1 override).
    assert spec.total_resources == {
        'cpu': {'requests': 1 + 3 * 4 + 2 * 9, 'limits': 2 + 3 * 4 + 4 * 9},
        'memory': {'requests': 300 + 256 * 4 + 512, 'limits': 300 + 256 * 4 + 1024},
    }
    assert spec.cluster_def == ({TaskType.MASTER: 1,
                                 TaskType.WORKER: 5,
                                 TaskType.SERVER: 10}, True)
    assert (mxnet.default_worker.node_selector ==
            {'polyaxon.com': 'node_for_worker_tasks'})
    assert (mxnet.worker_node_selectors[2] ==
            {'polyaxon.com': 'node_for_worker_task_2'})
    assert (mxnet.default_ps.node_selector ==
            {'polyaxon.com': 'node_for_ps_tasks'})
    assert (mxnet.ps_node_selectors[2] ==
            {'polyaxon.com': 'node_for_ps_task_2'})
    worker_node_selectors = MXNetSpecification.get_worker_node_selectors(**kwargs)
    assert len(worker_node_selectors) == mxnet.n_workers
    assert set(tuple(i.items()) for i in worker_node_selectors.values()) == {
        tuple(mxnet.default_worker.node_selector.items()),
        tuple(mxnet.worker_node_selectors[2].items())}
    ps_node_selectors = MXNetSpecification.get_ps_node_selectors(**kwargs)
    assert len(ps_node_selectors) == mxnet.n_ps
    assert set(tuple(i.items()) for i in ps_node_selectors.values()) == {
        tuple(mxnet.default_ps.node_selector.items()),
        tuple(mxnet.ps_node_selectors[2].items())}
def test_notebook_job_with_node_selectors(self):
    """Notebook file with a custom environment: refs and scheduling settings."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/notebook_with_custom_environment.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_notebook
    assert spec.is_notebook is True
    assert spec.backend is None
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.artifact_refs == ['outputs1']
    assert spec.data_refs == ['data1', 'data2']
    assert spec.secret_refs == ['secret1', 'secret2']
    assert spec.config_map_refs == ['config_map1', 'config_map2']
    expected_selector = {'polyaxon.com': 'node_for_notebook_jobs'}
    expected_resources = {
        'cpu': {'requests': 1, 'limits': 2},
        'memory': {'requests': 200, 'limits': 200},
    }
    expected_affinity = {
        'nodeAffinity': {'requiredDuringSchedulingIgnoredDuringExecution': {}}
    }
    expected_tolerations = [{'key': 'key', 'operator': 'Exists'}]
    # Each environment value must surface on both the environment and the spec.
    assert spec.environment.node_selector == expected_selector
    assert spec.node_selector == expected_selector
    assert spec.environment.resources.to_dict() == expected_resources
    assert spec.resources.to_dict() == expected_resources
    assert spec.environment.affinity == expected_affinity
    assert spec.affinity == expected_affinity
    assert spec.environment.tolerations == expected_tolerations
    assert spec.tolerations == expected_tolerations
def test_jupyter_lab_job_with_node_selectors(self):
    """Jupyterlab file with custom environment: labels, annotations, scheduling."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/jupyterlab_with_custom_environment.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_notebook
    assert spec.is_notebook is True
    assert spec.backend == 'lab'
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.artifact_refs == ['outputs1']
    assert spec.data_refs == ['data1', 'data2']
    assert spec.secret_refs == ['secret1', 'secret2']
    assert spec.config_map_refs == ['config_map1', 'config_map2']
    # Each environment value must surface on both the environment and the spec.
    expected_selector = {'polyaxon.com': 'node_for_notebook_jobs'}
    assert spec.environment.node_selector == expected_selector
    assert spec.node_selector == expected_selector
    expected_labels = {"key1": "value1", "key2": "value2"}
    assert spec.environment.labels == expected_labels
    assert spec.labels == expected_labels
    expected_annotations = {'anno1': 'value1'}
    assert spec.environment.annotations == expected_annotations
    assert spec.annotations == expected_annotations
    expected_resources = {
        'cpu': {'requests': 1, 'limits': 2},
        'memory': {'requests': 200, 'limits': 200},
    }
    assert spec.environment.resources.to_dict() == expected_resources
    assert spec.resources.to_dict() == expected_resources
    expected_affinity = {
        'nodeAffinity': {'requiredDuringSchedulingIgnoredDuringExecution': {}}
    }
    assert spec.environment.affinity == expected_affinity
    assert spec.affinity == expected_affinity
    expected_tolerations = [{'key': 'key', 'operator': 'Exists'}]
    assert spec.environment.tolerations == expected_tolerations
    assert spec.tolerations == expected_tolerations
def test_tensorboard_job_with_node_selectors(self):
    """Tensorboard file with custom environment scheduling settings."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/tensorboard_with_custom_environment.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_tensorboard
    assert spec.is_tensorboard is True
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    expected_selector = {'polyaxon.com': 'node_for_tensorboard_jobs'}
    expected_resources = {
        'cpu': {'requests': 1, 'limits': 2},
        'memory': {'requests': 200, 'limits': 200},
    }
    expected_affinity = {
        'nodeAffinity': {'requiredDuringSchedulingIgnoredDuringExecution': {}}
    }
    expected_tolerations = [{'key': 'key', 'operator': 'Exists'}]
    # Each environment value must surface on both the environment and the spec.
    assert spec.environment.node_selector == expected_selector
    assert spec.node_selector == expected_selector
    assert spec.environment.resources.to_dict() == expected_resources
    assert spec.resources.to_dict() == expected_resources
    assert spec.environment.affinity == expected_affinity
    assert spec.affinity == expected_affinity
    assert spec.environment.tolerations == expected_tolerations
    assert spec.tolerations == expected_tolerations
def test_run_job_with_node_selectors(self):
    """Run-job file with a custom environment: refs and scheduling settings."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/run_with_custom_environment.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_job
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert spec.logging is None
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.run, RunConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.artifact_refs == ['outputs1']
    assert spec.data_refs == ['data1', 'data2']
    assert spec.secret_refs == ['secret1', 'secret2']
    assert spec.config_map_refs == ['config_map1', 'config_map2']
    expected_selector = {'polyaxon.com': 'node_for_jobs'}
    expected_resources = {
        'cpu': {'requests': 1, 'limits': 2},
        'memory': {'requests': 200, 'limits': 200},
    }
    expected_affinity = {
        'nodeAffinity': {'requiredDuringSchedulingIgnoredDuringExecution': {}}
    }
    expected_tolerations = [{'key': 'key', 'operator': 'Exists'}]
    # Each environment value must surface on both the environment and the spec.
    assert spec.environment.node_selector == expected_selector
    assert spec.node_selector == expected_selector
    assert spec.environment.resources.to_dict() == expected_resources
    assert spec.resources.to_dict() == expected_resources
    assert spec.environment.affinity == expected_affinity
    assert spec.affinity == expected_affinity
    assert spec.environment.tolerations == expected_tolerations
    assert spec.tolerations == expected_tolerations
def test_build_job_with_custom_environment(self):
    """Build-job file with a custom environment and scheduling settings."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/build_with_custom_environment.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_build is True
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert isinstance(spec.config, BuildConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    expected_selector = {'polyaxon.com': 'node_for_build_jobs'}
    expected_resources = {
        'cpu': {'requests': 1, 'limits': 2},
        'memory': {'requests': 200, 'limits': 200},
    }
    expected_affinity = {
        'nodeAffinity': {'requiredDuringSchedulingIgnoredDuringExecution': {}}
    }
    expected_tolerations = [{'key': 'key', 'operator': 'Exists'}]
    # Each environment value must surface on both the environment and the spec.
    assert spec.environment.node_selector == expected_selector
    assert spec.node_selector == expected_selector
    assert spec.environment.resources.to_dict() == expected_resources
    assert spec.resources.to_dict() == expected_resources
    assert spec.environment.affinity == expected_affinity
    assert spec.affinity == expected_affinity
    assert spec.environment.tolerations == expected_tolerations
    assert spec.tolerations == expected_tolerations
def test_build_job_with_context_and_dockerfile(self):
    """Build-job file with an explicit docker context and dockerfile path."""
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/build_with_context_and_dockerfile.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_build is True
    assert spec.logging is None
    assert spec.config.dockerfile == 'dockerfiles/Dockerfile'
    assert spec.config.context == 'module1'
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert isinstance(spec.environment, EnvironmentConfig)
    expected_selector = {'polyaxon.com': 'node_for_build_jobs'}
    expected_resources = {
        'cpu': {'requests': 1, 'limits': 2},
        'memory': {'requests': 200, 'limits': 200},
    }
    expected_affinity = {
        'nodeAffinity': {'requiredDuringSchedulingIgnoredDuringExecution': {}}
    }
    expected_tolerations = [{'key': 'key', 'operator': 'Exists'}]
    # Each environment value must surface on both the environment and the spec.
    assert spec.environment.node_selector == expected_selector
    assert spec.node_selector == expected_selector
    assert spec.environment.resources.to_dict() == expected_resources
    assert spec.resources.to_dict() == expected_resources
    assert spec.environment.affinity == expected_affinity
    assert spec.affinity == expected_affinity
    assert spec.environment.tolerations == expected_tolerations
    assert spec.tolerations == expected_tolerations
def test_specification_with_quotes(self):
    """Quoted shell snippets in the run cmd survive parsing and re-parsing."""
    expected_cmd = ['python -c "print(\'Tweet tweet\')"']
    plxfile = PolyaxonFile(os.path.abspath(
        'tests/fixtures/plain/polyaxonfile_with_quotes.yaml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.run.cmd == expected_cmd
    # Round-trip the raw data through ExperimentSpecification as well.
    spec = ExperimentSpecification(spec.raw_data)
    spec.apply_context()
    assert spec.run.cmd == expected_cmd
| 46.627262
| 100
| 0.657836
| 8,792
| 82,437
| 5.981119
| 0.032416
| 0.09261
| 0.063895
| 0.041532
| 0.937664
| 0.924181
| 0.90507
| 0.887156
| 0.863177
| 0.839406
| 0
| 0.014756
| 0.246176
| 82,437
| 1,767
| 101
| 46.65365
| 0.831453
| 0.015721
| 0
| 0.755686
| 0
| 0
| 0.076438
| 0.037652
| 0
| 0
| 0
| 0.000566
| 0.497076
| 1
| 0.026641
| false
| 0.020143
| 0.012995
| 0
| 0.040286
| 0.001949
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b76dc34bbb08a3baa8ed88e92f2f90213f985c1b
| 44,379
|
py
|
Python
|
test/basic_catalog_test.py
|
kbaseIncubator/registrar
|
0471636a02d4f001cd2180978606cedd45578ebe
|
[
"MIT"
] | 1
|
2016-06-13T15:30:26.000Z
|
2016-06-13T15:30:26.000Z
|
test/basic_catalog_test.py
|
kbaseIncubator/registrar
|
0471636a02d4f001cd2180978606cedd45578ebe
|
[
"MIT"
] | 61
|
2015-11-05T22:37:56.000Z
|
2020-07-24T19:05:47.000Z
|
test/basic_catalog_test.py
|
kbaseIncubator/registrar
|
0471636a02d4f001cd2180978606cedd45578ebe
|
[
"MIT"
] | 14
|
2015-10-30T00:11:26.000Z
|
2021-04-23T03:15:47.000Z
|
import unittest
from biokbase.catalog.Impl import Catalog
from biokbase.catalog.version import CATALOG_VERSION
from catalog_test_util import CatalogTestUtil
# tests all the basic get methods
class BasicCatalogTest(unittest.TestCase):
def test_version(self):
    """The catalog service reports the packaged CATALOG_VERSION."""
    ctx = self.cUtil.anonymous_ctx()
    self.assertEqual(self.catalog.version(ctx), [CATALOG_VERSION])
def test_is_registered(self):
    """is_registered matches on module_name and/or git_url; both must be right."""
    good_name = 'onerepotest'
    good_url = 'https://github.com/kbaseIncubator/onerepotest'
    cases = [
        ({'module_name': good_name}, 1),
        ({'git_url': good_url}, 1),
        ({'module_name': good_name, 'git_url': good_url}, 1),
        ({'module_name': 'wrong_name'}, 0),
        ({'module_name': 'wrong_name', 'git_url': good_url}, 0),
        ({'module_name': good_name, 'git_url': 'wrong_url'}, 0),
        ({'git_url': 'wrong_url'}, 0),
        ({}, 0),
    ]
    for params, expected in cases:
        self.assertEqual(
            self.catalog.is_registered(self.cUtil.anonymous_ctx(), params),
            [expected])
def test_list_requested_releases(self):
    """Every pending release request exposes the standard summary fields, and
    the two fixture modules with open requests are both listed.
    """
    requested_releases = self.catalog.list_requested_releases(self.cUtil.anonymous_ctx())[0]
    found_modules = []
    for r in requested_releases:
        # Every request record must carry the standard summary fields.
        self.assertIn('module_name', r)
        found_modules.append(r['module_name'])
        self.assertIn('owners', r)
        self.assertIn('timestamp', r)
        self.assertIn('git_url', r)
        self.assertIn('git_commit_message', r)
        self.assertIn('git_commit_hash', r)
        if r['module_name'] == 'pending_first_release':
            # Spot-check exact values loaded from the test fixture data.
            self.assertEqual(r['git_commit_hash'], 'b843888e962642d665a3b0bd701ee630c01835e6')
            self.assertEqual(r['git_commit_message'], 'update for testing')
            self.assertEqual(r['git_url'], 'https://github.com/kbaseIncubator/pending_Release')
            self.assertEqual(r['timestamp'], 1445023985597)
            self.assertIn('kbasetest', r['owners'])
        if r['module_name'] == 'pending_second_release':
            self.assertEqual(r['git_url'],
                             'https://github.com/kbaseIncubator/pending_second_release')
            self.assertIn('rsutormin', r['owners'])
            self.assertIn('wstester1', r['owners'])
    # Both fixture modules with pending release requests must appear.
    self.assertIn('pending_first_release', found_modules)
    self.assertIn('pending_second_release', found_modules)
def test_list_basic_module_info(self):
    """Exercise the list_basic_module_info filters: released/unreleased,
    nameless modules, disabled modules, and owner search.
    """
    ctx = self.cUtil.anonymous_ctx

    def listing(params):
        # One catalog query; the result is the first element of the reply list.
        return self.catalog.list_basic_module_info(ctx(), params)[0]

    def joined_field(modules, field):
        # Sorted comma-joined values of one field, for order-independent compares.
        return ",".join(sorted(m[field] for m in modules))

    # default should include all modules that are released
    default = listing({})
    found_dynamic_service = False
    for m in default:
        if m['module_name'] == 'DynamicService':
            self.assertEqual(m['dev'],
                             {'git_commit_hash': 'b06c5f9daf603a4d206071787c3f6184000bf128'})
            self.assertEqual(m['beta'],
                             {'git_commit_hash': 'b843888e962642d665a3b0bd701ee630c01835e6'})
            self.assertEqual(m['release'],
                             {'git_commit_hash': '49dc505febb8f4cccb2078c58ded0de3320534d7'})
            self.assertEqual(sorted(m['owners']), ['rsutormin', 'wstester2'])
            self.assertEqual(m['language'], 'python')
            self.assertEqual(m['dynamic_service'], 1)
            self.assertEqual(len(m['release_version_list']), 4)
            found_dynamic_service = True
    self.assertTrue(found_dynamic_service)
    self.assertEqual(
        joined_field(default, 'module_name'),
        ",".join(['DynamicService', 'DynamicService2', 'onerepotest', 'pending_second_release',
                  'release_history'])
    )
    # all released and unreleased
    self.assertEqual(
        joined_field(listing({'include_unreleased': 1}), 'module_name'),
        ",".join(sorted(['DynamicService', 'DynamicService2', 'denied_release', 'onerepotest',
                         'pending_first_release', 'pending_second_release',
                         'registration_error', 'registration_in_progress', 'release_history']))
    )
    # include unreleased and failed first registration
    nameless_included = listing({'include_unreleased': 1,
                                 'include_modules_with_no_name_set': 1})
    self.assertEqual(
        joined_field(nameless_included, 'git_url'),
        ",".join(sorted(['https://github.com/jplfaria/ElectronicAnnotationMethods-',
                         'https://github.com/kbaseIncubator/denied_release',
                         'https://github.com/kbaseIncubator/dynamic_service',
                         'https://github.com/kbaseIncubator/dynamic_service2',
                         'https://github.com/kbaseIncubator/onerepotest',
                         'https://github.com/kbaseIncubator/pending_Release',
                         'https://github.com/kbaseIncubator/pending_second_release',
                         'https://github.com/kbaseIncubator/registration_error',
                         'https://github.com/kbaseIncubator/registration_in_progress',
                         'https://github.com/kbaseIncubator/release_history']))
    )
    # no released and no unreleased -> empty listing
    self.assertEqual(
        joined_field(listing({'include_released': 0}), 'module_name'),
        ",".join([])
    )
    # only unreleased modules
    self.assertEqual(
        joined_field(listing({'include_released': 0, 'include_unreleased': 1}), 'module_name'),
        ",".join(sorted(['denied_release', 'pending_first_release', 'registration_error',
                         'registration_in_progress']))
    )
    # disabled modules appear only when explicitly included
    inactive = listing({'include_disabled': 1,
                        'include_released': 0,
                        'include_unreleased': 1})
    self.assertEqual(
        joined_field(inactive, 'module_name'),
        ",".join(sorted(['inactive_module', 'denied_release', 'pending_first_release',
                         'registration_error',
                         'registration_in_progress']))
    )
    # check for owner search
    self.assertEqual(
        joined_field(listing({'owners': ['kbasetest'], 'include_unreleased': 0}),
                     'module_name'),
        ",".join([])
    )
    self.assertEqual(
        joined_field(listing({'owners': ['kbasetest'], 'include_unreleased': 1}),
                     'module_name'),
        ",".join(['pending_first_release'])
    )
    self.assertEqual(
        joined_field(listing({'owners': ['kbasetest', 'wstester1'], 'include_unreleased': 1}),
                     'module_name'),
        ",".join(['denied_release', 'onerepotest', 'pending_first_release',
                  'pending_second_release', 'registration_error', 'registration_in_progress'])
    )
def test_get_module_state(self):
    """get_module_state lookups by name or git_url, plus the failure modes
    for lookups that match no registered module.
    """
    ctx = self.cUtil.anonymous_ctx

    def check_state(params, active, approval, registration):
        # Fetch one module's state and verify all five reported fields.
        state = self.catalog.get_module_state(ctx(), params)[0]
        self.assertEqual(state['active'], active)
        self.assertEqual(state['release_approval'], approval)
        self.assertEqual(state['review_message'], '')
        self.assertEqual(state['registration'], registration)
        self.assertEqual(state['error_message'], '')

    check_state({'module_name': 'onerepotest'},
                1, 'approved', 'complete')
    check_state({'module_name': 'inactive_module'},
                0, 'not_requested', 'complete')
    check_state({'git_url': 'https://github.com/kbaseIncubator/registration_in_progress'},
                1, 'not_requested', 'building: doing stuff')
    check_state({'git_url': 'https://github.com/kbaseIncubator/pending_second_release'},
                1, 'under_review', 'complete')

    # test various fail cases where a module does not exist
    bad_lookups = [
        {'module_name': 'not_a_module'},
        {'git_url': 'not_a_giturl'},
        {},
        {'module_name': 'not_a_module',
         'git_url': 'https://github.com/kbaseIncubator/registration_in_progress'},
        {'module_name': 'onerepotest', 'git_url': 'not_a_url'},
    ]
    for params in bad_lookups:
        with self.assertRaises(ValueError) as e:
            self.catalog.get_module_state(ctx(), params)
        self.assertEqual(str(e.exception),
                         'Operation failed - module/repo is not registered.')
def test_get_module_info(self):
    """Full module info for a released module, and for a module that has
    not yet had its first release (whose 'release' entry is None).
    """
    info = self.catalog.get_module_info(self.cUtil.anonymous_ctx(),
                                        {'module_name': 'onerepotest'})[0]
    self.assertEqual(info['module_name'], 'onerepotest')
    self.assertEqual(info['git_url'], 'https://github.com/kbaseIncubator/onerepotest')
    self.assertEqual(info['description'],
                     'KBase module for integration tests of docker-based service/async method calls')
    self.assertEqual(info['owners'], ['rsutormin', 'wstester1'])
    self.assertEqual(info['language'], 'python')
    # Each tag (release/beta/dev) points at a distinct fixture commit.
    self.assertEqual(info['release']['git_commit_hash'],
                     '49dc505febb8f4cccb2078c58ded0de3320534d7')
    self.assertEqual(info['release']['timestamp'], 1445022818884)
    self.assertEqual(info['release']['git_commit_message'], 'added username for testing')
    self.assertEqual(info['release']['version'], '0.0.1')
    self.assertEqual(info['release']['narrative_methods'], ['send_data'])
    self.assertEqual(info['beta']['git_commit_hash'],
                     'b843888e962642d665a3b0bd701ee630c01835e6')
    self.assertEqual(info['beta']['timestamp'], 1445023985597)
    self.assertEqual(info['beta']['git_commit_message'], 'update for testing')
    self.assertEqual(info['beta']['version'], '0.0.1')
    self.assertEqual(info['beta']['narrative_methods'], ['send_data'])
    self.assertEqual(info['dev']['git_commit_hash'],
                     'b06c5f9daf603a4d206071787c3f6184000bf128')
    self.assertEqual(info['dev']['timestamp'], 1445024094055)
    self.assertEqual(info['dev']['git_commit_message'], 'another change')
    self.assertEqual(info['dev']['version'], '0.0.1')
    self.assertEqual(info['dev']['narrative_methods'], ['send_data'])
    # Lookup by git_url; this module was never released, so 'release' is None.
    info = self.catalog.get_module_info(self.cUtil.anonymous_ctx(),
                                        {
                                            'git_url': 'https://github.com/kbaseIncubator/pending_Release'
                                        })[0]
    self.assertEqual(info['module_name'], 'pending_first_release')
    self.assertEqual(info['git_url'], 'https://github.com/kbaseIncubator/pending_Release')
    # NOTE: the leading space in ' something' matches the fixture data exactly.
    self.assertEqual(info['description'], ' something')
    self.assertEqual(info['owners'], ['kbasetest'])
    self.assertEqual(info['language'], 'perl')
    self.assertTrue(info['release'] is None)
def test_get_module_version(self):
    """get_module_version lookups by default tag, git_url, release tag, git
    commit hash, exact semantic version and semantic-version spec, plus the
    documented error cases.

    Five different selectors resolve to the same released 0.0.3 version of
    'release_history', so its field-by-field check is factored into a helper
    instead of being repeated inline.
    """
    ctx = self.cUtil.anonymous_ctx

    def assert_release_0_0_3(version):
        # The released 0.0.3 version of 'release_history' from the fixture.
        self.assertEqual(version['timestamp'], 1445022818884)
        self.assertEqual(version['version'], "0.0.3")
        self.assertEqual(version['narrative_methods'], ['send_data'])
        self.assertEqual(version['local_functions'], [])
        self.assertEqual(version['module_language'], 'python')
        self.assertEqual(version['module_name'], 'release_history')
        self.assertEqual(version['notes'], '')
        self.assertEqual(version['registration_id'], '1445022818884_4123')
        self.assertEqual(version['release_tags'], ['release'])
        self.assertEqual(version['release_timestamp'], 1445022818884)
        self.assertEqual(version['docker_img_name'],
                         'dockerhub-ci.kbase.us/kbase:release_history.49dc505febb8f4cccb2078c58ded0de3320534d7')
        self.assertEqual(version['dynamic_service'], 0)
        self.assertEqual(version['git_commit_hash'], "49dc505febb8f4cccb2078c58ded0de3320534d7")
        self.assertEqual(version['git_commit_message'], "added username for testing")
        self.assertEqual(version['git_url'], "https://github.com/kbaseIncubator/release_history")

    # fetch without version info, should return latest release version
    assert_release_0_0_3(self.catalog.get_module_version(
        ctx(), {'module_name': 'release_history'})[0])
    # should work based on git_url as well
    assert_release_0_0_3(self.catalog.get_module_version(
        ctx(), {'git_url': 'https://github.com/kbaseIncubator/release_history'})[0])

    # get a specific tag
    version = self.catalog.get_module_version(ctx(),
                                              {'module_name': 'release_history',
                                               'version': 'dev',
                                               'include_module_description': 0,
                                               'include_compilation_report': 0})[0]
    self.assertEqual(version['timestamp'], 1445024094055)
    self.assertEqual(version['version'], "0.0.5")
    self.assertEqual(version['narrative_methods'], ['send_data2'])
    self.assertEqual(version['local_functions'], [])
    self.assertEqual(version['module_language'], 'python')
    self.assertEqual(version['module_name'], 'release_history')
    self.assertEqual(version['notes'], '')
    self.assertEqual(version['registration_id'], '1445024094055_4123')
    self.assertEqual(version['release_tags'], ['dev'])
    # never released, so no release timestamp
    self.assertEqual(version['release_timestamp'], None)
    self.assertEqual(version['docker_img_name'],
                     'dockerhub-ci.kbase.us/kbase:release_history.b06c5f9daf603a4d206071787c3f6184000bf128')
    self.assertEqual(version['dynamic_service'], 0)
    self.assertEqual(version['git_commit_hash'], "b06c5f9daf603a4d206071787c3f6184000bf128")
    self.assertEqual(version['git_commit_message'], "another change")
    self.assertEqual(version['git_url'], "https://github.com/kbaseIncubator/release_history")

    # get by git commit hash
    assert_release_0_0_3(self.catalog.get_module_version(
        ctx(), {'module_name': 'release_history',
                'version': '49dc505febb8f4cccb2078c58ded0de3320534d7',
                'include_module_description': 0,
                'include_compilation_report': 0})[0])
    # get by exact semantic version
    assert_release_0_0_3(self.catalog.get_module_version(
        ctx(), {'module_name': 'release_history',
                'version': '0.0.3',
                'include_module_description': 0,
                'include_compilation_report': 0})[0])
    # get by semantic version spec
    assert_release_0_0_3(self.catalog.get_module_version(
        ctx(), {'module_name': 'release_history',
                'version': '>0.0.1',
                'include_module_description': 0,
                'include_compilation_report': 0})[0])

    # handle some error cases: each bad query raises ValueError with a
    # specific message
    error_cases = [
        # need parameters to work
        ({}, 'Missing required fields git_url or module_name'),
        # cannot find the right module
        ({'module_name': 'made_up_module'},
         'Module cannot be found based on module_name or git_url parameters.'),
        ({'git_url': 'not_a_url'},
         'Module cannot be found based on module_name or git_url parameters.'),
        # no release version exists
        ({'module_name': 'pending_first_release', 'version': 'release'},
         'No module version found that matches your criteria!'),
        # does not have any valid versions
        ({'git_url': 'https://github.com/jplfaria/ElectronicAnnotationMethods-'},
         'Module was never properly registered, and has no available versions.'),
        # didn't give a proper version matcher
        ({'module_name': 'pending_first_release', 'version': 'the best version'},
         'No module version found that matches your criteria!'),
    ]
    for params, message in error_cases:
        with self.assertRaises(ValueError) as e:
            self.catalog.get_module_version(ctx(), params)
        self.assertEqual(str(e.exception), message)
def test_get_version_info(self):
    """get_version_info by tag, timestamp and git commit hash — including
    archived (historical) versions — plus failures for mismatched criteria.

    The current released 0.0.3 version of 'release_history' and the
    'no version found' failure are each asserted many times, so both are
    factored into helpers instead of being repeated inline.
    """
    ctx = self.cUtil.anonymous_ctx

    def assert_release_0_0_3(vinfo):
        # The current 'release'-tagged version of release_history.
        self.assertEqual(vinfo['git_commit_hash'], "49dc505febb8f4cccb2078c58ded0de3320534d7")
        self.assertEqual(vinfo['timestamp'], 1445022818884)
        self.assertEqual(vinfo['git_commit_message'], "added username for testing")
        self.assertEqual(vinfo['version'], "0.0.3")
        self.assertEqual(vinfo['narrative_methods'], ['send_data'])

    def assert_archived_0_0_2(vinfo):
        # An older version retained only in the release history.
        self.assertEqual(vinfo['git_commit_hash'], "d6cd1e2bd19e03a81132a23b2025920577f84e37")
        self.assertEqual(vinfo['timestamp'], 1445022818000)
        self.assertEqual(vinfo['git_commit_message'], "something else")
        self.assertEqual(vinfo['version'], "0.0.2")
        self.assertEqual(vinfo['narrative_methods'], ['send_data'])

    def assert_no_match(params):
        # A query with conflicting selectors fails with the standard message.
        with self.assertRaises(ValueError) as e:
            self.catalog.get_version_info(ctx(), params)
        self.assertEqual(str(e.exception),
                         'No version found that matches all your criteria!')

    vinfo = self.catalog.get_version_info(ctx(), {'module_name': 'release_history',
                                                  'version': 'dev'})[0]
    self.assertEqual(vinfo['git_commit_hash'], "b06c5f9daf603a4d206071787c3f6184000bf128")
    self.assertEqual(vinfo['timestamp'], 1445024094055)
    self.assertEqual(vinfo['git_commit_message'], "another change")
    self.assertEqual(vinfo['version'], "0.0.5")
    self.assertEqual(vinfo['narrative_methods'], ['send_data2'])
    vinfo = self.catalog.get_version_info(ctx(), {'module_name': 'release_history',
                                                  'version': 'beta'})[0]
    self.assertEqual(vinfo['git_commit_hash'], "b843888e962642d665a3b0bd701ee630c01835e6")
    self.assertEqual(vinfo['timestamp'], 1445023985597)
    self.assertEqual(vinfo['git_commit_message'], "update for testing")
    self.assertEqual(vinfo['version'], "0.0.4")
    self.assertEqual(vinfo['narrative_methods'], ['send_data'])
    # 'release' tag, optionally constrained by the matching hash/timestamp
    assert_release_0_0_3(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'version': 'release'})[0])
    assert_release_0_0_3(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'version': 'release',
                'git_commit_hash': '49dc505febb8f4cccb2078c58ded0de3320534d7'})[0])
    assert_release_0_0_3(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'version': 'release',
                'timestamp': 1445022818884})[0])
    assert_release_0_0_3(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'version': 'release',
                'git_commit_hash': '49dc505febb8f4cccb2078c58ded0de3320534d7',
                'timestamp': 1445022818884})[0])
    # wrong version set
    with self.assertRaises(ValueError) as e:
        self.catalog.get_version_info(ctx(), {'module_name': 'release_history',
                                              'version': 'not_a_Version'})
    self.assertEqual(str(e.exception),
                     'invalid version selection, valid versions are: "dev" | "beta" | "release"')
    # test wrong git commit hash
    assert_no_match({'module_name': 'release_history', 'version': 'release',
                     'git_commit_hash': 'b06c5f9daf603a4d206071787c3f6184000bf128'})
    # test wrong timestamp
    assert_no_match({'module_name': 'release_history', 'version': 'release',
                     'timestamp': 1445024094055})
    # right git commit, wrong timestamp
    assert_no_match({'module_name': 'release_history', 'version': 'release',
                     'git_commit_hash': 'b06c5f9daf603a4d206071787c3f6184000bf128',
                     'timestamp': 1445024094055})
    # right timestamp, wrong git commit hash
    assert_no_match({'module_name': 'release_history', 'version': 'release',
                     'git_commit_hash': 'b843888e962642d665a3b0bd701ee630c01835e6',
                     'timestamp': 1445022818884})
    #########
    # now we test with a timestamp retrieval, first from one of the currents
    assert_release_0_0_3(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'timestamp': 1445022818884})[0])
    assert_release_0_0_3(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'timestamp': 1445022818884,
                'git_commit_hash': "49dc505febb8f4cccb2078c58ded0de3320534d7"})[0])
    assert_no_match({'module_name': 'release_history', 'timestamp': 1445022818884,
                     'git_commit_hash': "49dc505febb8f4cccb2078c51ded0de3320534d7"})
    # now with something in the history
    assert_archived_0_0_2(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'timestamp': 1445022818000})[0])
    assert_archived_0_0_2(self.catalog.get_version_info(
        ctx(), {'module_name': 'release_history', 'timestamp': 1445022818000,
                'git_commit_hash': "d6cd1e2bd19e03a81132a23b2025920577f84e37"})[0])
    # test wrong timestamp
    assert_no_match({'module_name': 'release_history', 'timestamp': 1445024094078})
    assert_no_match({'module_name': 'release_history', 'timestamp': 1445022818000,
                     'git_commit_hash': "49dc505febb8f4cccb2078c51ded0de3320534d7"})
    #########
    # now we test with a git commit hash retrieval, first from one of the currents
    vinfo = self.catalog.get_version_info(ctx(), {
        'module_name': 'release_history',
        'git_commit_hash': 'b843888e962642d665a3b0bd701ee630c01835e6'})[0]
    self.assertEqual(vinfo['git_commit_hash'], "b843888e962642d665a3b0bd701ee630c01835e6")
    self.assertEqual(vinfo['timestamp'], 1445023985597)
    self.assertEqual(vinfo['git_commit_message'], "update for testing")
    self.assertEqual(vinfo['version'], "0.0.4")
    self.assertEqual(vinfo['narrative_methods'], ['send_data'])
    # next from something in the history
    vinfo = self.catalog.get_version_info(ctx(), {
        'module_name': 'release_history',
        'git_commit_hash': '9bedf67800b2923982bdf60c89c57ce6fd2d9a1c'})[0]
    self.assertEqual(vinfo['git_commit_hash'], "9bedf67800b2923982bdf60c89c57ce6fd2d9a1c")
    self.assertEqual(vinfo['timestamp'], 1445022815000)
    self.assertEqual(vinfo['git_commit_message'], "and another thing")
    self.assertEqual(vinfo['version'], "0.0.1")
    self.assertEqual(vinfo['narrative_methods'], ['send_data'])
    # test wrong git commit hash
    assert_no_match({'module_name': 'release_history',
                     'git_commit_hash': 'b06c5f9daf603a4d202071787c3f6184000bf128'})
def test_list_released_module_versions(self):
    """Released-version history is returned in ascending timestamp order;
    a never-released module yields an empty list.
    """
    # history should return versions sorted by timestamp
    history = self.catalog.list_released_module_versions(self.cUtil.anonymous_ctx(),
                                                         {'module_name': 'release_history'})[0]
    # assertEqual (not assertTrue on a comparison) so a failure reports the
    # actual length instead of just "False is not true"
    self.assertEqual(len(history), 3)
    self.assertEqual(history[0]['git_commit_hash'], '9bedf67800b2923982bdf60c89c57ce6fd2d9a1c')
    self.assertEqual(history[0]['timestamp'], 1445022815000)
    self.assertEqual(history[0]['version'], '0.0.1')
    self.assertEqual(history[0]['narrative_methods'], ['send_data'])
    self.assertEqual(history[1]['git_commit_hash'], 'd6cd1e2bd19e03a81132a23b2025920577f84e37')
    self.assertEqual(history[1]['timestamp'], 1445022818000)
    self.assertEqual(history[1]['version'], '0.0.2')
    self.assertEqual(history[2]['git_commit_hash'], '49dc505febb8f4cccb2078c58ded0de3320534d7')
    self.assertEqual(history[2]['timestamp'], 1445022818884)
    self.assertEqual(history[2]['version'], '0.0.3')
    # a module with exactly one release
    history = self.catalog.list_released_module_versions(self.cUtil.anonymous_ctx(),
                                                         {'module_name': 'onerepotest'})[0]
    self.assertEqual(len(history), 1)
    self.assertEqual(history[0]['git_commit_hash'], '49dc505febb8f4cccb2078c58ded0de3320534d7')
    self.assertEqual(history[0]['git_commit_message'], 'added username for testing')
    self.assertEqual(history[0]['timestamp'], 1445022818884)
    self.assertEqual(history[0]['version'], '0.0.1')
    self.assertEqual(history[0]['narrative_methods'], ['send_data'])
    # a module that was never released has an empty history
    history = self.catalog.list_released_module_versions(self.cUtil.anonymous_ctx(),
                                                         {
                                                             'git_url': 'https://github.com/kbaseIncubator/pending_Release'
                                                         })[0]
    self.assertEqual(history, [])
@classmethod
def setUpClass(cls):
    """Build the shared test-data fixture and Catalog instance once per class.

    All tests in this class are read-only, so sharing one fixture is safe.
    """
    print('++++++++++++ RUNNING basic_catalog_test.py +++++++++++')
    cls.cUtil = CatalogTestUtil('.')  # TODO: pass in test directory from outside
    cls.cUtil.setUp()
    cls.catalog = Catalog(cls.cUtil.getCatalogConfig())
@classmethod
def tearDownClass(cls):
    """Dispose of the shared fixture created in setUpClass."""
    cls.cUtil.tearDown()
| 59.971622
| 127
| 0.556029
| 3,880
| 44,379
| 6.155155
| 0.066753
| 0.163931
| 0.082908
| 0.058035
| 0.85621
| 0.825475
| 0.785361
| 0.730801
| 0.727326
| 0.709614
| 0
| 0.06826
| 0.328894
| 44,379
| 739
| 128
| 60.052774
| 0.733606
| 0.026274
| 0
| 0.637363
| 0
| 0
| 0.282385
| 0.066287
| 0
| 0
| 0
| 0.001353
| 0.464678
| 1
| 0.017268
| false
| 0
| 0.006279
| 0
| 0.025118
| 0.00157
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b786a27c54aadfe89afb1251ddec14e18ad00557
| 35,893
|
py
|
Python
|
models/Global-Flow-Local-Attention/model/networks/generator.py
|
xianjian-xie/pose-generation
|
ad0495e80c6fe1e7690fa8691f1eb11b4e9bca32
|
[
"MIT"
] | null | null | null |
models/Global-Flow-Local-Attention/model/networks/generator.py
|
xianjian-xie/pose-generation
|
ad0495e80c6fe1e7690fa8691f1eb11b4e9bca32
|
[
"MIT"
] | null | null | null |
models/Global-Flow-Local-Attention/model/networks/generator.py
|
xianjian-xie/pose-generation
|
ad0495e80c6fe1e7690fa8691f1eb11b4e9bca32
|
[
"MIT"
] | null | null | null |
import re
import torch
import torch.nn as nn
import torch.nn.functional as F
from model.networks.base_network import BaseNetwork
from model.networks.resample2d_package.resample2d import Resample2d
from model.networks.base_function import *
from torch.nn.utils.spectral_norm import spectral_norm as SpectralNorm
######################################################################################################
# Human Pose Image Generation
######################################################################################################
class PoseGenerator(BaseNetwork):
    """Pose-guided person image generator.

    Composes three sub-networks:
      - ``source``:   PoseSourceNet, encodes the source image into a feature list.
      - ``flow_net``: PoseFlowNet, predicts flow fields and masks from the
        source image and the source/target pose structures.
      - ``target``:   PoseTargetNet, synthesizes the target image from the
        target pose, the source features, and the predicted flows/masks.
    """

    def __init__(self, image_nc=3, structure_nc=19, output_nc=3, ngf=64, img_f=1024, layers=6, num_blocks=2,
                 norm='batch', activation='ReLU', attn_layer=None, extractor_kz=None, use_spect=True, use_coord=False):
        super(PoseGenerator, self).__init__()
        # Fix: the original defaults were mutable objects ([1,2] / {'1':5,'2':5}),
        # which Python shares across all calls; use None sentinels and materialize
        # the same default values per instance instead.
        attn_layer = [1, 2] if attn_layer is None else attn_layer
        extractor_kz = {'1': 5, '2': 5} if extractor_kz is None else extractor_kz
        self.source = PoseSourceNet(image_nc, ngf, img_f, layers,
                                    norm, activation, use_spect, use_coord)
        self.target = PoseTargetNet(image_nc, structure_nc + 1, output_nc, ngf, img_f, layers, num_blocks,
                                    norm, activation, attn_layer, extractor_kz, use_spect, use_coord)
        self.flow_net = PoseFlowNet(image_nc, structure_nc, ngf=32, img_f=256, encoder_layer=5,
                                    attn_layer=attn_layer, norm=norm, activation=activation,
                                    use_spect=use_spect, use_coord=use_coord)

    def forward(self, source, source_B, target_B, target_M):
        """Generate the target image; also returns the predicted flows/masks."""
        feature_list = self.source(source)
        flow_fields, masks = self.flow_net(source, source_B, target_B, target_M)
        image_gen = self.target(target_B, target_M, feature_list, flow_fields, masks)
        return image_gen, flow_fields, masks

    def forward_hook_function(self, source, source_B, target_B, target_M):
        """Like forward(), but returns the target network's hooked intermediates
        (target/source features, attention, mask) instead of the generated image.
        """
        feature_list = self.source(source)
        flow_fields, masks = self.flow_net(source, source_B, target_B, target_M)
        hook_target, hook_source, hook_attn, hook_mask = self.target.forward_hook_function(target_B, target_M, feature_list, flow_fields, masks)
        return hook_target, hook_source, hook_attn, hook_mask
class PoseSourceNet(BaseNetwork):
    """Encoder for the source (appearance) image.

    Builds ``layers`` EncoderBlocks and returns all intermediate feature
    maps, including the raw input, ordered deepest-first.
    """

    def __init__(self, input_nc=3, ngf=64, img_f=1024, layers=6, norm='batch',
                 activation='ReLU', use_spect=True, use_coord=False):
        super(PoseSourceNet, self).__init__()
        self.layers = layers
        norm_layer = get_norm_layer(norm_type=norm)
        nonlinearity = get_nonlinearity_layer(activation_type=activation)

        # encoder part CONV_BLOCKS
        # First block maps raw image channels to ngf feature maps.
        self.block0 = EncoderBlock(input_nc, ngf, norm_layer,
                                   nonlinearity, use_spect, use_coord)
        mult = 1
        for i in range(layers-1):
            mult_prev = mult
            # Double channel width per level, capped at img_f // ngf.
            mult = min(2 ** (i + 1), img_f//ngf)
            block = EncoderBlock(ngf*mult_prev, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'encoder' + str(i), block)

    def forward(self, source):
        # The raw input is kept as the finest "feature" level.
        feature_list=[source]
        out = self.block0(source)
        feature_list.append(out)
        for i in range(self.layers-1):
            model = getattr(self, 'encoder' + str(i))
            out = model(out)
            feature_list.append(out)
        # Reverse so the deepest (coarsest) feature comes first, matching
        # the indexing used by the target decoders.
        feature_list = list(reversed(feature_list))
        return feature_list
class PoseTargetNet(BaseNetwork):
    """Target decoder: renders the output image from the target structure
    map plus source features warped via flow-guided attention.

    Encoder levels mirror PoseSourceNet; at each level listed in
    ``attn_layer`` an ExtractorAttn module blends warped source features
    into the decoder stream using the predicted soft masks.
    """

    def __init__(self, image_nc=3, structure_nc=19, output_nc=3, ngf=64, img_f=1024, layers=6, num_blocks=2,
                 norm='batch', activation='ReLU', attn_layer=[1,2], extractor_kz={'1':5,'2':5}, use_spect=True, use_coord=False):
        super(PoseTargetNet, self).__init__()
        self.layers = layers
        self.attn_layer = attn_layer

        norm_layer = get_norm_layer(norm_type=norm)
        nonlinearity = get_nonlinearity_layer(activation_type=activation)

        self.block0 = EncoderBlock(structure_nc, ngf, norm_layer,
                                   nonlinearity, use_spect, use_coord)
        mult = 1
        for i in range(layers-1):
            mult_prev = mult
            mult = min(2 ** (i + 1), img_f//ngf)
            block = EncoderBlock(ngf*mult_prev, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'encoder' + str(i), block)

        # decoder part
        mult = min(2 ** (layers-1), img_f//ngf)
        for i in range(layers):
            mult_prev = mult
            mult = min(2 ** (layers-i-2), img_f//ngf) if i != layers-1 else 1
            if num_blocks == 1:
                up = nn.Sequential(ResBlockDecoder(ngf*mult_prev, ngf*mult, None, norm_layer,
                                                   nonlinearity, use_spect, use_coord))
            else:
                up = nn.Sequential(ResBlocks(num_blocks-1, ngf*mult_prev, None, None, norm_layer,
                                             nonlinearity, False, use_spect, use_coord),
                                   ResBlockDecoder(ngf*mult_prev, ngf*mult, None, norm_layer,
                                                   nonlinearity, use_spect, use_coord))
            setattr(self, 'decoder' + str(i), up)

            if layers-i in attn_layer:
                attn = ExtractorAttn(ngf*mult_prev, extractor_kz[str(layers-i)], nonlinearity, softmax=True)
                setattr(self, 'attn' + str(i), attn)

        self.outconv = Output(ngf, output_nc, 3, None, nonlinearity, use_spect, use_coord)

    def forward(self, target_B, target_M, source_feature, flow_fields, masks):
        # Concatenate the structure map with the mask as an extra channel.
        target = torch.cat((target_B, target_M.unsqueeze(dim=1)), dim=1)
        out = self.block0(target)
        for i in range(self.layers-1):
            model = getattr(self, 'encoder' + str(i))
            out = model(out)

        counter = 0
        for i in range(self.layers):
            if self.layers-i in self.attn_layer:
                model = getattr(self, 'attn' + str(i))
                out_attn = model(source_feature[i], out, flow_fields[counter])
                # Soft-blend warped source features by the predicted mask.
                out = out*(1-masks[counter]) + out_attn*masks[counter]
                counter += 1
            model = getattr(self, 'decoder' + str(i))
            out = model(out)

        out_image = self.outconv(out)
        return out_image

    def forward_hook_function(self, target_B, target_M, source_feature, flow_fields, masks):
        """Like forward(), but also returns intermediate attention tensors.

        BUGFIX: PoseGenerator.forward_hook_function calls this with five
        arguments (including target_M), but the original signature took
        only four and fed target_B straight into block0 without the mask
        channel, so the call raised TypeError.  The signature and input
        construction now mirror forward().
        """
        hook_target = []
        hook_source = []
        hook_attn = []
        hook_mask = []
        target = torch.cat((target_B, target_M.unsqueeze(dim=1)), dim=1)
        out = self.block0(target)
        for i in range(self.layers-1):
            model = getattr(self, 'encoder' + str(i))
            out = model(out)

        counter = 0
        for i in range(self.layers):
            if self.layers-i in self.attn_layer:
                model = getattr(self, 'attn' + str(i))
                attn_param, out_attn = model.hook_attn_param(source_feature[i], out, flow_fields[counter])
                out = out*(1-masks[counter]) + out_attn*masks[counter]

                hook_target.append(out)
                hook_source.append(source_feature[i])
                hook_attn.append(attn_param)
                hook_mask.append(masks[counter])
                counter += 1
            model = getattr(self, 'decoder' + str(i))
            out = model(out)

        out_image = self.outconv(out)
        return hook_target, hook_source, hook_attn, hook_mask
class PoseFlowNet(nn.Module):
    """Estimate multi-scale flow fields and occlusion masks between a
    source image and a target structure map.

    For each pyramid level listed in ``attn_layer`` the decoder emits a
    2-channel flow field and a 1-channel sigmoid mask.
    """
    def __init__(self, image_nc, structure_nc, ngf=64, img_f=1024, encoder_layer=5, attn_layer=[1], norm='batch',
                 activation='ReLU', use_spect=True, use_coord=False):
        super(PoseFlowNet, self).__init__()
        self.encoder_layer = encoder_layer
        # Decode only down to the shallowest requested attention level.
        self.decoder_layer = encoder_layer - min(attn_layer)
        self.attn_layer = attn_layer
        norm_layer = get_norm_layer(norm_type=norm)
        nonlinearity = get_nonlinearity_layer(activation_type=activation)

        # Input: source image + source/target structure maps + 1 mask channel.
        input_nc = 2*structure_nc + image_nc + 1

        self.block0 = EncoderBlock(input_nc, ngf, norm_layer,
                                   nonlinearity, use_spect, use_coord)
        mult = 1
        for i in range(encoder_layer-1):
            mult_prev = mult
            mult = min(2 ** (i + 1), img_f//ngf)
            block = EncoderBlock(ngf*mult_prev, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'encoder' + str(i), block)

        for i in range(self.decoder_layer):
            mult_prev = mult
            mult = min(2 ** (encoder_layer-i-2), img_f//ngf) if i != encoder_layer-1 else 1

            up = ResBlockDecoder(ngf*mult_prev, ngf*mult, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'decoder' + str(i), up)

            # Skip connection from the encoder level at this resolution.
            jumpconv = Jump(ngf*mult, ngf*mult, 3, None, nonlinearity, use_spect, use_coord)
            setattr(self, 'jump' + str(i), jumpconv)

            if encoder_layer-i-1 in attn_layer:
                # 2-channel flow head for this pyramid level.
                flow_out = nn.Conv2d(ngf*mult, 2, kernel_size=3,stride=1,padding=1,bias=True)
                setattr(self, 'output' + str(i), flow_out)
                # 1-channel soft occlusion mask in [0, 1].
                flow_mask = nn.Sequential(nn.Conv2d(ngf*mult, 1, kernel_size=3,stride=1,padding=1,bias=True),
                                          nn.Sigmoid())
                setattr(self, 'mask' + str(i), flow_mask)

    def forward(self, source, source_B, target_B, target_M):
        flow_fields=[]
        masks=[]
        # target_M is expected without a channel axis; add one before concat.
        inputs = torch.cat((source, source_B, target_B, target_M.unsqueeze(dim=1)), 1)
        out = self.block0(inputs)
        result=[out]
        for i in range(self.encoder_layer-1):
            model = getattr(self, 'encoder' + str(i))
            out = model(out)
            result.append(out)
        for i in range(self.decoder_layer):
            model = getattr(self, 'decoder' + str(i))
            out = model(out)

            # Fuse the same-resolution encoder feature via the jump conv.
            model = getattr(self, 'jump' + str(i))
            jump = model(result[self.encoder_layer-i-2])
            out = out+jump

            if self.encoder_layer-i-1 in self.attn_layer:
                flow_field, mask = self.attn_output(out, i)
                flow_fields.append(flow_field)
                masks.append(mask)

        return flow_fields, masks

    def attn_output(self, out, i):
        # Apply the per-level flow and mask heads registered in __init__.
        model = getattr(self, 'output' + str(i))
        flow = model(out)
        model = getattr(self, 'mask' + str(i))
        mask = model(out)

        return flow, mask
class PoseFlowNetGenerator(BaseNetwork):
    """Standalone wrapper exposing PoseFlowNet as a generator module
    (used to train the flow estimator in isolation).
    """

    def __init__(self, image_nc=3, structure_nc=19, output_nc=3, ngf=64, img_f=1024, layers=6, norm='batch',
                 activation='ReLU', encoder_layer=5, attn_layer=[1,2], use_spect=True, use_coord=False):
        super(PoseFlowNetGenerator, self).__init__()
        self.layers = layers
        self.attn_layer = attn_layer

        self.flow_net = PoseFlowNet(image_nc, structure_nc, ngf, img_f,
                                    encoder_layer, attn_layer=attn_layer,
                                    norm=norm, activation=activation,
                                    use_spect=use_spect, use_coord=use_coord)

    def forward(self, source, source_B, target_B, target_M=None):
        """Return (flow_fields, masks) from the wrapped PoseFlowNet.

        BUGFIX: PoseFlowNet.forward requires a fourth argument
        (``target_M``); the original call passed only three and always
        raised TypeError.  ``target_M`` is accepted here (default None
        for signature compatibility) and forwarded; callers must supply
        a real mask tensor, as PoseFlowNet unsqueezes it.
        """
        flow_fields, masks = self.flow_net(source, source_B, target_B, target_M)
        return flow_fields, masks
######################################################################################################
# Pose-Guided Person Image Animation
######################################################################################################
class DanceGenerator(BaseNetwork):
    """Autoregressive pose-guided animation generator.

    For every frame in ``BP_frame_step`` it warps features from both the
    previously generated frame and a fixed reference frame, interleaves
    the resulting flow/mask pyramids (previous first, reference second at
    each level), and feeds them to a FaceTargetNet decoder.  The
    generated frame becomes the "previous" frame for the next step.
    """

    def __init__(self, image_nc=3, structure_nc=18, output_nc=3, ngf=64, img_f=1024, layers=6, num_blocks=2,
                 norm='batch', activation='ReLU', attn_layer=[1,2], extractor_kz={'1':5,'2':5}, use_spect=True, use_coord=False):
        super(DanceGenerator, self).__init__()
        # Separate appearance encoders for previous and reference frames.
        self.source_previous = PoseSourceNet(image_nc, ngf, img_f, layers,
                                             norm, activation, use_spect, use_coord)
        self.source_reference = PoseSourceNet(image_nc, ngf, img_f, layers,
                                              norm, activation, use_spect, use_coord)
        self.target = FaceTargetNet(image_nc, structure_nc, output_nc, ngf, img_f, layers, num_blocks,
                                    norm, activation, attn_layer, extractor_kz, use_spect, use_coord)

        # Flow nets use their own norm/activation regardless of the
        # decoder settings.
        flow_norm, flow_activation = 'instance', 'LeakyReLU'
        self.flow_net_previous = PoseFlowNet(image_nc, structure_nc, ngf=32, img_f=256, encoder_layer=5,
                                             attn_layer=attn_layer, norm=flow_norm, activation=flow_activation,
                                             use_spect=use_spect, use_coord=use_coord)
        self.flow_net_reference = PoseFlowNet(image_nc, structure_nc, ngf=32, img_f=256, encoder_layer=5,
                                              attn_layer=attn_layer, norm=flow_norm, activation=flow_activation,
                                              use_spect=use_spect, use_coord=use_coord)

    def forward(self, BP_frame_step, P_reference, BP_reference, P_previous, BP_previous):
        n_frames_load = BP_frame_step.size(1)
        out_image_gen, out_flow_fields, out_masks, P_previous_recoder = [], [], [], []

        for frame_idx in range(n_frames_load):
            BP = BP_frame_step[:, frame_idx, ...]
            # Bootstrap the first step from the reference frame.
            P_previous = P_reference if P_previous is None else P_previous
            BP_previous = BP_reference if BP_previous is None else BP_previous
            P_previous_recoder.append(P_previous)

            previous_feature_list = self.source_previous(P_previous)
            reference_feature_list = self.source_reference(P_reference)

            # NOTE(review): PoseFlowNet.forward takes (source, source_B,
            # target_B, target_M); these calls pass only three arguments —
            # confirm the intended flow-net signature.
            flow_fields_p, masks_p = self.flow_net_previous(P_previous, BP_previous, BP)
            flow_fields_r, masks_r = self.flow_net_reference(P_reference, BP_reference, BP)

            # BUGFIX: the original inner loop reused the outer loop
            # variable `i`, a shadowing hazard; use a distinct index.
            flow, mask = [], []
            for level in range(len(flow_fields_p)):
                flow.append(flow_fields_p[level])
                flow.append(flow_fields_r[level])
                mask.append(masks_p[level])
                mask.append(masks_r[level])

            image_gen = self.target(BP, previous_feature_list, reference_feature_list, flow, mask)

            # The generated frame seeds the next autoregressive step.
            P_previous = image_gen
            BP_previous = BP

            out_image_gen.append(image_gen)
            out_flow_fields.append(flow)
            out_masks.append(mask)
        return out_image_gen, out_flow_fields, out_masks, P_previous_recoder
class KPInput2DGenerator(BaseNetwork):
    """Thin generator wrapper around KPInputNet2D."""

    def __init__(self, structure_nc=18, channels=256, layers=4):
        super(KPInput2DGenerator, self).__init__()
        self.kp_input = KPInputNet2D(keypoint_nc=structure_nc, channels=channels, layers=layers)

    def forward(self, input_2d):
        # Delegate straight to the wrapped keypoint network.
        return self.kp_input(input_2d)
class KPInputNet2D(BaseNetwork):
    """Temporal 1-D convolutional network over 2-D keypoint sequences.

    Processes a (batch, keypoint_nc*2, time) tensor with a stack of
    dilated Conv1d blocks (dilation grows by kernel_size per level) whose
    normalization is conditioned on a global feature vector, and shrinks
    back to keypoint_nc*2 channels.
    """
    def __init__(self, keypoint_nc=25, channels=256, layers=3, dropout=0.15, image_size=(256,256)):
        super(KPInputNet2D, self).__init__()
        kernel_size=3
        self.keypoint_nc = keypoint_nc
        self.image_size = image_size
        self.layers = layers

        # Expand x/y keypoint channels into the working width.
        self.expand_conv = nn.Conv1d(keypoint_nc*2, channels, kernel_size, bias=False)
        self.expand_ln = LayerNorm1d(channels)
        # Project back to keypoint_nc*2 output channels.
        self.shrink = nn.Conv1d(channels, keypoint_nc*2, 1)
        self.drop = nn.Dropout(dropout)
        self.relu = nn.ReLU(inplace=True)
        self.lrelu = nn.LeakyReLU(0.1)

        layers_conv = []
        layers_ln = []

        # Per-level half-width shrinkage of the time axis (used by the
        # residual cropping in forward()).
        self.pad=[(kernel_size - 1) // 2]
        next_dilation = kernel_size
        for i in range(1, self.layers):
            self.pad.append((kernel_size - 1)*next_dilation // 2)
            # Dilated temporal conv followed by a 1x1 conv, each with a
            # feature-conditioned ADALN1d normalization.
            layers_conv.append(nn.Conv1d(channels,channels,kernel_size,
                                         dilation=next_dilation,bias=False))
            layers_ln.append(ADALN1d(channels, channels))
            layers_conv.append(nn.Conv1d(channels, channels, 1, dilation=1, bias=False))
            layers_ln.append(ADALN1d(channels, channels))
            next_dilation *= kernel_size

        self.layers_conv = nn.ModuleList(layers_conv)
        self.layers_ln = nn.ModuleList(layers_ln)

        # Strided convs that summarize the whole sequence into the global
        # conditioning feature (mean-pooled over time in forward()).
        self.feature_conv_1 = nn.Conv1d(keypoint_nc*2, channels, kernel_size, stride=2 ,bias=False)
        self.feature_conv_2 = nn.Conv1d(channels, channels, kernel_size, stride=2 ,bias=False)
        self.feature_conv_3 = nn.Conv1d(channels, channels, kernel_size, stride=2 ,bias=False)

    def forward(self, kp):
        # Global conditioning feature: three strided convs + temporal mean.
        feature = self.lrelu(self.feature_conv_1(kp))
        feature = self.lrelu(self.feature_conv_2(feature))
        feature = self.lrelu(self.feature_conv_3(feature))
        feature = torch.mean(feature, 2)

        x = self.drop(self.lrelu(self.expand_ln(self.expand_conv(kp))))
        for i in range(self.layers - 1):
            pad = self.pad[i+1]
            # Crop the residual so it matches the (unpadded) conv output.
            res = x[:, :, pad : x.shape[2] - pad]

            x = self.drop(self.lrelu(self.layers_ln[2*i](self.layers_conv[2*i](x), feature)))
            x = res + self.drop(self.lrelu(self.layers_ln[2*i+1](self.layers_conv[2*i+1](x), feature)))

        x = self.shrink(x)
        return x
######################################################################################################
# Face Image Generation
######################################################################################################
class FaceGenerator(BaseNetwork):
    """Autoregressive face animation generator.

    Like DanceGenerator, but a single FaceFlowNet jointly predicts the
    previous-frame and reference-frame flow/mask pyramids in one pass.
    """

    def __init__(self, image_nc=3, structure_nc=18, output_nc=3, ngf=64, img_f=1024, layers=6, num_blocks=2,
                 norm='batch', activation='ReLU', attn_layer=[1,2], extractor_kz={'1':5,'2':5}, use_spect=True, use_coord=False):
        super(FaceGenerator, self).__init__()
        self.source_previous = PoseSourceNet(image_nc, ngf, img_f, layers,
                                             norm, activation, use_spect, use_coord)
        self.source_reference = PoseSourceNet(image_nc, ngf, img_f, layers,
                                              norm, activation, use_spect, use_coord)
        self.target = FaceTargetNet(image_nc, structure_nc, output_nc, ngf, img_f, layers, num_blocks,
                                    norm, activation, attn_layer, extractor_kz, use_spect, use_coord)
        self.flow_net = FaceFlowNet(image_nc, structure_nc, ngf=32, img_f=256, encoder_layer=5,
                                    attn_layer=attn_layer, norm=norm, activation=activation,
                                    use_spect=use_spect, use_coord=use_coord)

    def forward(self, BP_frame_step, P_reference, BP_reference, P_previous, BP_previous):
        n_frames_load = BP_frame_step.size(1)
        out_image_gen, out_flow_fields, out_masks, P_previous_recoder = [], [], [], []

        # FIX: removed the no-op self-assignments of P_reference /
        # BP_reference present in the original loop body.
        for i in range(n_frames_load):
            BP = BP_frame_step[:,i,...]
            # Bootstrap the first step from the reference frame.
            P_previous = P_reference if P_previous is None else P_previous
            BP_previous = BP_reference if BP_previous is None else BP_previous
            P_previous_recoder.append(P_previous)

            previous_feature_list = self.source_previous(P_previous)
            reference_feature_list = self.source_reference(P_reference)

            flow_fields, masks = self.flow_net(BP, P_previous, BP_previous, P_reference, BP_reference)
            image_gen = self.target(BP, previous_feature_list, reference_feature_list, flow_fields, masks)

            # The generated frame seeds the next autoregressive step.
            P_previous = image_gen
            BP_previous = BP

            out_image_gen.append(image_gen)
            out_flow_fields.append(flow_fields)
            out_masks.append(masks)
        return out_image_gen, out_flow_fields, out_masks, P_previous_recoder
class FaceTargetNet(BaseNetwork):
    """Target decoder fusing two warped sources (previous + reference).

    Flow/mask lists are interleaved per level: index 2*counter refers to
    the previous frame, 2*counter+1 to the reference frame.
    """
    def __init__(self, image_nc=3, structure_nc=18, output_nc=3, ngf=64, img_f=1024, layers=6, num_blocks=2,
                 norm='batch', activation='ReLU', attn_layer=[1,2], extractor_kz={'1':5,'2':5}, use_spect=True, use_coord=False):
        super(FaceTargetNet, self).__init__()
        self.layers = layers
        self.attn_layer = attn_layer

        norm_layer = get_norm_layer(norm_type=norm)
        nonlinearity = get_nonlinearity_layer(activation_type=activation)

        self.block0 = EncoderBlock(structure_nc, ngf, norm_layer,
                                   nonlinearity, use_spect, use_coord)
        mult = 1
        for i in range(layers-1):
            mult_prev = mult
            mult = min(2 ** (i + 1), img_f//ngf)
            block = EncoderBlock(ngf*mult_prev, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'encoder' + str(i), block)

        # decoder part
        mult = min(2 ** (layers-1), img_f//ngf)
        for i in range(layers):
            mult_prev = mult
            mult = min(2 ** (layers-i-2), img_f//ngf) if i != layers-1 else 1
            if num_blocks == 1:
                up = nn.Sequential(ResBlockDecoder(ngf*mult_prev, ngf*mult, None, norm_layer,
                                                   nonlinearity, use_spect, use_coord))
            else:
                up = nn.Sequential(ResBlocks(num_blocks-1, ngf*mult_prev, None, None, norm_layer,
                                             nonlinearity, False, use_spect, use_coord),
                                   ResBlockDecoder(ngf*mult_prev, ngf*mult, None, norm_layer,
                                                   nonlinearity, use_spect, use_coord))
            setattr(self, 'decoder' + str(i), up)

            if layers-i in attn_layer:
                # Two independent attention modules per level: one for the
                # previous frame (attn_p) and one for the reference (attn_r).
                attn = ExtractorAttn(ngf*mult_prev, extractor_kz[str(layers-i)], nonlinearity, softmax=True)
                setattr(self, 'attn_p' + str(i), attn)

                attn = ExtractorAttn(ngf*mult_prev, extractor_kz[str(layers-i)], nonlinearity, softmax=True)
                setattr(self, 'attn_r' + str(i), attn)

        self.outconv = Output(ngf, output_nc, 3, None, nonlinearity, use_spect, use_coord)

    def forward(self, BP, previous_feature_list, reference_feature_list, flow_fields, masks):
        out = self.block0(BP)
        for i in range(self.layers-1):
            model = getattr(self, 'encoder' + str(i))
            out = model(out)

        counter=0
        for i in range(self.layers):
            if self.layers-i in self.attn_layer:
                model_p = getattr(self, 'attn_p' + str(i))
                model_r = getattr(self, 'attn_r' + str(i))

                # Even indices -> previous frame; odd -> reference frame.
                out_attn_p = model_p(previous_feature_list[i], out, flow_fields[2*counter])
                out_attn_r = model_r(reference_feature_list[i], out, flow_fields[2*counter+1])
                out_p = out*(1-masks[2*counter]) + out_attn_p*masks[2*counter]
                out_r = out*(1-masks[2*counter+1]) + out_attn_r*masks[2*counter+1]
                # Sum the two blended streams before decoding.
                out = out_p + out_r
                counter += 1
            model = getattr(self, 'decoder' + str(i))
            out = model(out)

        out_image = self.outconv(out)
        return out_image
class FaceFlowNet(nn.Module):
    """Joint flow estimator for previous and reference frames.

    Per attention level the flow head emits 4 channels (split into two
    2-channel flows) and the mask head 2 channels (split into two
    1-channel masks); outputs are interleaved previous-then-reference,
    matching FaceTargetNet's indexing.
    """
    def __init__(self, image_nc, structure_nc, ngf=64, img_f=1024, encoder_layer=5, attn_layer=[1], norm='batch',
                 activation='ReLU', use_spect=True, use_coord=False):
        super(FaceFlowNet, self).__init__()
        self.encoder_layer = encoder_layer
        # Decode only down to the shallowest requested attention level.
        self.decoder_layer = encoder_layer - min(attn_layer)
        self.attn_layer = attn_layer
        norm_layer = get_norm_layer(norm_type=norm)
        nonlinearity = get_nonlinearity_layer(activation_type=activation)

        # Input: target structure + previous image/structure + reference
        # image/structure, concatenated along channels.
        input_nc = 3*structure_nc + 2*image_nc

        self.block0 = EncoderBlock(input_nc, ngf, norm_layer,
                                   nonlinearity, use_spect, use_coord)
        mult = 1
        for i in range(encoder_layer-1):
            mult_prev = mult
            mult = min(2 ** (i + 1), img_f//ngf)
            block = EncoderBlock(ngf*mult_prev, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'encoder' + str(i), block)

        for i in range(self.decoder_layer):
            mult_prev = mult
            mult = min(2 ** (encoder_layer-i-2), img_f//ngf) if i != encoder_layer-1 else 1

            up = ResBlockDecoder(ngf*mult_prev, ngf*mult, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'decoder' + str(i), up)

            # Skip connection from the encoder level at this resolution.
            jumpconv = Jump(ngf*mult, ngf*mult, 3, None, nonlinearity, use_spect, use_coord)
            setattr(self, 'jump' + str(i), jumpconv)

            if encoder_layer-i-1 in attn_layer:
                # 4 flow channels: 2 for previous, 2 for reference.
                flow_out = nn.Conv2d(ngf*mult, 4, kernel_size=3,stride=1,padding=1,bias=True)
                setattr(self, 'output' + str(i), flow_out)
                # 2 mask channels: 1 for previous, 1 for reference.
                flow_mask = nn.Sequential(nn.Conv2d(ngf*mult, 2, kernel_size=3,stride=1,padding=1,bias=True),
                                          nn.Sigmoid())
                setattr(self, 'mask' + str(i), flow_mask)

    def forward(self, BP, P_previous, BP_previous, P_reference, BP_reference):
        flow_fields=[]
        masks=[]
        inputs = torch.cat((BP, P_previous, BP_previous, P_reference, BP_reference), 1)
        out = self.block0(inputs)
        result=[out]
        for i in range(self.encoder_layer-1):
            model = getattr(self, 'encoder' + str(i))
            out = model(out)
            result.append(out)
        for i in range(self.decoder_layer):
            model = getattr(self, 'decoder' + str(i))
            out = model(out)

            # Fuse the same-resolution encoder feature via the jump conv.
            model = getattr(self, 'jump' + str(i))
            jump = model(result[self.encoder_layer-i-2])
            out = out+jump

            if self.encoder_layer-i-1 in self.attn_layer:
                flow_field, mask = self.attn_output(out, i)
                # Split joint predictions into (previous, reference) pairs.
                flow_field_p, flow_field_r = torch.split(flow_field, 2, dim=1)
                mask_p, mask_r = torch.split(mask, 1, dim=1)
                flow_fields.append(flow_field_p)
                flow_fields.append(flow_field_r)
                masks.append(mask_p)
                masks.append(mask_r)

        return flow_fields, masks

    def attn_output(self, out, i):
        # Apply the per-level flow and mask heads registered in __init__.
        model = getattr(self, 'output' + str(i))
        flow = model(out)
        model = getattr(self, 'mask' + str(i))
        mask = model(out)

        return flow, mask
######################################################################################################
# Shape Net Image Generation (Multi-view synthesis)
######################################################################################################
class ShapeNetGenerator(BaseNetwork):
    """Multi-view synthesis generator for ShapeNet objects.

    Combines a source-view appearance encoder, a view-to-view flow
    estimator, and a view-conditioned target decoder.
    """

    def __init__(self, image_nc=3, structure_nc=18, output_nc=3, ngf=64, img_f=1024, layers=6, num_blocks=2,
                 norm='batch', activation='ReLU', attn_layer=[1,2], extractor_kz={'1':5,'2':5}, use_spect=True, use_coord=False):
        super(ShapeNetGenerator, self).__init__()
        self.layers = layers
        self.attn_layer = attn_layer

        # Reuses the pose source encoder for the input view.
        self.source = PoseSourceNet(image_nc, ngf, img_f, layers,
                                    norm, activation, use_spect, use_coord)
        self.target = ShapeNetTargetNet(image_nc, structure_nc, output_nc, ngf, img_f, layers, num_blocks,
                                        norm, activation, attn_layer, extractor_kz, use_spect, use_coord)
        # Lightweight flow estimator (ngf=32, img_f=256).
        self.flow_net = ShapeNetFlowNet(image_nc, structure_nc, 32, 256,
                                        encoder_layer=5, attn_layer=attn_layer,
                                        norm=norm, activation=activation,
                                        use_spect=use_spect, use_coord=use_coord)

    def forward(self, source, source_B, target_B):
        src_features = self.source(source)
        flows, occ_masks = self.flow_net(source, source_B, target_B)
        rendered = self.target(target_B, src_features, flows, occ_masks)
        return rendered, flows, occ_masks
class ShapeNetTargetNet(BaseNetwork):
    """Decoder-only target network for view synthesis.

    The target view code is spatially tiled (repeat 8x8), expanded by two
    ResBlockDecoders, then decoded with flow-guided attention against the
    source-view features.
    """
    def __init__(self, image_nc=3, structure_nc=18, output_nc=3, ngf=64, img_f=1024, layers=6, num_blocks=2,
                 norm='batch', activation='ReLU', attn_layer=[1,2], extractor_kz={'1':5,'2':5}, use_spect=True, use_coord=False):
        super(ShapeNetTargetNet, self).__init__()
        self.layers = layers
        self.attn_layer = attn_layer

        norm_layer = get_norm_layer(norm_type=norm)
        nonlinearity = get_nonlinearity_layer(activation_type=activation)

        self.block0 = ResBlockDecoder(structure_nc, ngf, None, norm_layer,
                                      nonlinearity, use_spect, use_coord)
        mult = min(2 ** (layers-1), img_f//ngf)
        self.block1 = ResBlockDecoder(ngf, ngf*mult, None, norm_layer,
                                      nonlinearity, use_spect, use_coord)

        for i in range(layers):
            mult_prev = mult
            mult = min(2 ** (layers-i-2), img_f//ngf) if i != layers-1 else 1
            if num_blocks == 1:
                up = nn.Sequential(ResBlockDecoder(ngf*mult_prev, ngf*mult, None, norm_layer,
                                                   nonlinearity, use_spect, use_coord))
            else:
                up = nn.Sequential(ResBlocks(num_blocks-1, ngf*mult_prev, None, None, norm_layer,
                                             nonlinearity, False, use_spect, use_coord),
                                   ResBlockDecoder(ngf*mult_prev, ngf*mult, None, norm_layer,
                                                   nonlinearity, use_spect, use_coord))
            setattr(self, 'decoder' + str(i), up)

            if layers-i in attn_layer:
                attn = ExtractorAttn(ngf*mult_prev, extractor_kz[str(layers-i)], nonlinearity, softmax=True)
                setattr(self, 'attn' + str(i), attn)

        self.outconv = Output(ngf, output_nc, 3, None, nonlinearity, use_spect, use_coord)

    def forward(self, target_B, source_feature, flow_fields, masks):
        # Tile the view code into an 8x larger spatial map before decoding.
        target_B = target_B.repeat(1, 1, 8, 8)
        out = self.block0(target_B)
        out = self.block1(out)

        counter=0
        for i in range(self.layers):
            if self.layers-i in self.attn_layer:
                model = getattr(self, 'attn' + str(i))
                out_attn = model(source_feature[i], out, flow_fields[counter])
                # Soft-blend warped source features by the predicted mask.
                out = out*(1-masks[counter]) + out_attn*masks[counter]
                counter += 1
            model = getattr(self, 'decoder' + str(i))
            out = model(out)

        out_image = self.outconv(out)
        return out_image
class ShapeNetFlowNet(nn.Module):
    """Flow estimator for view synthesis.

    Encodes the source image only; the source/target view codes are
    injected at the bottleneck via encode_ShapeNet_bone (as a tiled
    difference map), after which the decoder predicts per-level flow
    fields and sigmoid masks.
    """
    def __init__(self, image_nc, structure_nc, ngf=64, img_f=1024, encoder_layer=5, attn_layer=[1], norm='batch',
                 activation='ReLU', use_spect=True, use_coord=False):
        super(ShapeNetFlowNet, self).__init__()
        self.encoder_layer = encoder_layer
        # Decode only down to the shallowest requested attention level.
        self.decoder_layer = encoder_layer - min(attn_layer)
        self.attn_layer = attn_layer
        norm_layer = get_norm_layer(norm_type=norm)
        nonlinearity = get_nonlinearity_layer(activation_type=activation)

        # Only the source image feeds the encoder; view codes are fused
        # later at the bottleneck.
        input_nc = image_nc

        self.block0 = EncoderBlock(input_nc, ngf, norm_layer,
                                   nonlinearity, use_spect, use_coord)
        mult = 1
        for i in range(encoder_layer-1):
            mult_prev = mult
            mult = min(2 ** (i + 1), img_f//ngf)
            block = EncoderBlock(ngf*mult_prev, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'encoder' + str(i), block)

        # Bottleneck fusion of encoder features with the view-code map.
        self.cat = ResBlocks(1, ngf*mult+structure_nc, ngf*mult, None, norm_layer, nonlinearity, False, use_spect, use_coord)

        for i in range(self.decoder_layer):
            mult_prev = mult
            mult = min(2 ** (encoder_layer-i-2), img_f//ngf) if i != encoder_layer-1 else 1

            up = ResBlockDecoder(ngf*mult_prev, ngf*mult, ngf*mult, norm_layer,
                                 nonlinearity, use_spect, use_coord)
            setattr(self, 'decoder' + str(i), up)

            # Skip connection from the encoder level at this resolution.
            jumpconv = Jump(ngf*mult, ngf*mult, 3, None, nonlinearity, use_spect, use_coord)
            setattr(self, 'jump' + str(i), jumpconv)

            if encoder_layer-i-1 in attn_layer:
                # 2-channel flow head for this pyramid level.
                flow_out = nn.Conv2d(ngf*mult, 2, kernel_size=3,stride=1,padding=1,bias=True)
                setattr(self, 'output' + str(i), flow_out)
                # 1-channel soft occlusion mask in [0, 1].
                flow_mask = nn.Sequential(nn.Conv2d(ngf*mult, 1, kernel_size=3,stride=1,padding=1,bias=True),
                                          nn.Sigmoid())
                setattr(self, 'mask' + str(i), flow_mask)

    def forward(self, source, source_B, target_B):
        flow_fields=[]
        masks=[]
        out = self.block0(source)
        result=[out]
        for i in range(self.encoder_layer-1):
            model = getattr(self, 'encoder' + str(i))
            out = model(out)
            result.append(out)
        # Inject the (source - target) view code at the bottleneck.
        out = self.encode_ShapeNet_bone(source_B, target_B, out)
        for i in range(self.decoder_layer):
            model = getattr(self, 'decoder' + str(i))
            out = model(out)

            # Fuse the same-resolution encoder feature via the jump conv.
            model = getattr(self, 'jump' + str(i))
            jump = model(result[self.encoder_layer-i-2])
            out = out+jump

            if self.encoder_layer-i-1 in self.attn_layer:
                flow_field, mask = self.attn_output(out, i)
                flow_fields.append(flow_field)
                masks.append(mask)

        return flow_fields, masks

    def attn_output(self, out, i):
        # Apply the per-level flow and mask heads registered in __init__.
        model = getattr(self, 'output' + str(i))
        flow = model(out)
        model = getattr(self, 'mask' + str(i))
        mask = model(out)

        return flow, mask

    def encode_ShapeNet_bone(self, source_B, target_B, out):
        # Tile the view-code difference over the bottleneck's spatial
        # extent and fuse it via the `cat` residual block.
        B=source_B-target_B
        _,_,w,h = out.size()
        B=B.repeat(1, 1, w, h)
        out = torch.cat((out,B), 1)
        out = self.cat(out)
        return out
class ShapeNetFlowNetGenerator(BaseNetwork):
    """Standalone flow estimator wrapper for ShapeNet view synthesis."""

    def __init__(self, image_nc=3, structure_nc=18, output_nc=3, ngf=64, img_f=1024, norm='batch',
                 activation='ReLU', encoder_layer=5, attn_layer=[1,2], use_spect=True, use_coord=False):
        super(ShapeNetFlowNetGenerator, self).__init__()
        self.attn_layer = attn_layer
        self.flow_net = ShapeNetFlowNet(image_nc, structure_nc, ngf, img_f,
                                        encoder_layer, attn_layer=attn_layer,
                                        norm=norm, activation=activation,
                                        use_spect=use_spect, use_coord=use_coord)

    def forward(self, source, source_B, target_B):
        # Returns one (flow_field, mask) pair per attention level.
        return self.flow_net(source, source_B, target_B)
| 46.194337
| 152
| 0.573454
| 4,436
| 35,893
| 4.382777
| 0.045987
| 0.028804
| 0.03225
| 0.041148
| 0.833248
| 0.815297
| 0.801255
| 0.786185
| 0.760621
| 0.74766
| 0
| 0.018645
| 0.300699
| 35,893
| 776
| 153
| 46.253866
| 0.755936
| 0.017803
| 0
| 0.71066
| 0
| 0
| 0.013308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060914
| false
| 0
| 0.013536
| 0
| 0.135364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b79ced3f0610d936101d7bb120233db03e447b29
| 32
|
py
|
Python
|
utils/reset.py
|
VarunRajPanigrahy/HTNE-Sentr3y3
|
1ce3666421a53749ff9348a8a9b5adb44e7e08c0
|
[
"MIT"
] | null | null | null |
utils/reset.py
|
VarunRajPanigrahy/HTNE-Sentr3y3
|
1ce3666421a53749ff9348a8a9b5adb44e7e08c0
|
[
"MIT"
] | 8
|
2020-06-07T16:20:12.000Z
|
2022-03-12T00:34:06.000Z
|
utils/reset.py
|
VarunRajPanigrahy/HTNE-Sentr3y3
|
1ce3666421a53749ff9348a8a9b5adb44e7e08c0
|
[
"MIT"
] | 1
|
2020-10-02T06:23:46.000Z
|
2020-10-02T06:23:46.000Z
|
import os
def reset():
    """Placeholder reset hook; intentionally does nothing and returns None."""
    return None
| 8
| 12
| 0.625
| 5
| 32
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.28125
| 32
| 4
| 13
| 8
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
b7c6ef7d4090ca4aa71b23cf7e863aa38222a3ed
| 7,280
|
py
|
Python
|
baselines/data.py
|
xttx123/DAST-GCN
|
9d74faa328ea746d5382ac96aa6a26c686a2f2dc
|
[
"MIT"
] | 3
|
2021-12-07T08:11:08.000Z
|
2022-01-27T01:39:55.000Z
|
baselines/data.py
|
xttx123/DAST-GCN
|
9d74faa328ea746d5382ac96aa6a26c686a2f2dc
|
[
"MIT"
] | 2
|
2021-09-30T08:47:03.000Z
|
2021-11-11T10:20:09.000Z
|
baselines/data.py
|
AhmedElGazzar0/DAST-GCN
|
9d74faa328ea746d5382ac96aa6a26c686a2f2dc
|
[
"MIT"
] | 2
|
2021-12-25T09:40:03.000Z
|
2022-03-31T11:54:39.000Z
|
import numpy as np
import pandas as pd
from torch.utils.data import DataLoader
from sklearn.utils import shuffle
def get_corr_vector(df, corr, atlas, label, nrois=116):
    """Load per-subject connectivity upper-triangle vectors and labels.

    Args:
        df: DataFrame with one row per subject; must contain the file
            column selected by `corr` and the `label` column.
        corr: 'cc' (correlation), 'pc' (partial correlation), anything
            else selects the mutual-information file column.
        atlas: substituted for the 'ATLAS' placeholder in file paths.
        label: name of the label column.
        nrois: number of ROIs; matrices are nrois x nrois.

    Returns:
        (X, Y): X is (n_subjects, nrois*(nrois+1)/2) of upper-triangle
        values (diagonal included); Y is the int label vector.
    """
    n_corr_mat = int(nrois * (nrois + 1) / 2)
    total_subjects = len(df)
    X = np.zeros((total_subjects, n_corr_mat))
    Y = np.zeros(total_subjects, dtype=int)
    if corr == 'cc':
        c = 'corrmat_file'
    elif corr == 'pc':
        c = 'partial_corrmat_file'
    else:
        c = 'mi_file'
    # FIX: removed the redundant counter `j` (it always equalled `i`) and
    # hoisted the loop-invariant triu index computation out of the loop.
    cc_triu_ids = np.triu_indices(nrois)
    for i in range(total_subjects):
        corr_file = df[c].iloc[i].replace('ATLAS', atlas)
        corr_vals = np.load(corr_file)
        X[i] = corr_vals[cc_triu_ids]
        Y[i] = df[label].iloc[i]
    return X, Y
def get_corr_matrix(df, corr, atlas, nrois, label):
    """Load per-subject full connectivity matrices and labels.

    Args:
        df: path to a CSV file (note: unlike get_corr_vector, this is a
            path, not a DataFrame) with one row per subject.
        corr: 'cc' / 'pc' / other, selecting the file column as in
            get_corr_vector.
        atlas: substituted for the 'ATLAS' placeholder in file paths.
        nrois: number of ROIs per matrix.
        label: name of the label column.

    Returns:
        (X, Y): X is (n_subjects, nrois, nrois); Y is the int label vector.
    """
    df = pd.read_csv(df)
    total_subjects = len(df)
    X = np.zeros((total_subjects, nrois, nrois))
    Y = np.zeros(total_subjects, dtype=int)
    if corr == 'cc':
        c = 'corrmat_file'
    elif corr == 'pc':
        c = 'partial_corrmat_file'
    else:
        c = 'mi_file'
    # FIX: removed the redundant counter `j` (it always equalled `i`).
    for i in range(total_subjects):
        corr_file = df[c].iloc[i].replace('ATLAS', atlas)
        X[i] = np.load(corr_file)
        Y[i] = df[label].iloc[i]
    return X, Y
class UkbbData(DataLoader):
    """UK Biobank ROI-timecourse dataset.

    Loads one (nrois, ntime) timecourse per subject, optionally z-scored
    per ROI, with one-hot binary age labels.
    """

    def __init__(self,
                 atlas_name='schaefer_400',
                 data_info_file='ukbb3000.csv',
                 z_score=True
                 ):
        # NOTE(review): `super(UkbbData).__init__()` creates an unbound
        # super object and initializes it — DataLoader.__init__ is never
        # actually called. Preserved as-is since DataLoader.__init__
        # requires arguments not available here; confirm intent.
        super(UkbbData).__init__()
        # Check if valid atlas name
        if atlas_name not in ['AAL', 'HO_cort_maxprob_thr25-2mm', 'schaefer_100', 'schaefer_400', 'cc200', 'HO',
                              'JAMA_IC19', 'JAMA_IC52', "JAMA_IC7"]:
            raise ValueError('atlas_name not found')

        # NOTE(review): despite its name, `data_info_file` is used as an
        # already-loaded DataFrame (it is shuffled and indexed directly,
        # never read with pd.read_csv) — confirm against callers.
        data_info = data_info_file
        # Shuffle dataset deterministically.
        data_info = shuffle(data_info, random_state=1)

        # Determine nrois and ntime from the first sample's timecourse
        # file (first CSV column is dropped as a non-ROI column).
        sample_file = data_info['tc_file'].iloc[0].replace('ATLAS', atlas_name)
        nrois = pd.read_csv(sample_file).values.shape[1] - 1
        self.nrois = nrois
        ntime = pd.read_csv(sample_file).values.shape[0]
        # FIX: removed the unused N_corr_mat computation.

        self.total_subjects = len(data_info)
        self.tc_data = np.zeros((self.total_subjects, nrois, ntime), dtype=float)
        labels = np.zeros(self.total_subjects, dtype=int)

        # Load data
        for i, sub_i in enumerate(data_info.index):
            tc_file = data_info['tc_file'].loc[sub_i].replace('ATLAS', atlas_name)
            tc_vals = pd.read_csv(tc_file).values.transpose()[1:, :ntime]
            if z_score:
                # Z-score each column of the transposed array, then
                # transpose back (per-ROI normalization over time).
                tc_vals = np.array(
                    [(tc_vals[:, col] - np.mean(tc_vals[:, col])) / np.std(tc_vals[:, col])
                     for col in range(tc_vals.shape[1])])
                self.tc_data[i] = tc_vals.transpose()
            else:
                self.tc_data[i] = tc_vals
            labels[i] = data_info['Age_binary'].loc[sub_i]

        # One-hot encode the binary age label.
        self.labels = np.eye(2)[labels]

    def __len__(self):
        return self.total_subjects

    def __getitem__(self, index):
        return self.tc_data[index], self.labels[index]

    def __getallitems__(self):
        return self.tc_data, self.labels
class Ukbb_corr(DataLoader):
    """UK Biobank connectivity-vector dataset.

    Loads the upper triangle of each subject's correlation matrix as a
    flat vector, with one-hot binary age labels.
    """
    def __init__(self,
                 data_info_file='ukbb3000.csv',
                 atlas_name='schaefer_400'
                 ):
        # NOTE(review): `super(Ukbb_corr).__init__()` creates an unbound
        # super object; DataLoader.__init__ is never actually called —
        # confirm intent.
        super(Ukbb_corr).__init__()
        # Check if valid atlas name
        if atlas_name not in ['AAL', 'HO_cort_maxprob_thr25-2mm', 'schaefer_100', 'schaefer_400', 'cc_200', 'HO',
                              'JAMA_IC19', 'JAMA_IC52', "JAMA_IC7"]:
            raise ValueError('atlas_name not found')

        # NOTE(review): despite its name, `data_info_file` is used as an
        # already-loaded DataFrame (shuffled and indexed directly) —
        # confirm against callers.
        data_info = data_info_file
        # Shuffle dataset deterministically.
        data_info = shuffle(data_info, random_state=0)

        # Determine nrois from the first sample's timecourse file (first
        # CSV column is dropped as a non-ROI column).
        sample_file = data_info['tc_file'].iloc[0].replace('ATLAS', atlas_name)
        nrois = pd.read_csv(sample_file).values.shape[1] - 1
        # Length of the flattened upper triangle (diagonal included);
        # exposed as self.nrois (a feature count, not an ROI count).
        N_corr_mat = int(nrois * (nrois + 1) / 2)
        self.nrois = N_corr_mat

        # Initialize an np array to store all timecourses and labels
        self.total_subjects = len(data_info)
        self.corr_data = np.zeros((self.total_subjects, N_corr_mat))
        self.graphs = []
        labels = np.zeros(self.total_subjects, dtype=int)

        # Load data
        for i, sub_i in enumerate(data_info.index):
            corr_file = data_info['corrmat_file'].loc[sub_i].replace('ATLAS', atlas_name)
            corr_vals = np.load(corr_file)
            cc_triu_ids = np.triu_indices(nrois)
            cc_vector = corr_vals[cc_triu_ids]
            self.corr_data[i] = cc_vector
            labels[i] = data_info['Age_binary'].loc[sub_i]

        # One-hot encode the binary age label.
        self.labels = np.eye(2)[labels]

    def __len__(self):
        return self.total_subjects

    def __getitem__(self, index):
        return self.corr_data[index], self.labels[index]

    def __getallitems__(self):
        return self.corr_data, self.labels
class Ukbb_brainetcnn(DataLoader):
    """UK-Biobank dataset of full correlation matrices for BrainNetCNN.

    Each sample is a (1, nrois, nrois) correlation matrix (channel dimension
    added for CNN input) paired with a one-hot binary age label.
    """

    def __init__(self,
                 data_info_file='ukbb3000.csv',
                 atlas_name='schaefer_400'
                 ):
        # Bug fix: the original called ``super(Ukbb_corr).__init__()`` —
        # the wrong class.  The unbound-super no-op form is preserved.
        super(Ukbb_brainetcnn).__init__()
        # Check if valid atlas name
        if atlas_name not in ['AAL', 'HO_cort_maxprob_thr25-2mm', 'schaefer_100', 'schaefer_400', 'cc_200', 'HO',
                              'JAMA_IC19', 'JAMA_IC52', "JAMA_IC7"]:
            raise ValueError('atlas_name not found')
        # NOTE(review): ``data_info_file`` is used as a DataFrame, not a path.
        data_info = shuffle(data_info_file, random_state=0)
        # ROI count from the first sample (first CSV column is not an ROI).
        sample_file = data_info['tc_file'].iloc[0].replace('ATLAS', atlas_name)
        nrois = pd.read_csv(sample_file).values.shape[1] - 1
        self.nrois = nrois
        # Initialize storage for all correlation matrices and labels.
        self.total_subjects = len(data_info)
        self.corr_data = np.zeros((self.total_subjects, nrois, nrois))
        self.graphs = []
        labels = np.zeros(self.total_subjects, dtype=int)
        for i, sub_i in enumerate(data_info.index):
            corr_file = data_info['corrmat_file'].loc[sub_i].replace('ATLAS', atlas_name)
            self.corr_data[i] = np.load(corr_file)
            labels[i] = data_info['Age_binary'].loc[sub_i]
        self.labels = np.eye(2)[labels]
        # Add a singleton channel axis: (N, nrois, nrois) -> (N, 1, nrois, nrois).
        self.corr_data = np.expand_dims(self.corr_data, 1)

    def __len__(self):
        """Number of subjects."""
        return self.total_subjects

    def __getitem__(self, index):
        """Return (corr_matrix_with_channel, one_hot_label) for one subject."""
        return self.corr_data[index], self.labels[index]

    def __getallitems__(self):
        """Return the full (corr_matrices, one_hot_labels) arrays."""
        return self.corr_data, self.labels
| 31.652174
| 120
| 0.592582
| 991
| 7,280
| 4.068618
| 0.14329
| 0.059524
| 0.050595
| 0.03125
| 0.839286
| 0.818204
| 0.810764
| 0.786954
| 0.773313
| 0.747272
| 0
| 0.020628
| 0.2875
| 7,280
| 230
| 121
| 31.652174
| 0.756699
| 0.09011
| 0
| 0.746575
| 0
| 0
| 0.090427
| 0.01136
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09589
| false
| 0
| 0.027397
| 0.061644
| 0.219178
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7d15be6bbbcd532df4119c1405f89a4aba24670
| 9,118
|
py
|
Python
|
sparse_mkl/tests/test_sparse_dense.py
|
windshirely/sparse_mkl
|
933af0e8c3bc9df7fc6e08a8b2033fbfe833b081
|
[
"MIT"
] | 1
|
2022-03-02T02:09:45.000Z
|
2022-03-02T02:09:45.000Z
|
sparse_mkl/tests/test_sparse_dense.py
|
windshirely/sparse_mkl
|
933af0e8c3bc9df7fc6e08a8b2033fbfe833b081
|
[
"MIT"
] | null | null | null |
sparse_mkl/tests/test_sparse_dense.py
|
windshirely/sparse_mkl
|
933af0e8c3bc9df7fc6e08a8b2033fbfe833b081
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import numpy.testing as npt
from sparse_mkl import dot_product_mkl
from sparse_mkl.tests.test_mkl import MATRIX_1, MATRIX_2
class TestSparseDenseMultiplication(unittest.TestCase):
    """dot_product_mkl with one sparse and one dense operand.

    Every test compares against ``np.dot`` on densified operands.  Where an
    ``out=`` buffer is passed, the tests also assert that the product was
    accumulated into that exact buffer object (``id`` equality) scaled by
    ``out_scalar``, and that the sparse inputs were not mutated.  The test
    names encode dtype, which operand is sparse (a/b), sparse format
    (csr default / csc / bsr), and whether dtype up-casting is requested.
    A subclass overrides ``order`` to re-run everything with F-ordered
    dense arrays.
    """
    # Memory layout used for dense operands and out buffers.
    order = "C"
    def setUp(self):
        # Fresh copies per test so in-place ops cannot leak between tests.
        self.mat1 = MATRIX_1.copy()
        self.mat2 = MATRIX_2.copy()
        self.mat1_d = MATRIX_1.A
        self.mat2_d = MATRIX_2.A
    def test_float32_b_sparse(self):
        # dense @ sparse, float32; checks plain product, then out= accumulation.
        d1, d2 = self.mat1_d.astype(np.float32), self.mat2.astype(np.float32)
        mat3 = dot_product_mkl(d1, d2, debug=True)
        mat3_np = np.dot(d1, d2.A)
        npt.assert_array_almost_equal(mat3_np, mat3)
        mat3_np += 3.
        out = np.ones(mat3_np.shape, dtype=np.float32, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3., debug=True)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(mat3_np, out)
        # Mutating the return must mutate the buffer: they are the same object.
        mat3 += 1.
        npt.assert_array_almost_equal(mat3, out)
        self.assertEqual(id(mat3), id(out))
    def test_float64_b_sparse(self):
        d1, d2 = self.mat1_d, self.mat2
        mat3 = dot_product_mkl(d1, d2, debug=True)
        mat3_np = np.dot(d1, d2.A)
        npt.assert_array_almost_equal(mat3_np, mat3)
        mat3 = dot_product_mkl(d1, d2, out=np.ones(mat3_np.shape, dtype=np.float64, order=self.order), out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
    def test_float64_cast_b_sparse(self):
        # Mixed float32/float64 operands with cast=True.
        d1, d2 = self.mat1_d.astype(np.float32), self.mat2
        mat3 = dot_product_mkl(d1, d2, cast=True)
        mat3_np = np.dot(d1, d2.A)
        npt.assert_array_almost_equal(mat3_np, mat3)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3., cast=True)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float32_csc_sparse(self):
        d1, d2 = self.mat1_d.astype(np.float32), self.mat2.astype(np.float32).tocsc()
        mat3_np = np.dot(d1, d2.A)
        mat3 = dot_product_mkl(d1, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        # The sparse operand must be left unmodified.
        npt.assert_array_almost_equal(d2.A, self.mat2_d)
        out = np.ones(mat3_np.shape, dtype=np.float32, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float32_bsr_sparse(self):
        d1, d2 = self.mat1_d.astype(np.float32), self.mat2.astype(np.float32).tobsr(blocksize=(10, 10))
        mat3_np = np.dot(d1, d2.A)
        mat3 = dot_product_mkl(d1, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d2.A, self.mat2_d)
        out = np.ones(mat3_np.shape, dtype=np.float32, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_csc_sparse(self):
        d1, d2 = self.mat1_d, self.mat2.tocsc()
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1, d2.A)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d2.A, self.mat2_d)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_bsr_sparse(self):
        d1, d2 = self.mat1_d, self.mat2.tobsr(blocksize=(10, 10))
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1, d2.A)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d2.A, self.mat2_d)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_cast_csc_sparse(self):
        d1, d2 = self.mat1_d.astype(np.float32), self.mat2.tocsc()
        mat3 = dot_product_mkl(d1, d2, cast=True)
        mat3_np = np.dot(d1, d2.A)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d2.A, self.mat2_d)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3., cast=True)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_cast_bsr_sparse(self):
        d1, d2 = self.mat1_d.astype(np.float32), self.mat2.tobsr(blocksize=(10, 10))
        mat3 = dot_product_mkl(d1, d2, cast=True)
        mat3_np = np.dot(d1, d2.A)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d2.A, self.mat2_d)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3., cast=True)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float32_a_sparse(self):
        # sparse @ dense (left operand sparse) from here on.
        d1, d2 = self.mat1.astype(np.float32), self.mat2_d.astype(np.float32)
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1.A, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        out = np.ones(mat3_np.shape, dtype=np.float32, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_a_sparse(self):
        d1, d2 = self.mat1, self.mat2_d
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1.A, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_a_csc_sparse(self):
        d1, d2 = self.mat1.tocsc(), self.mat2_d
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1.A, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d1.A, self.mat1_d)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_a_bsr_sparse(self):
        d1, d2 = self.mat1.tobsr(blocksize=(10, 10)), self.mat2_d
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1.A, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d1.A, self.mat1_d)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float32_a_csc_sparse(self):
        d1, d2 = self.mat1.astype(np.float32).tocsc(), self.mat2_d.astype(np.float32)
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1.A, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d1.A, self.mat1_d)
        out = np.ones(mat3_np.shape, dtype=np.float32, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float32_a_bsr_sparse(self):
        d1, d2 = self.mat1.astype(np.float32).tobsr(blocksize=(10, 10)), self.mat2_d.astype(np.float32)
        mat3 = dot_product_mkl(d1, d2)
        mat3_np = np.dot(d1.A, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        npt.assert_array_almost_equal(d1.A, self.mat1_d)
        out = np.ones(mat3_np.shape, dtype=np.float32, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3.)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
    def test_float64_cast_a_sparse(self):
        d1, d2 = self.mat1.astype(np.float32), self.mat2_d
        mat3 = dot_product_mkl(d1, d2, cast=True)
        mat3_np = np.dot(d1.A, d2)
        npt.assert_array_almost_equal(mat3_np, mat3)
        out = np.ones(mat3_np.shape, dtype=np.float64, order=self.order)
        mat3 = dot_product_mkl(d1, d2, out=out, out_scalar=3., cast=True)
        npt.assert_array_almost_equal(mat3_np + 3., mat3)
        self.assertEqual(id(mat3), id(out))
class TestSparseDenseFMultiplication(TestSparseDenseMultiplication):
    """Re-run every sparse/dense multiplication test with Fortran-ordered dense operands."""
    order = "F"

    def setUp(self):
        """Copy the shared fixtures, densifying with column-major layout."""
        self.mat1, self.mat2 = MATRIX_1.copy(), MATRIX_2.copy()
        self.mat1_d = np.asarray(MATRIX_1.A, order=self.order)
        self.mat2_d = np.asarray(MATRIX_2.A, order=self.order)
| 36.039526
| 117
| 0.655078
| 1,464
| 9,118
| 3.821038
| 0.041667
| 0.07079
| 0.110118
| 0.157311
| 0.928316
| 0.928316
| 0.923132
| 0.905434
| 0.893457
| 0.856096
| 0
| 0.069236
| 0.217482
| 9,118
| 252
| 118
| 36.18254
| 0.714786
| 0
| 0
| 0.708571
| 0
| 0
| 0.000439
| 0
| 0
| 0
| 0
| 0
| 0.337143
| 1
| 0.102857
| false
| 0
| 0.028571
| 0
| 0.154286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7fcd014380bfd15beb37f1b21f8102ac3518545
| 107,575
|
py
|
Python
|
PublicDataReader/PublicDataPortal/molit.py
|
brainer3220/PublicDataReader
|
ae7cd9c30eea44082bd6bf626d433ddf40903446
|
[
"MIT"
] | null | null | null |
PublicDataReader/PublicDataPortal/molit.py
|
brainer3220/PublicDataReader
|
ae7cd9c30eea44082bd6bf626d433ddf40903446
|
[
"MIT"
] | 9
|
2020-12-14T12:51:23.000Z
|
2022-02-09T09:12:37.000Z
|
PublicDataReader/PublicDataPortal/molit.py
|
brainer3220/PublicDataReader
|
ae7cd9c30eea44082bd6bf626d433ddf40903446
|
[
"MIT"
] | null | null | null |
"""
국토교통부 Open API
molit(Ministry of Land, Infrastructure and Transport)
1. Transaction 클래스: 부동산 실거래가 조회
- AptTrade: 아파트매매 실거래자료 조회
- AptTradeDetail: 아파트매매 실거래 상세 자료 조회
- AptRent: 아파트 전월세 자료 조회
- AptOwnership: 아파트 분양권전매 신고 자료 조회
- OffiTrade: 오피스텔 매매 신고 조회
- OffiRent: 오피스텔 전월세 신고 조회
- RHTrade: 연립다세대 매매 실거래자료 조회
- RHRent: 연립다세대 전월세 실거래자료 조회
- DHTrade: 단독/다가구 매매 실거래 조회
- DHRent: 단독/다가구 전월세 자료 조회
- LandTrade: 토지 매매 신고 조회
- BizTrade: 상업업무용 부동산 매매 신고 자료 조회
2. Building 클래스: 건축물대장정보 서비스
01 건축물대장 기본개요 조회: getBrBasisOulnInfo
02 건축물대장 총괄표제부 조회: getBrRecapTitleInfo
03 건축물대장 표제부 조회: getBrTitleInfo
04 건축물대장 층별개요 조회: getBrFlrOulnInfo
05 건축물대장 부속지번 조회: getBrAtchJibunInfo
06 건축물대장 전유공용면적 조회: getBrExposPubuseAreaInfo
07 건축물대장 오수정화시설 조회: getBrWclfInfo
08 건축물대장 주택가격 조회: getBrHsprcInfo
09 건축물대장 전유부 조회: getBrExposInfo
10 건축물대장 지역지구구역 조회: getBrJijiguInfo
"""
import datetime
import numpy as np
import pandas as pd
import requests
from bs4 import BeautifulSoup
class Transaction:
"""
부동산 실거래가 조회 클래스
"""
    def __init__(self, serviceKey):
        """Initialize with a service key issued by the public data portal.

        Builds one endpoint URL per transaction type, pings each endpoint to
        verify the key, and downloads the legal-district code table.
        """
        # Store the Open API service key.
        self.serviceKey = serviceKey
        # Endpoint URLs (the key is validated against each one below).
        self.urlAptTrade = (
            "http://openapi.molit.go.kr:8081/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcAptTrade?serviceKey="
            + self.serviceKey)
        self.urlAptTradeDetail = (
            "http://openapi.molit.go.kr/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcAptTradeDev?serviceKey="
            + self.serviceKey)
        self.urlAptRent = (
            "http://openapi.molit.go.kr:8081/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcAptRent?serviceKey="
            + self.serviceKey)
        self.urlAptOwnership = (
            "http://openapi.molit.go.kr/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcSilvTrade?serviceKey="
            + self.serviceKey)
        self.urlOffiTrade = (
            "http://openapi.molit.go.kr/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcOffiTrade?serviceKey="
            + self.serviceKey)
        self.urlOffiRent = (
            "http://openapi.molit.go.kr/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcOffiRent?serviceKey="
            + self.serviceKey)
        self.urlRHTrade = (
            "http://openapi.molit.go.kr:8081/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcRHTrade?serviceKey="
            + self.serviceKey)
        self.urlRHRent = (
            "http://openapi.molit.go.kr:8081/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcRHRent?serviceKey="
            + self.serviceKey)
        self.urlDHTrade = (
            "http://openapi.molit.go.kr:8081/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcSHTrade?serviceKey="
            + self.serviceKey)
        self.urlDHRent = (
            "http://openapi.molit.go.kr:8081/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcSHRent?serviceKey="
            + self.serviceKey)
        self.urlLandTrade = (
            "http://openapi.molit.go.kr/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcLandTrade?serviceKey="
            + self.serviceKey)
        self.urlBizTrade = (
            "http://openapi.molit.go.kr/OpenAPI_ToolInstallPackage/service/rest/RTMSOBJSvc/getRTMSDataSvcNrgTrade?serviceKey="
            + self.serviceKey)
        # Map service display names (Korean, user-facing) to endpoint URLs.
        urlDict = {
            "아파트매매 실거래자료 조회": self.urlAptTrade,
            "아파트매매 실거래 상세 자료 조회": self.urlAptTradeDetail,
            "아파트 전월세 자료 조회": self.urlAptRent,
            "아파트 분양권전매 신고 자료 조회": self.urlAptOwnership,
            "오피스텔 매매 신고 조회": self.urlOffiTrade,
            "오피스텔 전월세 신고 조회": self.urlOffiRent,
            "연립다세대 매매 실거래자료 조회": self.urlRHTrade,
            "연립다세대 전월세 실거래자료 조회": self.urlRHRent,
            "단독/다가구 매매 실거래 조회": self.urlDHTrade,
            "단독/다가구 전월세 자료 조회": self.urlDHRent,
            "토지 매매 신고 조회": self.urlLandTrade,
            "상업업무용 부동산 매매 신고 자료 조회": self.urlBizTrade,
        }
        # Ping every endpoint: resultCode "00" means the key works there.
        # NOTE(review): this makes 12 network requests in the constructor.
        for serviceName, url in urlDict.items():
            result = requests.get(url, verify=False)
            xmlsoup = BeautifulSoup(result.text, "lxml-xml")
            te = xmlsoup.findAll("header")
            if te[0].find("resultCode").text == "00":
                print(f">>> {serviceName} 서비스가 정상 작동합니다.")
            else:
                print(f">>> {serviceName} 서비스키 미등록 오류입니다.")
        # Load the legal-district code table (source: https://code.go.kr),
        # keep only non-abolished entries, and derive the 5-digit district
        # code from the first five digits of the 10-digit code.
        path_code = "https://raw.githubusercontent.com/WooilJeong/PublicDataReader/f14e4de3410cc0f798a83ee5934070d651cbd67b/docs/%EB%B2%95%EC%A0%95%EB%8F%99%EC%BD%94%EB%93%9C%20%EC%A0%84%EC%B2%B4%EC%9E%90%EB%A3%8C.txt"
        code = pd.read_csv(path_code, encoding="cp949", sep="\t")
        code = code.loc[code["폐지여부"] == "존재"]
        code["법정구코드"] = list(map(lambda a: str(a)[:5], list(code["법정동코드"])))
        self.code = code
def CodeFinder(self, name):
"""
국토교통부 실거래가 정보 오픈API는 법정동코드 10자리 중 앞 5자리인 구를 나타내는 지역코드를 사용합니다.
API에 사용할 구 별 코드를 조회하는 메서드이며, 문자열 지역 명을 입력받고, 조회 결과를 Pandas DataFrame형식으로 출력합니다.
"""
result = self.code[self.code["법정동명"].str.contains(name)][[
"법정동명", "법정구코드"
]]
result.index = range(len(result))
return result
    def DataCollector(self, service, LAWD_CD, start_date, end_date):
        """Query one service month-by-month over a date range.

        Inputs: a per-month lookup method (e.g. ``self.AptTrade``), a region
        code, a start month (YYYYmm) and an end month (YYYYmm, inclusive).
        Returns the row-wise concatenation of every monthly DataFrame.
        """
        start_date = datetime.datetime.strptime(str(start_date), "%Y%m")
        start_date = datetime.datetime.strftime(start_date, "%Y-%m")
        end_date = datetime.datetime.strptime(str(end_date), "%Y%m")
        # +31 days pushes into the following month so the end month itself
        # is covered by the month-end date_range below.
        end_date = end_date + datetime.timedelta(days=31)
        end_date = datetime.datetime.strftime(end_date, "%Y-%m")
        # NOTE(review): lowercase freq "m" is a legacy month-end alias;
        # newer pandas prefers "M"/"ME" — confirm against the pinned version.
        ts = pd.date_range(start=start_date, end=end_date, freq="m")
        date_list = list(ts.strftime("%Y%m"))
        df = pd.DataFrame()
        df_sum = pd.DataFrame()
        for m in date_list:
            print(">>> LAWD_CD :", LAWD_CD, "DEAL_YMD :", m)
            DEAL_YMD = m
            df = service(LAWD_CD, DEAL_YMD)
            df_sum = pd.concat([df_sum, df])
        df_sum.index = range(len(df_sum))
        return df_sum
def AptTrade(self, LAWD_CD, DEAL_YMD):
"""
01 아파트매매 실거래자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlAptTrade + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"아파트",
"지번",
"년",
"월",
"일",
"건축년도",
"전용면적",
"층",
"거래금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 아파트, 지번, 년, 월, 일, 건축년도, 전용면적, 층, 거래금액]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드", "법정동", "거래일", "아파트", "지번", "전용면적", "층", "건축년도", "거래금액"
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df["아파트"] = df["아파트"].str.strip()
df.index = range(len(df))
# 형 변환
cols = df.columns.drop(["법정동", "거래일", "아파트", "지번"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def AptTradeDetail(self, LAWD_CD, DEAL_YMD):
"""
02 아파트매매 실거래 상세 자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlAptTradeDetail + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"거래금액",
"건축년도",
"년",
"도로명",
"도로명건물본번호코드",
"도로명건물부번호코드",
"도로명시군구코드",
"도로명일련번호코드",
"도로명지상지하코드",
"도로명코드",
"법정동",
"법정동본번코드",
"법정동부번코드",
"법정동시군구코드",
"법정동읍면동코드",
"법정동지번코드",
"아파트",
"월",
"일",
"전용면적",
"지번",
"지역코드",
"층",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[
거래금액,
건축년도,
년,
도로명,
도로명건물본번호코드,
도로명건물부번호코드,
도로명시군구코드,
도로명일련번호코드,
도로명지상지하코드,
도로명코드,
법정동,
법정동본번코드,
법정동부번코드,
법정동시군구코드,
법정동읍면동코드,
법정동지번코드,
아파트,
월,
일,
전용면적,
지번,
지역코드,
층,
]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"아파트",
"지번",
"전용면적",
"층",
"건축년도",
"거래금액",
"법정동본번코드",
"법정동부번코드",
"법정동시군구코드",
"법정동읍면동코드",
"법정동지번코드",
"도로명",
"도로명건물본번호코드",
"도로명건물부번호코드",
"도로명시군구코드",
"도로명일련번호코드",
"도로명지상지하코드",
"도로명코드",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df["아파트"] = df["아파트"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "아파트", "지번", "도로명"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def AptRent(self, LAWD_CD, DEAL_YMD):
"""
03 아파트 전월세 자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlAptRent + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"아파트",
"지번",
"년",
"월",
"일",
"건축년도",
"전용면적",
"층",
"보증금액",
"월세금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 아파트, 지번, 년, 월, 일, 건축년도, 전용면적, 층, 보증금액, 월세금액]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"아파트",
"지번",
"전용면적",
"층",
"건축년도",
"보증금액",
"월세금액",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["보증금액"] = pd.to_numeric(df["보증금액"].str.replace(",", ""))
df["월세금액"] = pd.to_numeric(df["월세금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "지번", "아파트"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def AptOwnership(self, LAWD_CD, DEAL_YMD):
"""
04 아파트 분양권전매 신고 자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlAptOwnership + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"시군구",
"단지",
"지번",
"구분",
"년",
"월",
"일",
"전용면적",
"층",
"거래금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 시군구, 단지, 지번, 구분, 년, 월, 일, 전용면적, 층, 거래금액]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"시군구",
"단지",
"지번",
"구분",
"전용면적",
"층",
"거래금액",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "시군구", "단지", "지번", "구분"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def OffiTrade(self, LAWD_CD, DEAL_YMD):
"""
05 오피스텔 매매 신고 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlOffiTrade + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"시군구",
"단지",
"지번",
"년",
"월",
"일",
"전용면적",
"층",
"거래금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 시군구, 단지, 지번, 년, 월, 일, 전용면적, 층, 거래금액]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드", "법정동", "거래일", "시군구", "단지", "지번", "전용면적", "층", "거래금액"
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "시군구", "단지", "지번"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def OffiRent(self, LAWD_CD, DEAL_YMD):
"""
06 오피스텔 전월세 신고 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlOffiRent + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"시군구",
"단지",
"지번",
"년",
"월",
"일",
"전용면적",
"층",
"보증금",
"월세",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 시군구, 단지, 지번, 년, 월, 일, 전용면적, 층, 보증금, 월세]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"시군구",
"단지",
"지번",
"전용면적",
"층",
"보증금",
"월세",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["보증금"] = pd.to_numeric(df["보증금"].str.replace(",", ""))
df["월세"] = pd.to_numeric(df["월세"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "시군구", "단지", "지번"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def RHTrade(self, LAWD_CD, DEAL_YMD):
"""
07 연립다세대 매매 실거래자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlRHTrade + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"연립다세대",
"지번",
"년",
"월",
"일",
"전용면적",
"건축년도",
"층",
"거래금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 연립다세대, 지번, 년, 월, 일, 전용면적, 건축년도, 층, 거래금액]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"연립다세대",
"지번",
"전용면적",
"건축년도",
"층",
"거래금액",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "연립다세대", "지번"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def RHRent(self, LAWD_CD, DEAL_YMD):
"""
08 연립다세대 전월세 실거래자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlRHRent + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"연립다세대",
"지번",
"년",
"월",
"일",
"전용면적",
"건축년도",
"층",
"보증금액",
"월세금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[
법정동, 지역코드, 연립다세대, 지번, 년, 월, 일, 전용면적, 건축년도, 층, 보증금액,
월세금액
]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"연립다세대",
"지번",
"전용면적",
"건축년도",
"층",
"보증금액",
"월세금액",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["보증금액"] = pd.to_numeric(df["보증금액"].str.replace(",", ""))
df["월세금액"] = pd.to_numeric(df["월세금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "연립다세대", "지번"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def DHTrade(self, LAWD_CD, DEAL_YMD):
"""
09 단독/다가구 매매 실거래 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlDHTrade + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"주택유형",
"년",
"월",
"일",
"대지면적",
"연면적",
"건축년도",
"거래금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 주택유형, 년, 월, 일, 대지면적, 연면적, 건축년도, 거래금액]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드", "법정동", "거래일", "주택유형", "대지면적", "연면적", "건축년도", "거래금액"
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일", "주택유형"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def DHRent(self, LAWD_CD, DEAL_YMD):
"""
10 단독/다가구 전월세 자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlDHRent + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = ["법정동", "지역코드", "년", "월", "일", "계약면적", "보증금액", "월세금액"]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame([[법정동, 지역코드, 년, 월, 일, 계약면적, 보증금액, 월세금액]],
columns=variables)
df = pd.concat([df, data])
# Set Columns
colNames = ["지역코드", "법정동", "거래일", "계약면적", "보증금액", "월세금액"]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["보증금액"] = pd.to_numeric(df["보증금액"].str.replace(",", ""))
df["월세금액"] = pd.to_numeric(df["월세금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(["법정동", "거래일"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def LandTrade(self, LAWD_CD, DEAL_YMD):
"""
11 토지 매매 신고 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlLandTrade + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"법정동",
"지역코드",
"시군구",
"용도지역",
"지목",
"년",
"월",
"일",
"지분거래구분",
"거래면적",
"거래금액",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[법정동, 지역코드, 시군구, 용도지역, 지목, 년, 월, 일, 지분거래구분, 거래면적, 거래금액]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"시군구",
"용도지역",
"지목",
"지분거래구분",
"거래면적",
"거래금액",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(
["법정동", "거래일", "시군구", "용도지역", "지목", "지분거래구분"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def BizTrade(self, LAWD_CD, DEAL_YMD):
"""
12 상업업무용 부동산 매매 신고 자료 조회
입력: 지역코드(법정동코드 5자리), 계약월(YYYYmm)
"""
# URL
url_1 = self.urlBizTrade + "&LAWD_CD=" + str(LAWD_CD)
url_2 = "&DEAL_YMD=" + str(DEAL_YMD)
url_3 = "&numOfRows=99999"
url = url_1 + url_2 + url_3
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"거래금액",
"건물면적",
"건물주용도",
"건축년도",
"구분",
"년",
"월",
"일",
"대지면적",
"법정동",
"시군구",
"용도지역",
"유형",
"지역코드",
"층",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[
거래금액,
건물면적,
건물주용도,
건축년도,
구분,
년,
월,
일,
대지면적,
법정동,
시군구,
용도지역,
유형,
지역코드,
층,
]],
columns=variables,
)
df = pd.concat([df, data])
# Set Columns
colNames = [
"지역코드",
"법정동",
"거래일",
"시군구",
"용도지역",
"유형",
"대지면적",
"구분",
"건물면적",
"건물주용도",
"건축년도",
"층",
"거래금액",
]
# Feature Engineering
try:
if len(df["년"] != 0) & len(df["월"] != 0) & len(df["일"] != 0):
df["거래일"] = df["년"] + "-" + df["월"] + "-" + df["일"]
df["거래일"] = pd.to_datetime(df["거래일"])
df["거래금액"] = pd.to_numeric(df["거래금액"].str.replace(",", ""))
except:
df = pd.DataFrame(columns=colNames)
print("조회할 자료가 없습니다.")
# Arange Columns
df = df[colNames]
df = df.sort_values(["법정동", "거래일"])
df["법정동"] = df["법정동"].str.strip()
df.index = range(len(df))
# 숫자형 변환
cols = df.columns.drop(
["법정동", "거래일", "시군구", "용도지역", "유형", "건물주용도"])
df[cols] = df[cols].apply(pd.to_numeric, errors="coerce")
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
class Building:
"""
건축물대장정보 서비스
"""
def __init__(self, serviceKey):
"""
공공 데이터 포털에서 발급받은 Service Key를 입력받아 초기화합니다.
"""
# Open API 서비스 키 초기화
self.serviceKey = serviceKey
# ServiceKey 유효성 검사
self.baseUrl = "http://apis.data.go.kr/1613000/BldRgstService_v2/"
self.url_getBrBasisOulnInfo = (self.baseUrl + "getBrBasisOulnInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrRecapTitleInfo = (self.baseUrl + "getBrRecapTitleInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrTitleInfo = (self.baseUrl + "getBrTitleInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrFlrOulnInfo = (self.baseUrl + "getBrFlrOulnInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrAtchJibunInfo = (self.baseUrl + "getBrAtchJibunInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrExposPubuseAreaInfo = (self.baseUrl +
"getBrExposPubuseAreaInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrWclfInfo = (self.baseUrl + "getBrWclfInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrHsprcInfo = (self.baseUrl + "getBrHsprcInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrExposInfo = (self.baseUrl + "getBrExposInfo" +
f"?serviceKey={self.serviceKey}")
self.url_getBrJijiguInfo = (self.baseUrl + "getBrJijiguInfo" +
f"?serviceKey={self.serviceKey}")
# Open API URL Dict
urlDict = {
"건축물대장 기본개요 조회": self.url_getBrBasisOulnInfo,
"건축물대장 총괄표제부 조회": self.url_getBrRecapTitleInfo,
"건축물대장 표제부 조회": self.url_getBrTitleInfo,
"건축물대장 층별개요 조회": self.url_getBrFlrOulnInfo,
"건축물대장 부속지번 조회": self.url_getBrAtchJibunInfo,
"건축물대장 전유공용면적 조회": self.url_getBrExposPubuseAreaInfo,
"건축물대장 오수정화시설 조회": self.url_getBrWclfInfo,
"건축물대장 주택가격 조회": self.url_getBrHsprcInfo,
"건축물대장 전유부 조회": self.url_getBrExposInfo,
"건축물대장 지역지구구역 조회": self.url_getBrJijiguInfo,
}
# 서비스 정상 작동 여부 확인
for serviceName, url in urlDict.items():
result = requests.get(url, verify=False)
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
te = xmlsoup.findAll("header")
if te[0].find("resultCode").text == "00":
print(f">>> {serviceName} 서비스가 정상 작동합니다.")
else:
print(f">>> {serviceName} 서비스키 미등록 오류입니다.")
# 지역 코드 초기화
# 법정동 코드 출처 : https://code.go.kr
path_code = "https://raw.githubusercontent.com/WooilJeong/PublicDataReader/f14e4de3410cc0f798a83ee5934070d651cbd67b/docs/%EB%B2%95%EC%A0%95%EB%8F%99%EC%BD%94%EB%93%9C%20%EC%A0%84%EC%B2%B4%EC%9E%90%EB%A3%8C.txt"
code = pd.read_csv(path_code, encoding="cp949", sep="\t")
code = code.loc[code["폐지여부"] == "존재"]
code["법정구코드"] = list(map(lambda a: str(a)[:5], list(code["법정동코드"])))
self.code = code
def CodeFinder(self, name):
"""
국토교통부 실거래가 정보 오픈API는 법정동코드 10자리 중 앞 5자리인 구를 나타내는 지역코드를 사용합니다.
API에 사용할 구 별 코드를 조회하는 메서드이며, 문자열 지역 명을 입력받고, 조회 결과를 Pandas DataFrame형식으로 출력합니다.
"""
result = self.code[self.code["법정동명"].str.contains(name)][[
"법정동명", "법정구코드"
]]
result.index = range(len(result))
return result
def ChangeCols(self, df, operationName):
"""
영문 컬럼명을 국문 컬럼명으로 변경
"""
if operationName == "getBrBasisOulnInfo":
self.colDict = {
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"bylotCnt": "외필지수",
"crtnDay": "생성일자",
"guyukCd": "구역코드",
"guyukCdNm": "구역코드명",
"ji": "지",
"jiguCd": "지구코드",
"jiguCdNm": "지구코드명",
"jiyukCd": "지역코드",
"jiyukCdNm": "지역코드명",
"lot": "로트",
"mgmBldrgstPk": "관리건축물대장PK",
"mgmUpBldrgstPk": "관리상위건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
}
elif operationName == "getBrRecapTitleInfo":
self.colDict = {
"archArea": "건축면적",
"atchBldArea": "부속건축물면적",
"atchBldCnt": "부속건축물수",
"bcRat": "건폐율",
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"bylotCnt": "외필지수",
"crtnDay": "생성일자",
"engrEpi": "EPI점수",
"engrGrade": "에너지효율등급",
"engrRat": "에너지절감율",
"etcPurps": "기타용도",
"fmlyCnt": "가구수",
"gnBldCert": "친환경건축물인증점수",
"gnBldGrade": "친환경건축물등급",
"hhldCnt": "세대수",
"hoCnt": "호수",
"indrAutoArea": "옥내자주식면적",
"indrAutoUtcnt": "옥내자주식대수",
"indrMechArea": "옥내기계식면적",
"indrMechUtcnt": "옥내기계식대수",
"itgBldCert": "지능형건축물인증점수",
"itgBldGrade": "지능형건축물등급",
"ji": "지",
"lot": "로트",
"mainBldCnt": "주건축물수",
"mainPurpsCd": "주용도코드",
"mainPurpsCdNm": "주용도코드명",
"mgmBldrgstPk": "관리건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newOldRegstrGbCd": "신구대장구분코드",
"newOldRegstrGbCdNm": "신구대장구분코드명",
"newPlatPlc": "도로명대지위치",
"oudrAutoArea": "옥외자주식면적",
"oudrAutoUtcnt": "옥외자주식대수",
"oudrMechArea": "옥외기계식면적",
"oudrMechUtcnt": "옥외기계식대수",
"platArea": "대지면적",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"pmsDay": "허가일",
"pmsnoGbCd": "허가번호구분코드",
"pmsnoGbCdNm": "허가번호구분코드명",
"pmsnoKikCd": "허가번호기관코드",
"pmsnoKikCdNm": "허가번호기관코드명",
"pmsnoYear": "허가번호년",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
"stcnsDay": "착공일",
"totArea": "연면적",
"totPkngCnt": "총주차수",
"useAprDay": "사용승인일",
"vlRat": "용적률",
"vlRatEstmTotArea": "용적률산정연면적",
}
elif operationName == "getBrTitleInfo":
self.colDict = {
"archArea": "건축면적",
"atchBldArea": "부속건축물면적",
"atchBldCnt": "부속건축물수",
"bcRat": "건폐율",
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"bylotCnt": "외필지수",
"crtnDay": "생성일자",
"dongNm": "동명칭",
"emgenUseElvtCnt": "비상용승강기수",
"engrEpi": "EPI점수",
"engrGrade": "에너지효율등급",
"engrRat": "에너지절감율",
"etcPurps": "기타용도",
"etcRoof": "기타지붕",
"etcStrct": "기타구조",
"fmlyCnt": "가구수",
"gnBldCert": "친환경건축물인증점수",
"gnBldGrade": "친환경건축물등급",
"grndFlrCnt": "지상층수",
"heit": "높이",
"hhldCnt": "세대수",
"hoCnt": "호수",
"indrAutoArea": "옥내자주식면적",
"indrAutoUtcnt": "옥내자주식대수",
"indrMechArea": "옥내기계식면적",
"indrMechUtcnt": "옥내기계식대수",
"itgBldCert": "지능형건축물인증점수",
"itgBldGrade": "지능형건축물등급",
"ji": "지",
"lot": "로트",
"mainAtchGbCd": "주부속구분코드",
"mainAtchGbCdNm": "주부속구분코드명",
"mainPurpsCd": "주용도코드",
"mainPurpsCdNm": "주용도코드명",
"mgmBldrgstPk": "관리건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"oudrAutoArea": "옥외자주식면적",
"oudrAutoUtcnt": "옥외자주식대수",
"oudrMechArea": "옥외기계식면적",
"oudrMechUtcnt": "옥외기계식대수",
"platArea": "대지면적",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"pmsDay": "허가일",
"pmsnoGbCd": "허가번호구분코드",
"pmsnoGbCdNm": "허가번호구분코드명",
"pmsnoKikCd": "허가번호기관코드",
"pmsnoKikCdNm": "허가번호기관코드명",
"pmsnoYear": "허가번호년",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rideUseElvtCnt": "승용승강기수",
"rnum": "순번",
"roofCd": "지붕코드",
"roofCdNm": "지붕코드명",
"rserthqkAblty": "내진 능력",
"rserthqkDsgnApplyYn": "내진 설계 적용 여부",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
"stcnsDay": "착공일",
"strctCd": "구조코드",
"strctCdNm": "구조코드명",
"totArea": "연면적",
"totDongTotArea": "총동연면적",
"ugrndFlrCnt": "지하층수",
"useAprDay": "사용승인일",
"vlRat": "용적률",
"vlRatEstmTotArea": "용적률산정연면적",
}
elif operationName == "getBrFlrOulnInfo":
self.colDict = colDict = {
"area": "면적",
"areaExctYn": "면적제외여부",
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"crtnDay": "생성일자",
"dongNm": "동명칭",
"etcPurps": "기타용도",
"etcStrct": "기타구조",
"flrGbCd": "층구분코드",
"flrGbCdNm": "층구분코드명",
"flrNo": "층번호",
"flrNoNm": "층번호명",
"ji": "지",
"lot": "로트",
"mainAtchGbCd": "주부속구분코드",
"mainAtchGbCdNm": "주부속구분코드명",
"mainPurpsCd": "주용도코드",
"mainPurpsCdNm": "주용도코드명",
"mgmBldrgstPk": "관리건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
"strctCd": "구조코드",
"strctCdNm": "구조코드명",
}
elif operationName == "getBrAtchJibunInfo":
self.colDict = colDict = {
"atchBjdongCd": "부속법정동코드",
"atchBlock": "부속블록",
"atchBun": "부속번",
"atchEtcJibunNm": "부속기타지번명",
"atchJi": "부속지",
"atchLot": "부속로트",
"atchPlatGbCd": "부속대지구분코드",
"atchRegstrGbCd": "부속대장구분코드",
"atchRegstrGbCdNm": "부속대장구분코드명",
"atchSigunguCd": "부속시군구코드",
"atchSplotNm": "부속특수지명",
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"crtnDay": "생성일자",
"ji": "지",
"lot": "로트",
"mgmBldrgstPk": "관리건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
}
elif operationName == "getBrExposPubuseAreaInfo":
self.colDict = colDict = {
"area": "면적",
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"crtnDay": "생성일자",
"dongNm": "동명칭",
"etcPurps": "기타용도",
"etcStrct": "기타구조",
"exposPubuseGbCd": "전유공용구분코드",
"exposPubuseGbCdNm": "전유공용구분코드명",
"flrGbCd": "층구분코드",
"flrGbCdNm": "층구분코드명",
"flrNo": "층번호",
"flrNoNm": "층번호명",
"hoNm": "호명칭",
"ji": "지",
"lot": "로트",
"mainAtchGbCd": "주부속구분코드",
"mainAtchGbCdNm": "주부속구분코드명",
"mainPurpsCd": "주용도코드",
"mainPurpsCdNm": "주용도코드명",
"mgmBldrgstPk": "관리건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
"strctCd": "구조코드",
"strctCdNm": "구조코드명",
}
elif operationName == "getBrWclfInfo":
self.colDict = colDict = {
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"capaLube": "용량(루베)",
"capaPsper": "용량(인용)",
"crtnDay": "생성일자",
"etcMode": "기타형식",
"ji": "지",
"lot": "로트",
"mgmBldrgstPk": "관리건축물대장PK",
"modeCd": "형식코드",
"modeCdNm": "형식코드명",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
"unitGbCd": "단위구분코드",
"unitGbCdNm": "단위구분코드명",
}
elif operationName == "getBrHsprcInfo":
self.colDict = colDict = {
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"bylotCnt": "외필지수",
"crtnDay": "생성일자",
"hsprc": "주택가격",
"ji": "지",
"lot": "로트",
"mgmBldrgstPk": "관리건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
}
elif operationName == "getBrExposInfo":
self.colDict = colDict = {
"bjdongCd": "법정동코드",
"bldNm": "건물명",
"block": "블록",
"bun": "번",
"crtnDay": "생성일자",
"dongNm": "동명칭",
"flrGbCd": "층구분코드",
"flrGbCdNm": "층구분코드명",
"flrNo": "층번호",
"hoNm": "호명칭",
"ji": "지",
"lot": "로트",
"mgmBldrgstPk": "관리건축물대장PK",
"naBjdongCd": "새주소법정동코드",
"naMainBun": "새주소본번",
"naRoadCd": "새주소도로코드",
"naSubBun": "새주소부번",
"naUgrndCd": "새주소지상지하코드",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"regstrGbCd": "대장구분코드",
"regstrGbCdNm": "대장구분코드명",
"regstrKindCd": "대장종류코드",
"regstrKindCdNm": "대장종류코드명",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
}
elif operationName == "getBrJijiguInfo":
self.colDict = colDict = {
"bjdongCd": "법정동코드",
"block": "블록",
"bun": "번",
"crtnDay": "생성일자",
"etcJijigu": "기타지역지구구역",
"ji": "지",
"jijiguCd": "지역지구구역코드",
"jijiguCdNm": "지역지구구역코드명",
"jijiguGbCd": "지역지구구역구분코드",
"jijiguGbCdNm": "지역지구구역구분코드명",
"lot": "로트",
"mgmBldrgstPk": "관리건축물대장PK",
"newPlatPlc": "도로명대지위치",
"platGbCd": "대지구분코드",
"platPlc": "대지위치",
"reprYn": "대표여부",
"rnum": "순번",
"sigunguCd": "시군구코드",
"splotNm": "특수지명",
}
df = df.rename(columns=self.colDict)
return df
def getBrBasisOulnInfo(
self,
sigunguCd_,
bjdongCd_,
platGbCd_="",
bun_="",
ji_="",
startDate_="",
endDate_="",
):
"""
01 건축물대장 기본개요 조회
입력: 시군구코드, 법정동코드, 대지구분코드, 번, 지
"""
# URL
url = f"{self.url_getBrBasisOulnInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"bjdongCd",
"bldNm",
"block",
"bun",
"bylotCnt",
"crtnDay",
"guyukCd",
"guyukCdNm",
"ji",
"jiguCd",
"jiguCdNm",
"jiyukCd",
"jiyukCdNm",
"lot",
"mgmBldrgstPk",
"mgmUpBldrgstPk",
"naBjdongCd",
"naMainBun",
"naRoadCd",
"naSubBun",
"naUgrndCd",
"newPlatPlc",
"platGbCd",
"platPlc",
"regstrGbCd",
"regstrGbCdNm",
"regstrKindCd",
"regstrKindCdNm",
"rnum",
"sigunguCd",
"splotNm",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[
bjdongCd,
bldNm,
block,
bun,
bylotCnt,
crtnDay,
guyukCd,
guyukCdNm,
ji,
jiguCd,
jiguCdNm,
jiyukCd,
jiyukCdNm,
lot,
mgmBldrgstPk,
mgmUpBldrgstPk,
naBjdongCd,
naMainBun,
naRoadCd,
naSubBun,
naUgrndCd,
newPlatPlc,
platGbCd,
platPlc,
regstrGbCd,
regstrGbCdNm,
regstrKindCd,
regstrKindCdNm,
rnum,
sigunguCd,
splotNm,
]],
columns=variables,
)
df = pd.concat([df, data])
df.index = range(len(df))
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def getBrRecapTitleInfo(
self,
sigunguCd_,
bjdongCd_,
platGbCd_="",
bun_="",
ji_="",
startDate_="",
endDate_="",
):
"""
02 건축물대장 총괄표제부 조회
입력: 시군구코드, 법정동코드, 대지구분코드, 번, 지, 검색시작일, 검색종료일
"""
# URL
url = f"{self.url_getBrRecapTitleInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"archArea",
"atchBldArea",
"atchBldCnt",
"bcRat",
"bjdongCd",
"bldNm",
"block",
"bun",
"bylotCnt",
"crtnDay",
"engrEpi",
"engrGrade",
"engrRat",
"etcPurps",
"fmlyCnt",
"gnBldCert",
"gnBldGrade",
"hhldCnt",
"hoCnt",
"indrAutoArea",
"indrAutoUtcnt",
"indrMechArea",
"indrMechUtcnt",
"itgBldCert",
"itgBldGrade",
"ji",
"lot",
"mainBldCnt",
"mainPurpsCd",
"mainPurpsCdNm",
"mgmBldrgstPk",
"naBjdongCd",
"naMainBun",
"naRoadCd",
"naSubBun",
"naUgrndCd",
"newOldRegstrGbCd",
"newOldRegstrGbCdNm",
"newPlatPlc",
"oudrAutoArea",
"oudrAutoUtcnt",
"oudrMechArea",
"oudrMechUtcnt",
"platArea",
"platGbCd",
"platPlc",
"pmsDay",
"pmsnoGbCd",
"pmsnoGbCdNm",
"pmsnoKikCd",
"pmsnoKikCdNm",
"pmsnoYear",
"regstrGbCd",
"regstrGbCdNm",
"regstrKindCd",
"regstrKindCdNm",
"rnum",
"sigunguCd",
"splotNm",
"stcnsDay",
"totArea",
"totPkngCnt",
"useAprDay",
"vlRat",
"vlRatEstmTotArea",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[
archArea,
atchBldArea,
atchBldCnt,
bcRat,
bjdongCd,
bldNm,
block,
bun,
bylotCnt,
crtnDay,
engrEpi,
engrGrade,
engrRat,
etcPurps,
fmlyCnt,
gnBldCert,
gnBldGrade,
hhldCnt,
hoCnt,
indrAutoArea,
indrAutoUtcnt,
indrMechArea,
indrMechUtcnt,
itgBldCert,
itgBldGrade,
ji,
lot,
mainBldCnt,
mainPurpsCd,
mainPurpsCdNm,
mgmBldrgstPk,
naBjdongCd,
naMainBun,
naRoadCd,
naSubBun,
naUgrndCd,
newOldRegstrGbCd,
newOldRegstrGbCdNm,
newPlatPlc,
oudrAutoArea,
oudrAutoUtcnt,
oudrMechArea,
oudrMechUtcnt,
platArea,
platGbCd,
platPlc,
pmsDay,
pmsnoGbCd,
pmsnoGbCdNm,
pmsnoKikCd,
pmsnoKikCdNm,
pmsnoYear,
regstrGbCd,
regstrGbCdNm,
regstrKindCd,
regstrKindCdNm,
rnum,
sigunguCd,
splotNm,
stcnsDay,
totArea,
totPkngCnt,
useAprDay,
vlRat,
vlRatEstmTotArea,
]],
columns=variables,
)
df = pd.concat([df, data])
df.index = range(len(df))
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def getBrTitleInfo(
self,
sigunguCd_,
bjdongCd_,
platGbCd_="",
bun_="",
ji_="",
startDate_="",
endDate_="",
):
"""
03 건축물대장 표제부 조회: getBrTitleInfo
입력: 시군구코드, 법정동코드, 대지구분코드, 번, 지, 검색시작일, 검색종료일
"""
# URL
url = f"{self.url_getBrTitleInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"archArea",
"atchBldArea",
"atchBldCnt",
"bcRat",
"bjdongCd",
"bldNm",
"block",
"bun",
"bylotCnt",
"crtnDay",
"dongNm",
"emgenUseElvtCnt",
"engrEpi",
"engrGrade",
"engrRat",
"etcPurps",
"etcRoof",
"etcStrct",
"fmlyCnt",
"gnBldCert",
"gnBldGrade",
"grndFlrCnt",
"heit",
"hhldCnt",
"hoCnt",
"indrAutoArea",
"indrAutoUtcnt",
"indrMechArea",
"indrMechUtcnt",
"itgBldCert",
"itgBldGrade",
"ji",
"lot",
"mainAtchGbCd",
"mainAtchGbCdNm",
"mainPurpsCd",
"mainPurpsCdNm",
"mgmBldrgstPk",
"naBjdongCd",
"naMainBun",
"naRoadCd",
"naSubBun",
"naUgrndCd",
"newPlatPlc",
"oudrAutoArea",
"oudrAutoUtcnt",
"oudrMechArea",
"oudrMechUtcnt",
"platArea",
"platGbCd",
"platPlc",
"pmsDay",
"pmsnoGbCd",
"pmsnoGbCdNm",
"pmsnoKikCd",
"pmsnoKikCdNm",
"pmsnoYear",
"regstrGbCd",
"regstrGbCdNm",
"regstrKindCd",
"regstrKindCdNm",
"rideUseElvtCnt",
"rnum",
"roofCd",
"roofCdNm",
"rserthqkAblty",
"rserthqkDsgnApplyYn",
"sigunguCd",
"splotNm",
"stcnsDay",
"strctCd",
"strctCdNm",
"totArea",
"totDongTotArea",
"ugrndFlrCnt",
"useAprDay",
"vlRat",
"vlRatEstmTotArea",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[
archArea,
atchBldArea,
atchBldCnt,
bcRat,
bjdongCd,
bldNm,
block,
bun,
bylotCnt,
crtnDay,
dongNm,
emgenUseElvtCnt,
engrEpi,
engrGrade,
engrRat,
etcPurps,
etcRoof,
etcStrct,
fmlyCnt,
gnBldCert,
gnBldGrade,
grndFlrCnt,
heit,
hhldCnt,
hoCnt,
indrAutoArea,
indrAutoUtcnt,
indrMechArea,
indrMechUtcnt,
itgBldCert,
itgBldGrade,
ji,
lot,
mainAtchGbCd,
mainAtchGbCdNm,
mainPurpsCd,
mainPurpsCdNm,
mgmBldrgstPk,
naBjdongCd,
naMainBun,
naRoadCd,
naSubBun,
naUgrndCd,
newPlatPlc,
oudrAutoArea,
oudrAutoUtcnt,
oudrMechArea,
oudrMechUtcnt,
platArea,
platGbCd,
platPlc,
pmsDay,
pmsnoGbCd,
pmsnoGbCdNm,
pmsnoKikCd,
pmsnoKikCdNm,
pmsnoYear,
regstrGbCd,
regstrGbCdNm,
regstrKindCd,
regstrKindCdNm,
rideUseElvtCnt,
rnum,
roofCd,
roofCdNm,
rserthqkAblty,
rserthqkDsgnApplyYn,
sigunguCd,
splotNm,
stcnsDay,
strctCd,
strctCdNm,
totArea,
totDongTotArea,
ugrndFlrCnt,
useAprDay,
vlRat,
vlRatEstmTotArea,
]],
columns=variables,
)
df = pd.concat([df, data])
df.index = range(len(df))
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def getBrFlrOulnInfo(
self,
sigunguCd_,
bjdongCd_,
platGbCd_="",
bun_="",
ji_="",
startDate_="",
endDate_="",
):
"""
04 건축물대장 층별개요 조회
입력: 시군구코드, 법정동코드, 대지구분코드, 번, 지, 검색시작일, 검색종료일
"""
# URL
url = f"{self.url_getBrFlrOulnInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
try:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("item")
# Creating Pandas Data Frame
df = pd.DataFrame()
variables = [
"area",
"areaExctYn",
"bjdongCd",
"bldNm",
"block",
"bun",
"crtnDay",
"dongNm",
"etcPurps",
"etcStrct",
"flrGbCd",
"flrGbCdNm",
"flrNo",
"flrNoNm",
"ji",
"lot",
"mainAtchGbCd",
"mainAtchGbCdNm",
"mainPurpsCd",
"mainPurpsCdNm",
"mgmBldrgstPk",
"naBjdongCd",
"naMainBun",
"naRoadCd",
"naSubBun",
"naUgrndCd",
"newPlatPlc",
"platGbCd",
"platPlc",
"rnum",
"sigunguCd",
"splotNm",
"strctCd",
"strctCdNm",
]
for t in te:
for variable in variables:
try:
globals()[variable] = t.find(variable).text
except:
globals()[variable] = np.nan
data = pd.DataFrame(
[[
area,
areaExctYn,
bjdongCd,
bldNm,
block,
bun,
crtnDay,
dongNm,
etcPurps,
etcStrct,
flrGbCd,
flrGbCdNm,
flrNo,
flrNoNm,
ji,
lot,
mainAtchGbCd,
mainAtchGbCdNm,
mainPurpsCd,
mainPurpsCdNm,
mgmBldrgstPk,
naBjdongCd,
naMainBun,
naRoadCd,
naSubBun,
naUgrndCd,
newPlatPlc,
platGbCd,
platPlc,
rnum,
sigunguCd,
splotNm,
strctCd,
strctCdNm,
]],
columns=variables,
)
df = pd.concat([df, data])
df.index = range(len(df))
return df
except:
# Get raw data
result = requests.get(url, verify=False)
# Parsing
xmlsoup = BeautifulSoup(result.text, "lxml-xml")
# Filtering
te = xmlsoup.findAll("header")
# 정상 요청시 에러 발생 -> Python 코드 에러
if te[0].find("resultCode").text == "00":
print(">>> Python Logic Error. e-mail : wooil@kakao.com")
# Open API 서비스 제공처 오류
else:
print(">>> Open API Error: {}".format(te[0].find["resultMsg"]))
def getBrAtchJibunInfo(
    self,
    sigunguCd_,
    bjdongCd_,
    platGbCd_="",
    bun_="",
    ji_="",
    startDate_="",
    endDate_="",
):
    """05 Building register: attached lot-number (부속지번) lookup.

    Parameters mirror the public Open API query string:
    sigunguCd_ / bjdongCd_ -- district (시군구) / legal-dong (법정동) codes
    platGbCd_, bun_, ji_   -- plat type code and lot numbers (optional)
    startDate_, endDate_   -- search period (optional)

    Returns a pandas DataFrame with one row per ``<item>`` element of the
    XML response; on failure prints a diagnostic and returns None.
    """
    # numOfRows=99999 asks the service for every record in one page.
    url = (
        f"{self.url_getBrAtchJibunInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}"
        f"&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}"
        f"&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
    )
    # Fields expected inside each <item> element (DataFrame column order).
    variables = [
        "atchBjdongCd", "atchBlock", "atchBun", "atchEtcJibunNm", "atchJi",
        "atchLot", "atchPlatGbCd", "atchRegstrGbCd", "atchRegstrGbCdNm",
        "atchSigunguCd", "atchSplotNm", "bjdongCd", "bldNm", "block", "bun",
        "crtnDay", "ji", "lot", "mgmBldrgstPk", "naBjdongCd", "naMainBun",
        "naRoadCd", "naSubBun", "naUgrndCd", "newPlatPlc", "platGbCd",
        "platPlc", "regstrGbCd", "regstrGbCdNm", "regstrKindCd",
        "regstrKindCdNm", "rnum", "sigunguCd", "splotNm",
    ]
    try:
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        items = xmlsoup.findAll("item")
        # Collect rows locally instead of writing every field into
        # globals() and concatenating one-row frames (O(n^2)) as before.
        rows = []
        for item in items:
            row = []
            for variable in variables:
                tag = item.find(variable)
                row.append(tag.text if tag is not None else np.nan)
            rows.append(row)
        # Preserve the historical "empty frame, no columns" result when
        # the response carried no <item> elements.
        df = pd.DataFrame(rows, columns=variables) if rows else pd.DataFrame()
        df.index = range(len(df))
        return df
    except Exception:
        # Re-request and inspect the response header to classify the failure.
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        header = xmlsoup.findAll("header")
        # resultCode "00" means the API call succeeded, so the exception
        # came from this client's own parsing logic.
        if header[0].find("resultCode").text == "00":
            print(">>> Python Logic Error. e-mail : wooil@kakao.com")
        else:
            # BUG FIX: the original used `find["resultMsg"]` (subscripting a
            # bound method), which raised TypeError instead of printing.
            print(">>> Open API Error: {}".format(header[0].find("resultMsg").text))
def getBrExposPubuseAreaInfo(
    self,
    sigunguCd_,
    bjdongCd_,
    platGbCd_="",
    bun_="",
    ji_="",
    startDate_="",
    endDate_="",
    dongNm_="",
    hoNm_="",
):
    """06 Building register: exclusive/common area (전유공용면적) lookup.

    Parameters mirror the public Open API query string:
    sigunguCd_ / bjdongCd_ -- district (시군구) / legal-dong (법정동) codes
    platGbCd_, bun_, ji_   -- plat type code and lot numbers (optional)
    startDate_, endDate_   -- search period (optional)
    dongNm_, hoNm_         -- building dong / unit (ho) names (optional)

    Returns a pandas DataFrame with one row per ``<item>`` element of the
    XML response; on failure prints a diagnostic and returns None.
    """
    # numOfRows=99999 asks the service for every record in one page.
    url = (
        f"{self.url_getBrExposPubuseAreaInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}"
        f"&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}"
        f"&startDate={startDate_}&endDate={endDate_}"
        f"&dongNm={dongNm_}&hoNm={hoNm_}&numOfRows=99999"
    )
    # Fields expected inside each <item> element (DataFrame column order).
    variables = [
        "area", "bjdongCd", "bldNm", "block", "bun", "crtnDay", "dongNm",
        "etcPurps", "etcStrct", "exposPubuseGbCd", "exposPubuseGbCdNm",
        "flrGbCd", "flrGbCdNm", "flrNo", "flrNoNm", "hoNm", "ji", "lot",
        "mainAtchGbCd", "mainAtchGbCdNm", "mainPurpsCd", "mainPurpsCdNm",
        "mgmBldrgstPk", "naBjdongCd", "naMainBun", "naRoadCd", "naSubBun",
        "naUgrndCd", "newPlatPlc", "platGbCd", "platPlc", "regstrGbCd",
        "regstrGbCdNm", "regstrKindCd", "regstrKindCdNm", "rnum",
        "sigunguCd", "splotNm", "strctCd", "strctCdNm",
    ]
    try:
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        items = xmlsoup.findAll("item")
        # Collect rows locally instead of writing every field into
        # globals() and concatenating one-row frames (O(n^2)) as before.
        rows = []
        for item in items:
            row = []
            for variable in variables:
                tag = item.find(variable)
                row.append(tag.text if tag is not None else np.nan)
            rows.append(row)
        # Preserve the historical "empty frame, no columns" result when
        # the response carried no <item> elements.
        df = pd.DataFrame(rows, columns=variables) if rows else pd.DataFrame()
        df.index = range(len(df))
        return df
    except Exception:
        # Re-request and inspect the response header to classify the failure.
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        header = xmlsoup.findAll("header")
        # resultCode "00" means the API call succeeded, so the exception
        # came from this client's own parsing logic.
        if header[0].find("resultCode").text == "00":
            print(">>> Python Logic Error. e-mail : wooil@kakao.com")
        else:
            # BUG FIX: the original used `find["resultMsg"]` (subscripting a
            # bound method), which raised TypeError instead of printing.
            print(">>> Open API Error: {}".format(header[0].find("resultMsg").text))
def getBrWclfInfo(
    self,
    sigunguCd_,
    bjdongCd_,
    platGbCd_="",
    bun_="",
    ji_="",
    startDate_="",
    endDate_="",
):
    """07 Building register: sewage-treatment facility (오수정화시설) lookup.

    Parameters mirror the public Open API query string:
    sigunguCd_ / bjdongCd_ -- district (시군구) / legal-dong (법정동) codes
    platGbCd_, bun_, ji_   -- plat type code and lot numbers (optional)
    startDate_, endDate_   -- search period (optional)

    Returns a pandas DataFrame with one row per ``<item>`` element of the
    XML response; on failure prints a diagnostic and returns None.
    """
    # numOfRows=99999 asks the service for every record in one page.
    url = (
        f"{self.url_getBrWclfInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}"
        f"&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}"
        f"&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
    )
    # Fields expected inside each <item> element (DataFrame column order).
    variables = [
        "bjdongCd", "bldNm", "block", "bun", "capaLube", "capaPsper",
        "crtnDay", "etcMode", "ji", "lot", "mgmBldrgstPk", "modeCd",
        "modeCdNm", "naBjdongCd", "naMainBun", "naRoadCd", "naSubBun",
        "naUgrndCd", "newPlatPlc", "platGbCd", "platPlc", "regstrGbCd",
        "regstrGbCdNm", "regstrKindCd", "regstrKindCdNm", "rnum",
        "sigunguCd", "splotNm", "unitGbCd", "unitGbCdNm",
    ]
    try:
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        items = xmlsoup.findAll("item")
        # Collect rows locally instead of writing every field into
        # globals() and concatenating one-row frames (O(n^2)) as before.
        rows = []
        for item in items:
            row = []
            for variable in variables:
                tag = item.find(variable)
                row.append(tag.text if tag is not None else np.nan)
            rows.append(row)
        # Preserve the historical "empty frame, no columns" result when
        # the response carried no <item> elements.
        df = pd.DataFrame(rows, columns=variables) if rows else pd.DataFrame()
        df.index = range(len(df))
        return df
    except Exception:
        # Re-request and inspect the response header to classify the failure.
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        header = xmlsoup.findAll("header")
        # resultCode "00" means the API call succeeded, so the exception
        # came from this client's own parsing logic.
        if header[0].find("resultCode").text == "00":
            print(">>> Python Logic Error. e-mail : wooil@kakao.com")
        else:
            # BUG FIX: the original used `find["resultMsg"]` (subscripting a
            # bound method), which raised TypeError instead of printing.
            print(">>> Open API Error: {}".format(header[0].find("resultMsg").text))
def getBrHsprcInfo(
    self,
    sigunguCd_,
    bjdongCd_,
    platGbCd_="",
    bun_="",
    ji_="",
    startDate_="",
    endDate_="",
):
    """08 Building register: housing price (주택가격) lookup.

    Parameters mirror the public Open API query string:
    sigunguCd_ / bjdongCd_ -- district (시군구) / legal-dong (법정동) codes
    platGbCd_, bun_, ji_   -- plat type code and lot numbers (optional)
    startDate_, endDate_   -- search period (optional)

    Returns a pandas DataFrame with one row per ``<item>`` element of the
    XML response; on failure prints a diagnostic and returns None.
    """
    # numOfRows=99999 asks the service for every record in one page.
    url = (
        f"{self.url_getBrHsprcInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}"
        f"&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}"
        f"&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
    )
    # Fields expected inside each <item> element (DataFrame column order).
    variables = [
        "bjdongCd", "bldNm", "block", "bun", "bylotCnt", "crtnDay", "hsprc",
        "ji", "lot", "mgmBldrgstPk", "naBjdongCd", "naMainBun", "naRoadCd",
        "naSubBun", "naUgrndCd", "newPlatPlc", "platGbCd", "platPlc",
        "regstrGbCd", "regstrGbCdNm", "regstrKindCd", "regstrKindCdNm",
        "rnum", "sigunguCd", "splotNm",
    ]
    try:
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        items = xmlsoup.findAll("item")
        # Collect rows locally instead of writing every field into
        # globals() and concatenating one-row frames (O(n^2)) as before.
        rows = []
        for item in items:
            row = []
            for variable in variables:
                tag = item.find(variable)
                row.append(tag.text if tag is not None else np.nan)
            rows.append(row)
        # Preserve the historical "empty frame, no columns" result when
        # the response carried no <item> elements.
        df = pd.DataFrame(rows, columns=variables) if rows else pd.DataFrame()
        df.index = range(len(df))
        return df
    except Exception:
        # Re-request and inspect the response header to classify the failure.
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        header = xmlsoup.findAll("header")
        # resultCode "00" means the API call succeeded, so the exception
        # came from this client's own parsing logic.
        if header[0].find("resultCode").text == "00":
            print(">>> Python Logic Error. e-mail : wooil@kakao.com")
        else:
            # BUG FIX: the original used `find["resultMsg"]` (subscripting a
            # bound method), which raised TypeError instead of printing.
            print(">>> Open API Error: {}".format(header[0].find("resultMsg").text))
def getBrExposInfo(
    self,
    sigunguCd_,
    bjdongCd_,
    platGbCd_="",
    bun_="",
    ji_="",
    startDate_="",
    endDate_="",
):
    """09 Building register: exclusive-part (전유부) lookup.

    Parameters mirror the public Open API query string:
    sigunguCd_ / bjdongCd_ -- district (시군구) / legal-dong (법정동) codes
    platGbCd_, bun_, ji_   -- plat type code and lot numbers (optional)
    startDate_, endDate_   -- search period (optional)

    Returns a pandas DataFrame with one row per ``<item>`` element of the
    XML response; on failure prints a diagnostic and returns None.
    """
    # numOfRows=99999 asks the service for every record in one page.
    url = (
        f"{self.url_getBrExposInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}"
        f"&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}"
        f"&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
    )
    # Fields expected inside each <item> element (DataFrame column order).
    variables = [
        "bjdongCd", "bldNm", "block", "bun", "crtnDay", "dongNm", "flrGbCd",
        "flrGbCdNm", "flrNo", "hoNm", "ji", "lot", "mgmBldrgstPk",
        "naBjdongCd", "naMainBun", "naRoadCd", "naSubBun", "naUgrndCd",
        "newPlatPlc", "platGbCd", "platPlc", "regstrGbCd", "regstrGbCdNm",
        "regstrKindCd", "regstrKindCdNm", "rnum", "sigunguCd", "splotNm",
    ]
    try:
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        items = xmlsoup.findAll("item")
        # Collect rows locally instead of writing every field into
        # globals() and concatenating one-row frames (O(n^2)) as before.
        rows = []
        for item in items:
            row = []
            for variable in variables:
                tag = item.find(variable)
                row.append(tag.text if tag is not None else np.nan)
            rows.append(row)
        # Preserve the historical "empty frame, no columns" result when
        # the response carried no <item> elements.
        df = pd.DataFrame(rows, columns=variables) if rows else pd.DataFrame()
        df.index = range(len(df))
        return df
    except Exception:
        # Re-request and inspect the response header to classify the failure.
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        header = xmlsoup.findAll("header")
        # resultCode "00" means the API call succeeded, so the exception
        # came from this client's own parsing logic.
        if header[0].find("resultCode").text == "00":
            print(">>> Python Logic Error. e-mail : wooil@kakao.com")
        else:
            # BUG FIX: the original used `find["resultMsg"]` (subscripting a
            # bound method), which raised TypeError instead of printing.
            print(">>> Open API Error: {}".format(header[0].find("resultMsg").text))
def getBrJijiguInfo(
    self,
    sigunguCd_,
    bjdongCd_,
    platGbCd_="",
    bun_="",
    ji_="",
    startDate_="",
    endDate_="",
):
    """10 Building register: zoning district/area (지역지구구역) lookup.

    Parameters mirror the public Open API query string:
    sigunguCd_ / bjdongCd_ -- district (시군구) / legal-dong (법정동) codes
    platGbCd_, bun_, ji_   -- plat type code and lot numbers (optional)
    startDate_, endDate_   -- search period (optional)

    Returns a pandas DataFrame with one row per ``<item>`` element of the
    XML response; on failure prints a diagnostic and returns None.
    """
    # numOfRows=99999 asks the service for every record in one page.
    url = (
        f"{self.url_getBrJijiguInfo}&sigunguCd={sigunguCd_}&bjdongCd={bjdongCd_}"
        f"&platGbCd={platGbCd_}&bun={bun_}&ji={ji_}"
        f"&startDate={startDate_}&endDate={endDate_}&numOfRows=99999"
    )
    # Fields expected inside each <item> element (DataFrame column order).
    variables = [
        "bjdongCd", "block", "bun", "crtnDay", "etcJijigu", "ji",
        "jijiguCd", "jijiguCdNm", "jijiguGbCd", "jijiguGbCdNm", "lot",
        "mgmBldrgstPk", "newPlatPlc", "platGbCd", "platPlc", "reprYn",
        "rnum", "sigunguCd", "splotNm",
    ]
    try:
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        items = xmlsoup.findAll("item")
        # Collect rows locally instead of writing every field into
        # globals() and concatenating one-row frames (O(n^2)) as before.
        rows = []
        for item in items:
            row = []
            for variable in variables:
                tag = item.find(variable)
                row.append(tag.text if tag is not None else np.nan)
            rows.append(row)
        # Preserve the historical "empty frame, no columns" result when
        # the response carried no <item> elements.
        df = pd.DataFrame(rows, columns=variables) if rows else pd.DataFrame()
        df.index = range(len(df))
        return df
    except Exception:
        # Re-request and inspect the response header to classify the failure.
        result = requests.get(url, verify=False)
        xmlsoup = BeautifulSoup(result.text, "lxml-xml")
        header = xmlsoup.findAll("header")
        # resultCode "00" means the API call succeeded, so the exception
        # came from this client's own parsing logic.
        if header[0].find("resultCode").text == "00":
            print(">>> Python Logic Error. e-mail : wooil@kakao.com")
        else:
            # BUG FIX: the original used `find["resultMsg"]` (subscripting a
            # bound method), which raised TypeError instead of printing.
            print(">>> Open API Error: {}".format(header[0].find("resultMsg").text))
| 32.529483
| 225
| 0.387469
| 8,127
| 107,575
| 5.076289
| 0.078996
| 0.007151
| 0.018955
| 0.0223
| 0.883166
| 0.859241
| 0.843146
| 0.841304
| 0.818083
| 0.817283
| 0
| 0.009932
| 0.493637
| 107,575
| 3,306
| 226
| 32.539322
| 0.74743
| 0.063044
| 0
| 0.843931
| 0
| 0.009249
| 0.181849
| 0.020807
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01079
| false
| 0
| 0.001927
| 0
| 0.023507
| 0.023507
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d90ed3f1a8ac04f915a6b8833127c1d06e11bd6
| 1,936
|
py
|
Python
|
tests/test_models.py
|
DroneMapp/powerlibs-django-eventul-models
|
e2e3c672842b39a548b56ca1562bfa949f32d64a
|
[
"MIT"
] | 1
|
2017-04-28T18:24:54.000Z
|
2017-04-28T18:24:54.000Z
|
tests/test_models.py
|
DroneMapp/powerlibs-django-eventul-models
|
e2e3c672842b39a548b56ca1562bfa949f32d64a
|
[
"MIT"
] | 1
|
2017-08-15T17:47:26.000Z
|
2017-08-15T18:21:50.000Z
|
tests/test_models.py
|
DroneMapp/powerlibs-django-contrib-eventful
|
e2e3c672842b39a548b56ca1562bfa949f32d64a
|
[
"MIT"
] | 2
|
2020-05-01T03:53:49.000Z
|
2021-05-26T00:17:52.000Z
|
import pytest
pytestmark = pytest.mark.django_db
def test_common_eventful_model_creation_with_create(eventful_model):
    """Saving a brand-new instance fires only the creation handlers."""
    instance = eventful_model(name='test 01')
    instance.save()
    expected_calls = {
        'pre_creation_handler_called': 1,
        'post_creation_handler_called': 1,
        'pre_update_handler_called': 0,
        'post_update_handler_called': 0,
        'pre_delete_handler_called': 0,
        'post_delete_handler_called': 0,
    }
    for handler, count in expected_calls.items():
        assert instance.debug_info[handler] == count
    assert instance.pk is not None
def test_common_eventful_model_is_creation_context_value(eventful_model):
    """An unsaved instance reports is_creation=True; one with a pk should not."""
    obj = eventful_model(name='test 01')
    assert obj.get_context()['is_creation'] is True

    obj2 = eventful_model(name='test 02')
    obj2.id = 1
    # BUG FIX: the original asserted on `obj` again, so `obj2` -- given an id
    # precisely to exercise the non-creation path -- was never checked.
    # Assumes get_context() keys is_creation off the presence of a pk/id,
    # which is what setting obj2.id here is clearly meant to exercise.
    assert obj2.get_context()['is_creation'] is False
def test_common_eventful_model_update(eventful_model):
    """A save after the initial save fires the update handlers once."""
    instance = eventful_model(name='test 01')
    instance.save()
    instance.name = 'test 02'
    instance.save()
    expected_calls = {
        'pre_creation_handler_called': 1,
        'post_creation_handler_called': 1,
        'pre_update_handler_called': 1,
        'post_update_handler_called': 1,
        'pre_delete_handler_called': 0,
        'post_delete_handler_called': 0,
    }
    for handler, count in expected_calls.items():
        assert instance.debug_info[handler] == count
    assert instance.get_context()['is_creation'] is False
def test_common_eventful_model_delete(eventful_model):
    """Deleting a saved instance fires the delete handlers once."""
    instance = eventful_model(name='test 01')
    instance.save()
    instance.delete()
    expected_calls = {
        'pre_creation_handler_called': 1,
        'post_creation_handler_called': 1,
        'pre_update_handler_called': 0,
        'post_update_handler_called': 0,
        'pre_delete_handler_called': 1,
        'post_delete_handler_called': 1,
    }
    for handler, count in expected_calls.items():
        assert instance.debug_info[handler] == count
| 30.730159
| 73
| 0.738636
| 283
| 1,936
| 4.667845
| 0.144876
| 0.149886
| 0.190765
| 0.245269
| 0.879637
| 0.800908
| 0.800908
| 0.784254
| 0.637396
| 0.637396
| 0
| 0.020073
| 0.150826
| 1,936
| 62
| 74
| 31.225806
| 0.783455
| 0
| 0
| 0.6
| 0
| 0
| 0.282025
| 0.243285
| 0
| 0
| 0
| 0
| 0.55
| 1
| 0.1
| false
| 0
| 0.025
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4dc9bde189d67f683c3c79ad7f4b0231a4d844d8
| 38,715
|
py
|
Python
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/_operations_mixin.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-03-24T06:26:11.000Z
|
2021-04-18T15:55:59.000Z
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/_operations_mixin.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 4
|
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/_operations_mixin.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
class WebSiteManagementClientOperationsMixin(object):
def check_name_availability(
    self,
    name,  # type: str
    type,  # type: Union[str, "_models.CheckNameResourceTypes"]
    is_fqdn=None,  # type: Optional[bool]
    **kwargs  # type: Any
):
    """Check if a resource name is available.

    :param name: Resource name to verify.
    :type name: str
    :param type: Resource type used for verification.
    :type type: str or ~azure.mgmt.web.v2020_09_01.models.CheckNameResourceTypes
    :param is_fqdn: Is fully qualified domain name.
    :type is_fqdn: bool
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceNameAvailability, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.ResourceNameAvailability
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('check_name_availability')
    # Lazily import only the operations module matching the negotiated version.
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'check_name_availability'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.check_name_availability(name, type, is_fqdn, **kwargs)
def generate_github_access_token_for_appservice_cli_async(
    self,
    code,  # type: str
    state,  # type: str
    **kwargs  # type: Any
):
    """Exchange code for GitHub access token for AppService CLI.

    :param code: Code string to exchange for Github Access token.
    :type code: str
    :param state: State string used for verification.
    :type state: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AppserviceGithubToken, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.AppserviceGithubToken
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('generate_github_access_token_for_appservice_cli_async')
    # This operation only exists in the 2020-09-01 API surface.
    if api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'generate_github_access_token_for_appservice_cli_async'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.generate_github_access_token_for_appservice_cli_async(code, state, **kwargs)
def get_publishing_user(
    self,
    **kwargs  # type: Any
):
    """Gets publishing user.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: User, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.User
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('get_publishing_user')
    # Lazily import only the operations module matching the negotiated version.
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'get_publishing_user'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.get_publishing_user(**kwargs)
def get_source_control(
    self,
    source_control_type,  # type: str
    **kwargs  # type: Any
):
    """Gets source control token.

    :param source_control_type: Type of source control.
    :type source_control_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SourceControl, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.SourceControl
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('get_source_control')
    # Lazily import only the operations module matching the negotiated version.
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'get_source_control'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.get_source_control(source_control_type, **kwargs)
def get_subscription_deployment_locations(
    self,
    **kwargs  # type: Any
):
    """Gets list of available geo regions plus ministamps.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DeploymentLocations, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.DeploymentLocations
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('get_subscription_deployment_locations')
    # Lazily import only the operations module matching the negotiated version.
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'get_subscription_deployment_locations'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.get_subscription_deployment_locations(**kwargs)
def list_billing_meters(
    self,
    billing_location=None,  # type: Optional[str]
    os_type=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    """Gets a list of meters for a given location.

    :param billing_location: Azure Location of billable resource.
    :type billing_location: str
    :param os_type: App Service OS type meters used for.
    :type os_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either BillingMeterCollection or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_09_01.models.BillingMeterCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('list_billing_meters')
    # Lazily import only the operations module matching the negotiated
    # version; note this operation is absent from 2016-03-01.
    if api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'list_billing_meters'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.list_billing_meters(billing_location, os_type, **kwargs)
def list_geo_regions(
    self,
    sku=None,  # type: Optional[Union[str, "_models.SkuName"]]
    linux_workers_enabled=None,  # type: Optional[bool]
    xenon_workers_enabled=None,  # type: Optional[bool]
    linux_dynamic_workers_enabled=None,  # type: Optional[bool]
    **kwargs  # type: Any
):
    """Get a list of available geographical regions.

    :param sku: Name of SKU used to filter the regions.
    :type sku: str or ~azure.mgmt.web.v2020_09_01.models.SkuName
    :param linux_workers_enabled: Specify :code:`<code>true</code>` to keep only
     regions that support Linux workers.
    :type linux_workers_enabled: bool
    :param xenon_workers_enabled: Specify :code:`<code>true</code>` to keep only
     regions that support Xenon workers.
    :type xenon_workers_enabled: bool
    :param linux_dynamic_workers_enabled: Specify :code:`<code>true</code>` to keep
     only regions that support Linux Consumption Workers.
    :type linux_dynamic_workers_enabled: bool
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either GeoRegionCollection or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_09_01.models.GeoRegionCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('list_geo_regions')
    # Lazily import only the operations module matching the negotiated version.
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'list_geo_regions'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.list_geo_regions(sku, linux_workers_enabled, xenon_workers_enabled, linux_dynamic_workers_enabled, **kwargs)
def list_premier_add_on_offers(
    self,
    **kwargs  # type: Any
):
    """List all premier add-on offers.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either PremierAddOnOfferCollection or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_09_01.models.PremierAddOnOfferCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('list_premier_add_on_offers')
    # Lazily import only the operations module matching the negotiated version.
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'list_premier_add_on_offers'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.list_premier_add_on_offers(**kwargs)
def list_site_identifiers_assigned_to_host_name(
    self,
    name_identifier,  # type: "_models.NameIdentifier"
    **kwargs  # type: Any
):
    """List all apps that are assigned to a hostname.

    :param name_identifier: Hostname information.
    :type name_identifier: ~azure.mgmt.web.v2020_09_01.models.NameIdentifier
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IdentifierCollection or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_09_01.models.IdentifierCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = self._get_api_version('list_site_identifiers_assigned_to_host_name')
    # Lazily import only the operations module matching the negotiated version.
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as OperationClass
    else:
        raise ValueError("API version {} does not have operation 'list_site_identifiers_assigned_to_host_name'".format(api_version))
    # Wire a transient mixin instance to this client's plumbing, then delegate.
    ops = OperationClass()
    ops._client = self._client
    ops._config = self._config
    ops._serialize = Serializer(self._models_dict(api_version))
    ops._serialize.client_side_validation = False
    ops._deserialize = Deserializer(self._models_dict(api_version))
    return ops.list_site_identifiers_assigned_to_host_name(name_identifier, **kwargs)
def list_skus(
    self,
    **kwargs  # type: Any
):
    """List all SKUs.

    Description for List all SKUs.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SkuInfos, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.SkuInfos
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('list_skus')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'list_skus'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.list_skus(**kwargs)
def list_source_controls(
    self,
    **kwargs  # type: Any
):
    """Gets the source controls available for Azure websites.

    Description for Gets the source controls available for Azure websites.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SourceControlCollection or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_09_01.models.SourceControlCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('list_source_controls')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'list_source_controls'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.list_source_controls(**kwargs)
def move(
    self,
    resource_group_name,  # type: str
    move_resource_envelope,  # type: "_models.CsmMoveResourceEnvelope"
    **kwargs  # type: Any
):
    """Move resources between resource groups.

    Description for Move resources between resource groups.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param move_resource_envelope: Object that represents the resource to move.
    :type move_resource_envelope: ~azure.mgmt.web.v2020_09_01.models.CsmMoveResourceEnvelope
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('move')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'move'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.move(resource_group_name, move_resource_envelope, **kwargs)
def update_publishing_user(
    self,
    user_details,  # type: "_models.User"
    **kwargs  # type: Any
):
    """Updates publishing user.

    Description for Updates publishing user.

    :param user_details: Details of publishing user.
    :type user_details: ~azure.mgmt.web.v2020_09_01.models.User
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: User, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.User
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('update_publishing_user')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'update_publishing_user'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.update_publishing_user(user_details, **kwargs)
def update_source_control(
    self,
    source_control_type,  # type: str
    request_message,  # type: "_models.SourceControl"
    **kwargs  # type: Any
):
    """Updates source control token.

    Description for Updates source control token.

    :param source_control_type: Type of source control.
    :type source_control_type: str
    :param request_message: Source control token information.
    :type request_message: ~azure.mgmt.web.v2020_09_01.models.SourceControl
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SourceControl, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.SourceControl
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('update_source_control')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'update_source_control'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.update_source_control(source_control_type, request_message, **kwargs)
def validate(
    self,
    resource_group_name,  # type: str
    validate_request,  # type: "_models.ValidateRequest"
    **kwargs  # type: Any
):
    """Validate if a resource can be created.

    Description for Validate if a resource can be created.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param validate_request: Request with the resources to validate.
    :type validate_request: ~azure.mgmt.web.v2020_09_01.models.ValidateRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ValidateResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.ValidateResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('validate')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'validate'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.validate(resource_group_name, validate_request, **kwargs)
def validate_container_settings(
    self,
    resource_group_name,  # type: str
    validate_container_settings_request,  # type: "_models.ValidateContainerSettingsRequest"
    **kwargs  # type: Any
):
    """Validate if the container settings are correct.

    Validate if the container settings are correct.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param validate_container_settings_request:
    :type validate_container_settings_request: ~azure.mgmt.web.v2018_02_01.models.ValidateContainerSettingsRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: object, or the result of cls(response)
    :rtype: object
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # This operation only exists in the 2018-02-01 API surface.
    api_version = self._get_api_version('validate_container_settings')
    if api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'validate_container_settings'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.validate_container_settings(resource_group_name, validate_container_settings_request, **kwargs)
def validate_move(
    self,
    resource_group_name,  # type: str
    move_resource_envelope,  # type: "_models.CsmMoveResourceEnvelope"
    **kwargs  # type: Any
):
    """Validate whether a resource can be moved.

    Description for Validate whether a resource can be moved.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param move_resource_envelope: Object that represents the resource to move.
    :type move_resource_envelope: ~azure.mgmt.web.v2020_09_01.models.CsmMoveResourceEnvelope
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('validate_move')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'validate_move'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.validate_move(resource_group_name, move_resource_envelope, **kwargs)
def verify_hosting_environment_vnet(
    self,
    parameters,  # type: "_models.VnetParameters"
    **kwargs  # type: Any
):
    """Verifies if this VNET is compatible with an App Service Environment by analyzing the Network Security Group rules.

    Description for Verifies if this VNET is compatible with an App Service Environment by
    analyzing the Network Security Group rules.

    :param parameters: VNET information.
    :type parameters: ~azure.mgmt.web.v2020_09_01.models.VnetParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: VnetValidationFailureDetails, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_09_01.models.VnetValidationFailureDetails
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Select the versioned operations mixin matching the negotiated API version.
    api_version = self._get_api_version('verify_hosting_environment_vnet')
    if api_version == '2016-03-01':
        from .v2016_03_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    elif api_version == '2020-09-01':
        from .v2020_09_01.operations import WebSiteManagementClientOperationsMixin as _Mixin
    else:
        raise ValueError("API version {} does not have operation 'verify_hosting_environment_vnet'".format(api_version))
    # Wire a fresh mixin instance to this client's transport, config and (de)serializers.
    mixin = _Mixin()
    mixin._client = self._client
    mixin._config = self._config
    mixin._serialize = Serializer(self._models_dict(api_version))
    mixin._serialize.client_side_validation = False
    mixin._deserialize = Deserializer(self._models_dict(api_version))
    return mixin.verify_hosting_environment_vnet(parameters, **kwargs)
| 55.307143
| 146
| 0.714426
| 4,364
| 38,715
| 6.086847
| 0.063474
| 0.071152
| 0.054888
| 0.170764
| 0.845349
| 0.827617
| 0.821105
| 0.793547
| 0.77431
| 0.759176
| 0
| 0.049051
| 0.211701
| 38,715
| 699
| 147
| 55.386266
| 0.821324
| 0.280511
| 0
| 0.775556
| 0
| 0
| 0.089631
| 0.024548
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.2
| 0
| 0.282222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4dd29a0bfa0a54051ef59452f5f0fe4194d5e80e
| 296
|
py
|
Python
|
bentoml/tensorflow.py
|
sauyon/BentoML
|
ff702f1fc1ee7cc4cf7aab2e67d1e27512858fe4
|
[
"Apache-2.0"
] | null | null | null |
bentoml/tensorflow.py
|
sauyon/BentoML
|
ff702f1fc1ee7cc4cf7aab2e67d1e27512858fe4
|
[
"Apache-2.0"
] | null | null | null |
bentoml/tensorflow.py
|
sauyon/BentoML
|
ff702f1fc1ee7cc4cf7aab2e67d1e27512858fe4
|
[
"Apache-2.0"
] | null | null | null |
from ._internal.frameworks.tensorflow_v2 import load
from ._internal.frameworks.tensorflow_v2 import save
from ._internal.frameworks.tensorflow_v2 import load_runner
from ._internal.frameworks.tensorflow_v2 import import_from_tfhub
__all__ = ["load", "load_runner", "save", "import_from_tfhub"]
| 42.285714
| 65
| 0.834459
| 39
| 296
| 5.871795
| 0.282051
| 0.209607
| 0.384279
| 0.558952
| 0.733624
| 0.733624
| 0.384279
| 0
| 0
| 0
| 0
| 0.014652
| 0.077703
| 296
| 6
| 66
| 49.333333
| 0.824176
| 0
| 0
| 0
| 0
| 0
| 0.121622
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4de5d86b94da5fb18c5b7035e04c57eb233c6a73
| 6,527
|
py
|
Python
|
tests/filters/test_remove_duplicates.py
|
PEM-Humboldt/wildlife-insights-utils
|
90f0a3641830cee15ff933433b649d3ccec8c516
|
[
"MIT"
] | null | null | null |
tests/filters/test_remove_duplicates.py
|
PEM-Humboldt/wildlife-insights-utils
|
90f0a3641830cee15ff933433b649d3ccec8c516
|
[
"MIT"
] | null | null | null |
tests/filters/test_remove_duplicates.py
|
PEM-Humboldt/wildlife-insights-utils
|
90f0a3641830cee15ff933433b649d3ccec8c516
|
[
"MIT"
] | null | null | null |
"""
Test cases for the wiutils.filters.remove_duplicates function.
"""
import pandas as pd
import pytest
from wiutils.filters import remove_duplicates
@pytest.fixture(scope="function")
def images():
    """Sample image records: two deployments with several near-duplicate sightings."""
    # Seven records for deployment 001, three for 002.
    deployments = ["001"] * 7 + ["002"] * 3
    # Six anteater records, one dove, then three more anteaters (deployment 002).
    species = (
        ["Myrmecophaga tridactyla"] * 6
        + ["Leptotila verreauxi"]
        + ["Myrmecophaga tridactyla"] * 3
    )
    timestamps = [
        "2020-12-01 10:13:13",
        "2020-12-01 10:15:05",
        "2020-12-01 10:16:46",
        "2020-12-01 14:22:38",
        "2020-12-03 08:15:57",
        "2020-12-15 06:12:32",
        "2020-12-01 10:14:04",
        "2020-12-01 14:08:21",
        "2020-12-01 14:08:21",
        "2020-12-01 19:21:34",
    ]
    return pd.DataFrame(
        {
            "deployment_id": deployments,
            "scientific_name": species,
            "timestamp": timestamps,
        }
    )
def test_unit_seconds(images):
    """Records within 5 seconds of a previous same-species record are dropped."""
    result = remove_duplicates(
        images, interval=5, unit="seconds", species_col="scientific_name"
    )
    # Only one pair in the fixture shares the same second ("2020-12-01 14:08:21"
    # in deployment 002), so exactly one of the ten input rows is removed.
    expected = pd.DataFrame(
        {
            "deployment_id": [
                "001",
                "001",
                "001",
                "001",
                "001",
                "001",
                "001",
                "002",
                "002",
            ],
            "scientific_name": [
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Leptotila verreauxi",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
            ],
            "timestamp": [
                "2020-12-01 10:13:13",
                "2020-12-01 10:15:05",
                "2020-12-01 10:16:46",
                "2020-12-01 14:22:38",
                "2020-12-03 08:15:57",
                "2020-12-15 06:12:32",
                "2020-12-01 10:14:04",
                "2020-12-01 14:08:21",
                "2020-12-01 19:21:34",
            ],
        }
    )
    pd.testing.assert_frame_equal(result, expected)
def test_unit_minutes(images):
    """Records within 30 minutes of a previous same-species record are dropped."""
    result = remove_duplicates(
        images, interval=30, unit="minutes", species_col="scientific_name"
    )
    # Seven of the ten fixture rows survive the 30-minute window.
    expected = pd.DataFrame(
        {
            "deployment_id": ["001", "001", "001", "001", "001", "002", "002"],
            "scientific_name": [
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Leptotila verreauxi",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
            ],
            "timestamp": [
                "2020-12-01 10:13:13",
                "2020-12-01 14:22:38",
                "2020-12-03 08:15:57",
                "2020-12-15 06:12:32",
                "2020-12-01 10:14:04",
                "2020-12-01 14:08:21",
                "2020-12-01 19:21:34",
            ],
        }
    )
    pd.testing.assert_frame_equal(result, expected)
def test_unit_hours(images):
    """Records within 5 hours of a previous same-species record are dropped."""
    result = remove_duplicates(
        images, interval=5, unit="hours", species_col="scientific_name"
    )
    # Six of the ten fixture rows survive the 5-hour window.
    expected = pd.DataFrame(
        {
            "deployment_id": ["001", "001", "001", "001", "002", "002"],
            "scientific_name": [
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Leptotila verreauxi",
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
            ],
            "timestamp": [
                "2020-12-01 10:13:13",
                "2020-12-03 08:15:57",
                "2020-12-15 06:12:32",
                "2020-12-01 10:14:04",
                "2020-12-01 14:08:21",
                "2020-12-01 19:21:34",
            ],
        }
    )
    pd.testing.assert_frame_equal(result, expected)
def test_unit_days(images):
    """Records within 4 days of a previous same-species record are dropped."""
    result = remove_duplicates(
        images, interval=4, unit="days", species_col="scientific_name"
    )
    # Only four of the ten fixture rows survive the 4-day window.
    expected = pd.DataFrame(
        {
            "deployment_id": [
                "001",
                "001",
                "001",
                "002",
            ],
            "scientific_name": [
                "Myrmecophaga tridactyla",
                "Myrmecophaga tridactyla",
                "Leptotila verreauxi",
                "Myrmecophaga tridactyla",
            ],
            "timestamp": [
                "2020-12-01 10:13:13",
                "2020-12-15 06:12:32",
                "2020-12-01 10:14:04",
                "2020-12-01 14:08:21",
            ],
        }
    )
    pd.testing.assert_frame_equal(result, expected)
def test_unit_weeks(images):
    """A 3-week window collapses each deployment/species group to one record."""
    filtered = remove_duplicates(
        images, interval=3, unit="weeks", species_col="scientific_name"
    )
    # One surviving record per (deployment, species) pair.
    expected = pd.DataFrame(
        {
            "deployment_id": ["001", "001", "002"],
            "scientific_name": [
                "Myrmecophaga tridactyla",
                "Leptotila verreauxi",
                "Myrmecophaga tridactyla",
            ],
            "timestamp": [
                "2020-12-01 10:13:13",
                "2020-12-01 10:14:04",
                "2020-12-01 14:08:21",
            ],
        }
    )
    pd.testing.assert_frame_equal(filtered, expected)
def test_keep_index(images):
    """With reset_index=False the surviving rows keep their original index labels."""
    filtered = remove_duplicates(
        images, reset_index=False, species_col="scientific_name"
    )
    expected_labels = pd.Index([0, 3, 4, 5, 6, 7, 9], dtype="int64")
    pd.testing.assert_index_equal(filtered.index, expected_labels)
def test_intact_input(images):
    """The filter must not mutate the DataFrame passed in by the caller."""
    snapshot = images.copy()
    remove_duplicates(images, species_col="scientific_name")
    # Input should be byte-for-byte identical to the pre-call snapshot.
    pd.testing.assert_frame_equal(snapshot, images)
| 30.078341
| 88
| 0.46254
| 608
| 6,527
| 4.853618
| 0.141447
| 0.079295
| 0.081328
| 0.313114
| 0.843782
| 0.812945
| 0.770247
| 0.758726
| 0.719417
| 0.719417
| 0
| 0.176425
| 0.411215
| 6,527
| 216
| 89
| 30.217593
| 0.591465
| 0.009499
| 0
| 0.744898
| 0
| 0
| 0.325074
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 1
| 0.040816
| false
| 0
| 0.015306
| 0.005102
| 0.061224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1517cc15db85ebc254cd0108401ec26386a02bd3
| 217
|
py
|
Python
|
demo_worker/tasks/__init__.py
|
selinon/demo-worker
|
052a4924219be006c626479156e35803ca5545af
|
[
"MIT"
] | null | null | null |
demo_worker/tasks/__init__.py
|
selinon/demo-worker
|
052a4924219be006c626479156e35803ca5545af
|
[
"MIT"
] | 4
|
2018-05-04T07:13:38.000Z
|
2022-03-17T19:04:18.000Z
|
demo_worker/tasks/__init__.py
|
selinon/demo-worker
|
052a4924219be006c626479156e35803ca5545af
|
[
"MIT"
] | 1
|
2021-07-01T07:34:23.000Z
|
2021-07-01T07:34:23.000Z
|
from .hello import HelloTask
from .travis import TravisActiveRepos
from .travis import TravisLogTxt
from .travis import TravisRepoBuilds
from .travis import TravisRepoBuildsCount
from .travis import TravisLogCleanup
| 27.125
| 41
| 0.857143
| 24
| 217
| 7.75
| 0.416667
| 0.268817
| 0.430108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115207
| 217
| 7
| 42
| 31
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
12cb421abeed62e1ec7e7ef89fa762838baa66b5
| 57
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_0/__init__.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from ._pkg0_0_0_0_0 import *
from ._pkg0_0_0_0_1 import *
| 28.5
| 28
| 0.807018
| 14
| 57
| 2.571429
| 0.357143
| 0.277778
| 0.25
| 0.555556
| 0.611111
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.122807
| 57
| 2
| 29
| 28.5
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
420c11332931aa15912d64d5efd5ead35366a3be
| 162
|
py
|
Python
|
pyalgo/basic_modules/algocryption_default_values/Simple_Columnar_Transposition_default_values/Simple_Columnar_Transposition_default_values.py
|
gilad-dotan/pyalgo_pkg
|
132ff3c032c3fc0ae910201611e5d2cde387eb74
|
[
"MIT"
] | 1
|
2021-04-01T08:59:30.000Z
|
2021-04-01T08:59:30.000Z
|
pyalgo/basic_modules/algocryption_default_values/Simple_Columnar_Transposition_default_values/Simple_Columnar_Transposition_default_values.py
|
gilad-dotan/pyalgo_pkg
|
132ff3c032c3fc0ae910201611e5d2cde387eb74
|
[
"MIT"
] | null | null | null |
pyalgo/basic_modules/algocryption_default_values/Simple_Columnar_Transposition_default_values/Simple_Columnar_Transposition_default_values.py
|
gilad-dotan/pyalgo_pkg
|
132ff3c032c3fc0ae910201611e5d2cde387eb74
|
[
"MIT"
] | null | null | null |
# Container types a caller may request for encrypt/decrypt output.
# NOTE(review): presumably consumed as the allowed `type_to_return` choices by the
# Simple Columnar Transposition routines — confirm against the importing module.
encrypt_type_to_return_default_values = ["String", "List", "Bytes", "ByteArray"]
decrypt_type_to_return_default_values = ["String", "List", "Bytes", "ByteArray"]
| 81
| 81
| 0.753086
| 20
| 162
| 5.6
| 0.55
| 0.107143
| 0.214286
| 0.339286
| 0.875
| 0.875
| 0.875
| 0.875
| 0.875
| 0
| 0
| 0
| 0.074074
| 162
| 2
| 82
| 81
| 0.746667
| 0
| 0
| 0
| 0
| 0
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4229553715f6254e8fbe9b861f98a67cc01aa46f
| 14,063
|
py
|
Python
|
CorporaD/interface0.0.1.py
|
Abilay99/opencorpora.kz
|
e91f7763a8fe0c4e4a4b1141cf36393459793c90
|
[
"MIT"
] | 1
|
2021-07-07T02:49:59.000Z
|
2021-07-07T02:49:59.000Z
|
CorporaD/interface0.0.1.py
|
Abilay99/opencorpora.kz
|
e91f7763a8fe0c4e4a4b1141cf36393459793c90
|
[
"MIT"
] | null | null | null |
CorporaD/interface0.0.1.py
|
Abilay99/opencorpora.kz
|
e91f7763a8fe0c4e4a4b1141cf36393459793c90
|
[
"MIT"
] | null | null | null |
from PyQt5 import QtWidgets, QtCore, QtGui
from interfaceForm import Ui_MainWindow
import sys
import os
import glob
from CorporaDB import corporaDB
import collections, re
from execute import *
from Global import sozgebolu, bigram
# Absolute directory of this script; temp files are written under <papka_korpus>/tmp.
papka_korpus = os.path.dirname(os.path.abspath(__file__))
# db connect
ob = corporaDB()
# Corpus document count read from the DB ('sany' presumably means "count" — TODO confirm).
lencorp = int(ob.Count_corpora()[0]['sany'])
#-------------------------------------------------------------------------------------------------
class mywindow(QtWidgets.QMainWindow):
def __init__(self):
    """Build the main window, wire up UI signals, and init keyword-score caches."""
    super(mywindow, self).__init__()
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    # Button / menu wiring (clickbtn is defined outside this view — TODO confirm).
    self.ui.pushButton.clicked.connect(self.clickbtn)
    self.ui.pushButton_3.clicked.connect(self.reload)
    self.ui.actionOpen.triggered.connect(self.openFileNameDialog)
    self.ui.actionSave.triggered.connect(self.saveFileDialog)
    # TF-IDF scores for single words / bigrams; populated by reload(),
    # written out by saveFileDialog().
    self.tfidf = {}
    self.bi_tfidf = {}
def openFileNameDialog(self):
    """Prompt for a text file and load its contents into the plain-text editor."""
    dialog_options = QtWidgets.QFileDialog.Options() | QtWidgets.QFileDialog.DontUseNativeDialog
    path, _ = QtWidgets.QFileDialog.getOpenFileName(
        self, "QFileDialog.getOpenFileName()", "",
        "Text Files (*.txt);;All Files (*)", options=dialog_options)
    # User may cancel the dialog, in which case path is empty.
    if not path:
        return
    with open(path, 'r', encoding="utf-8") as src:
        self.ui.plainTextEdit.setPlainText(src.read())
def saveFileDialog(self):
    """Prompt for a destination and dump the cached keyword lists to a .txt file."""
    dialog_options = QtWidgets.QFileDialog.Options() | QtWidgets.QFileDialog.DontUseNativeDialog
    path, _ = QtWidgets.QFileDialog.getSaveFileName(
        self, "QFileDialog.getSaveFileName()", "<filename>-keywords",
        "Text Files (*.txt);;All Files (*)", options=dialog_options)
    # User may cancel the dialog, in which case path is empty.
    if not path:
        return
    with open(path + ".txt", 'w', encoding="utf-8") as out:
        # Only the keys (the keywords themselves) are written, one per line.
        for word in self.tfidf:
            out.write(str(word) + "\n")
        for bigram_key in self.bi_tfidf:
            out.write(str(bigram_key) + "\n")
def reload(self):
    """Re-run the keyword-extraction pipeline on the current text and
    refresh the results table.

    Pipeline: write the text to a temp file, tag it with Apertium,
    persist intermediate results to the DB helpers, compute unigram and
    bigram TF / IDF / TF-IDF, then render all scores into tableWidget.
    The progress bar tracks the computation stages.
    """
    if self.ui.rettext() == "":
        return
    self.ui.pushButton.setVisible(False)
    self.ui.progressBar.setVisible(True)
    self.ui.pushButton_2.setVisible(False)
    self.ui.pushButton_3.setVisible(False)
    text_tmp = os.path.join(papka_korpus, 'tmp/text.tmp')
    app_tmp = os.path.join(papka_korpus, 'tmp/app.tmp')
    with open(text_tmp, 'w', encoding="utf-8") as f:
        f.write(self.ui.rettext())
    # NOTE(review): shell pipeline assembled by string formatting; paths
    # are internal, but keep them free of shell metacharacters.
    os.system('''cd $HOME/sources/apertium-kaz-rus\ncat "{0}" | apertium -n -d. kaz-rus-tagger > "{1}"'''.format(text_tmp, app_tmp))
    # Fix: close the tagger output file (the original leaked the handle).
    with open(app_tmp, 'r', encoding="utf-8") as f:
        apertium = f.read()
    editedapertium = str(EditedApertium_DB(text=self.ui.rettext(), apertium=apertium))
    outtexts = str(outtexts_DB(aptext=editedapertium))
    train = str(train_DB(outtexts=outtexts))
    txt = outtexts
    # split the tagged text into plain words
    soz = sozgebolu(txt)
    # build the bigram model over the lemmas
    bi = bigram(text=txt)
    text = [bi.newlemm, bi.lastlemm]
    TfIdf = tf_idf(text=soz, len_corp=lencorp, objectCorporaDB=ob)
    tf = TfIdf.tf_esepteu()
    self.ui.progressBar.setValue(16)
    idf = TfIdf.idf_esepteu()
    self.ui.progressBar.setValue(32)
    self.tfidf = TfIdf.tf_idf_esepteu()
    self.ui.progressBar.setValue(48)
    BiTfIdf = bi_tf_idf(text=text, len_corp=lencorp, objectCorporaDB=ob)
    bi_tf = BiTfIdf.bi_tf_esepteu()
    self.ui.progressBar.setValue(64)
    bi_idf = BiTfIdf.bi_idf_esepteu()
    self.ui.progressBar.setValue(80)
    self.bi_tfidf = BiTfIdf.bi_tf_idf_esepteu()
    self.ui.progressBar.setValue(100)
    font = QtGui.QFont()
    font.setPointSize(12)
    font.setFamily("Times New Roman")

    def make_item(value):
        # One styled table cell (shared background/font, stringified value).
        item = QtWidgets.QTableWidgetItem()
        item.setBackground(QtGui.QColor(192, 254, 255))
        item.setFont(font)
        item.setText(str(value))
        return item

    def fill_columns(col, *score_dicts):
        # Write each dict as (key, score rounded to 5 places) pairs into
        # columns col / col+1; the row counter continues across dicts,
        # exactly as the original copy-pasted loops did.
        row = 1
        for scores in score_dicts:
            for key in scores:
                self.ui.tableWidget.setItem(row, col, make_item(key))
                self.ui.tableWidget.setItem(row, col + 1, make_item(round(scores[key], 5)))
                row += 1

    # De-duplicated: the original repeated this block six times.
    fill_columns(0, tf, bi_tf)
    fill_columns(2, idf, bi_idf)
    fill_columns(4, self.tfidf, self.bi_tfidf)
    if self.ui.progressBar.value() == 100:
        self.ui.clearbar()
def clickbtn(self):
    """Run the keyword-extraction pipeline on the entered text and show
    the TF / IDF / TF-IDF results table (also reveals the results label
    and resizes the window on completion).

    Same pipeline as reload(): Apertium tagging -> DB persistence ->
    unigram/bigram TF-IDF -> table rendering, with the progress bar
    advanced at each stage.
    """
    if self.ui.rettext() == "":
        return
    self.ui.pushButton.setVisible(False)
    self.ui.progressBar.setVisible(True)
    text_tmp = os.path.join(papka_korpus, 'tmp/text.tmp')
    app_tmp = os.path.join(papka_korpus, 'tmp/app.tmp')
    with open(text_tmp, 'w', encoding="utf-8") as f:
        f.write(self.ui.rettext())
    # NOTE(review): shell pipeline assembled by string formatting; paths
    # are internal, but keep them free of shell metacharacters.
    os.system('''cd $HOME/sources/apertium-kaz-rus\ncat "{0}" | apertium -n -d. kaz-rus-tagger > "{1}"'''.format(text_tmp, app_tmp))
    # Fix: close the tagger output file (the original leaked the handle).
    with open(app_tmp, 'r', encoding="utf-8") as f:
        apertium = f.read()
    editedapertium = str(EditedApertium_DB(text=self.ui.rettext(), apertium=apertium))
    outtexts = str(outtexts_DB(aptext=editedapertium))
    train = str(train_DB(outtexts=outtexts))
    txt = outtexts
    # split the tagged text into plain words
    soz = sozgebolu(txt)
    # build the bigram model over the lemmas
    bi = bigram(text=txt)
    text = [bi.newlemm, bi.lastlemm]
    TfIdf = tf_idf(text=soz, len_corp=lencorp, objectCorporaDB=ob)
    tf = TfIdf.tf_esepteu()
    self.ui.progressBar.setValue(16)
    idf = TfIdf.idf_esepteu()
    self.ui.progressBar.setValue(32)
    self.tfidf = TfIdf.tf_idf_esepteu()
    self.ui.progressBar.setValue(48)
    BiTfIdf = bi_tf_idf(text=text, len_corp=lencorp, objectCorporaDB=ob)
    bi_tf = BiTfIdf.bi_tf_esepteu()
    self.ui.progressBar.setValue(64)
    bi_idf = BiTfIdf.bi_idf_esepteu()
    self.ui.progressBar.setValue(80)
    self.bi_tfidf = BiTfIdf.bi_tf_idf_esepteu()
    self.ui.progressBar.setValue(100)
    font = QtGui.QFont()
    font.setPointSize(12)
    font.setFamily("Times New Roman")

    def make_item(value):
        # One styled table cell (shared background/font, stringified value).
        item = QtWidgets.QTableWidgetItem()
        item.setBackground(QtGui.QColor(192, 254, 255))
        item.setFont(font)
        item.setText(str(value))
        return item

    def fill_columns(col, *score_dicts):
        # Write each dict as (key, score rounded to 5 places) pairs into
        # columns col / col+1; the row counter continues across dicts,
        # exactly as the original copy-pasted loops did.
        row = 1
        for scores in score_dicts:
            for key in scores:
                self.ui.tableWidget.setItem(row, col, make_item(key))
                self.ui.tableWidget.setItem(row, col + 1, make_item(round(scores[key], 5)))
                row += 1

    # De-duplicated: the original repeated this block six times.
    fill_columns(0, tf, bi_tf)
    fill_columns(2, idf, bi_idf)
    fill_columns(4, self.tfidf, self.bi_tfidf)
    self.ui.label_2.setVisible(True)
    self.ui.tabledsgn()
    self.ui.resizewin()
    if self.ui.progressBar.value() == 100:
        self.ui.clearbar()
if __name__ == "__main__":
    # Application entry point: create the Qt application and the main
    # window, then hand control to the Qt event loop.
    qt_app = QtWidgets.QApplication([])
    qt_app.setApplicationName("Кілттік сөз")
    qt_app.setApplicationVersion("0.0.1")
    main_window = mywindow()
    main_window.show()
    sys.exit(qt_app.exec())
| 44.503165
| 207
| 0.537296
| 1,498
| 14,063
| 4.977303
| 0.118158
| 0.049893
| 0.093348
| 0.106223
| 0.858771
| 0.855418
| 0.847371
| 0.847371
| 0.847371
| 0.847371
| 0
| 0.034494
| 0.342388
| 14,063
| 316
| 208
| 44.503165
| 0.771734
| 0.036194
| 0
| 0.84058
| 0
| 0.007246
| 0.038328
| 0.009676
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018116
| false
| 0
| 0.032609
| 0
| 0.054348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
422cd6de7c68487fde966155188e8df465a40cf6
| 4,996
|
py
|
Python
|
toykoin/tests/test_p2p.py
|
giacomocaironi/toykoin
|
4ba0f95953e116b04f359c2ea9b9926ce41ef02f
|
[
"MIT"
] | 1
|
2020-06-18T21:17:27.000Z
|
2020-06-18T21:17:27.000Z
|
toykoin/tests/test_p2p.py
|
giacomocaironi/toykoin
|
4ba0f95953e116b04f359c2ea9b9926ce41ef02f
|
[
"MIT"
] | 1
|
2020-08-19T18:45:57.000Z
|
2020-08-19T18:45:57.000Z
|
toykoin/tests/test_p2p.py
|
giacomocaironi/toykoin
|
4ba0f95953e116b04f359c2ea9b9926ce41ef02f
|
[
"MIT"
] | null | null | null |
from toykoin.daemon.node import Node
from toykoin.daemon.messages import add_headers, Debug, Version
from toykoin.core.utils import reset_blockchain
import time
import socket
def test_basic_connection():
    """Two nodes exchange Debug messages over a real socket connection.

    Checks that each serialized Debug frame arrives intact and that the
    receive buffer is fully drained afterwards.
    """
    # Fix: pre-bind so the finally block cannot raise NameError (and mask
    # the real failure) if a Node constructor throws.
    node1 = node2 = None
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        node1.connect("0.0.0.0", 20000)
        time.sleep(0.2)  # give the handshake time to complete
        node1.connections[0].send(Debug(payload=b"").serialize())
        time.sleep(0.2)
        assert node2.connections[0].messages == [["debug", b""]]
        node1.connections[0].send(Debug(payload=b"\x01").serialize())
        time.sleep(0.2)
        assert node2.connections[0].messages == [["debug", b""], ["debug", b"\x01"]]
        assert node2.connections[0].buffer == b""
    finally:
        if node1 is not None:
            node1.stop()
        if node2 is not None:
            node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
def test_invalid_headers():
    """A frame followed by a stray trailing byte must not be parsed as a
    message; the receiver keeps data buffered until it can resync.
    """
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        node1.connect("0.0.0.0", 20000)
        time.sleep(0.2)  # give the handshake time to complete
        # valid serialized Debug frame plus one garbage byte
        node1.connections[0].send(Debug(payload=b"").serialize() + b"\x01")
        time.sleep(0.2)
        # nothing parsed yet; the garbage keeps the parser buffering
        assert node2.connections[0].messages == []
        assert node2.connections[0].buffer != b""
        node1.connections[0].send(Debug(payload=b"").serialize())
        time.sleep(0.2)
        # after more data arrives the connection ends up with exactly one
        # parsed message and an empty buffer (presumably it resynced —
        # the exact recovery mechanism lives in the Connection class)
        assert node2.connections[0].messages == [["debug", b""]]
        assert node2.connections[0].buffer == b""
    finally:
        node1.stop()
        node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
def test_split_message():
    """A message split across two sends is reassembled by the receiver."""
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        node1.connect("0.0.0.0", 20000)
        time.sleep(0.2)  # give the handshake time to complete
        # full wire frame: network magic prefix + headers + empty payload
        msg = b"regtest" + add_headers("debug", b"")
        # send only the first 4 bytes — an incomplete frame
        node1.connections[0].socket.sendall(msg[:4])
        time.sleep(0.2)
        assert node2.connections[0].buffer != b""
        # send the remainder; the frame should now parse
        node1.connections[0].socket.sendall(msg[4:])
        time.sleep(0.2)
        assert node2.connections[0].messages == [["debug", b""]]
        assert node2.connections[0].buffer == b""
    finally:
        node1.stop()
        node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
def test_split_message_2():
    """A payload larger than a single socket read (8 KiB) is reassembled."""
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        node1.connect("0.0.0.0", 20000)
        time.sleep(0.2)  # give the handshake time to complete
        # also pins the Connection __str__/__repr__ format
        assert str(node2.connections[0]) == "Connection to 127.0.0.1:10000"
        # broadcast an 8 KiB Debug payload — larger than one recv chunk
        node1.sendall(Debug(payload=b"\x01" * 8192).serialize())
        time.sleep(0.2)
        assert node2.connections[0].messages == [["debug", b"\x01" * 8192]]
        assert node2.connections[0].buffer == b""
    finally:
        node1.stop()
        node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
def test_shutdown():
    """Stopping a connection removes it from the node's connection list."""
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        node1.connect("0.0.0.0", 20000)
        time.sleep(0.2)  # give the handshake time to complete
        node2.connections[0].stop()
        time.sleep(0.2)
        assert node2.connections == []
    finally:
        node1.stop()
        node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
def test_invalid_version():
    """A peer announcing an unsupported protocol version (2) is rejected:
    the receiving node keeps no connection.
    """
    # Fix: pre-bind so the finally block cannot raise NameError (and mask
    # the real failure) if a Node constructor throws.
    node1 = node2 = None
    sock = None
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect(("0.0.0.0", 20000))
        sock.send(Version(2, 10000).serialize())
        time.sleep(0.3)
        assert not node2.connections
    finally:
        # Fix: close the raw socket (the original leaked it).
        if sock is not None:
            sock.close()
        if node1 is not None:
            node1.stop()
        if node2 is not None:
            node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
def test_invalid_first_message():
    """A peer whose first message is not a Version handshake is rejected."""
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect(("0.0.0.0", 20000))
        # send Debug before any Version handshake
        sock.send(Debug(b"").serialize())
        time.sleep(0.3)
        assert not node2.connections
    finally:
        # NOTE(review): sock is never closed here — relies on GC/teardown
        node1.stop()
        node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
def test_invalid_second_message():
    """After a valid Version handshake, an unexpected second message
    (Debug instead of the expected follow-up) drops the connection."""
    try:
        node1 = Node(10000, "node1")
        node2 = Node(20000, "node2")
        node1.start()
        node2.start()
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect(("0.0.0.0", 20000))
        # valid handshake first...
        sock.send(Version(1, 0).serialize())
        time.sleep(0.2)
        # ...then an out-of-protocol message
        sock.send(Debug(b"").serialize())
        time.sleep(0.2)
        assert not node2.connections
    finally:
        # NOTE(review): sock is never closed here — relies on GC/teardown
        node1.stop()
        node2.stop()
        reset_blockchain("node1")
        reset_blockchain("node2")
| 29.56213
| 84
| 0.570256
| 601
| 4,996
| 4.668885
| 0.101498
| 0.017819
| 0.060584
| 0.058803
| 0.874911
| 0.863863
| 0.863863
| 0.857448
| 0.831433
| 0.803635
| 0
| 0.098018
| 0.273018
| 4,996
| 168
| 85
| 29.738095
| 0.674559
| 0
| 0
| 0.821192
| 0
| 0
| 0.061449
| 0
| 0
| 0
| 0
| 0
| 0.112583
| 1
| 0.05298
| false
| 0
| 0.033113
| 0
| 0.086093
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
424e6b6d84fec8fd015e33ef8d2d7abb470f6afb
| 114
|
py
|
Python
|
warno_agent/src/test_pyarmret_import.py
|
ARM-DOE/warno
|
231f0eb87fa3011133f361ebac780fc21d0968c6
|
[
"BSD-3-Clause"
] | 4
|
2017-08-09T15:27:19.000Z
|
2021-03-11T07:16:09.000Z
|
warno_agent/src/test_pyarmret_import.py
|
ARM-DOE/warno
|
231f0eb87fa3011133f361ebac780fc21d0968c6
|
[
"BSD-3-Clause"
] | null | null | null |
warno_agent/src/test_pyarmret_import.py
|
ARM-DOE/warno
|
231f0eb87fa3011133f361ebac780fc21d0968c6
|
[
"BSD-3-Clause"
] | 2
|
2017-08-09T15:27:28.000Z
|
2019-05-22T16:09:06.000Z
|
from pyarmret.io.PAFClient import PAFClient
def test_pafclient_import():
    """Smoke test: PAFClient can be imported and constructed."""
    client = PAFClient("localhost", 3000)
    assert client
| 22.8
| 43
| 0.789474
| 14
| 114
| 6.285714
| 0.714286
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0.122807
| 114
| 4
| 44
| 28.5
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
429f4cbe66c5ad6f9b76e8d6c0ac0963d6bfc445
| 98
|
py
|
Python
|
process_cuwb_data/filters/__init__.py
|
WildflowerSchools/wf-process-cuwb-data
|
8d94eeec82401f0f62ce1e0b7fdefd49e0328921
|
[
"MIT"
] | null | null | null |
process_cuwb_data/filters/__init__.py
|
WildflowerSchools/wf-process-cuwb-data
|
8d94eeec82401f0f62ce1e0b7fdefd49e0328921
|
[
"MIT"
] | 2
|
2020-10-01T18:18:05.000Z
|
2020-12-17T22:40:06.000Z
|
process_cuwb_data/filters/__init__.py
|
WildflowerSchools/wf-process-cuwb-data
|
8d94eeec82401f0f62ce1e0b7fdefd49e0328921
|
[
"MIT"
] | null | null | null |
from .butter import *
from .filt_filt import *
from .sos_filt_filt import *
from .savgol import *
| 19.6
| 28
| 0.755102
| 15
| 98
| 4.733333
| 0.4
| 0.422535
| 0.394366
| 0.507042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 98
| 4
| 29
| 24.5
| 0.865854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c42640fd17c458bb7986737147ddc889c4473692
| 104
|
py
|
Python
|
entity/cards/LETLT_055/__init__.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 102
|
2021-10-20T09:06:39.000Z
|
2022-03-28T13:35:11.000Z
|
entity/cards/LETLT_055/__init__.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 98
|
2021-10-19T16:13:27.000Z
|
2022-03-27T13:27:49.000Z
|
entity/cards/LETLT_055/__init__.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 55
|
2021-10-19T03:56:50.000Z
|
2022-03-25T08:25:26.000Z
|
# -*- coding: utf-8 -*-
import entity.cards.LETLT_055.LETLT_055
import entity.cards.LETLT_055.LETLT_055
| 26
| 39
| 0.769231
| 17
| 104
| 4.470588
| 0.470588
| 0.421053
| 0.447368
| 0.578947
| 0.868421
| 0.868421
| 0.868421
| 0
| 0
| 0
| 0
| 0.136842
| 0.086538
| 104
| 3
| 40
| 34.666667
| 0.663158
| 0.201923
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
c42ce10934f97248113a8a4ad7667ccbc9dfb3cb
| 202
|
py
|
Python
|
addons/AddRoutes/rtmidi/__init__.py
|
trisadmeslek/V-Sekai-Blender-tools
|
0d8747387c58584b50c69c61ba50a881319114f8
|
[
"MIT"
] | 2
|
2021-12-21T16:38:58.000Z
|
2022-01-08T00:56:35.000Z
|
rtmidi/__init__.py
|
rwreynolds/python-rtmidi
|
04ba3a69da0437b21ca3bb6f359c8ebaf06f3bd2
|
[
"MIT"
] | 1
|
2022-01-29T05:46:50.000Z
|
2022-01-29T05:46:50.000Z
|
rtmidi/__init__.py
|
rwreynolds/python-rtmidi
|
04ba3a69da0437b21ca3bb6f359c8ebaf06f3bd2
|
[
"MIT"
] | 1
|
2021-11-07T19:41:34.000Z
|
2021-11-07T19:41:34.000Z
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from .release import version as __version__ # noqa
from ._rtmidi import * # noqa
from ._rtmidi import __doc__ # noqa
del absolute_import
| 28.857143
| 51
| 0.757426
| 27
| 202
| 5.074074
| 0.518519
| 0.20438
| 0.20438
| 0.291971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005917
| 0.163366
| 202
| 6
| 52
| 33.666667
| 0.804734
| 0.173267
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c45571c17f444ccd3d5ff2d93627c944ac516f73
| 9,078
|
py
|
Python
|
flybirds/core/plugin/plugins/default/step/verify.py
|
LinuxSuRen/flybirds
|
a99027d87f61966c148dbcd314fcef8037c78a17
|
[
"MIT"
] | 183
|
2021-12-31T08:45:54.000Z
|
2022-03-31T06:01:41.000Z
|
flybirds/core/plugin/plugins/default/step/verify.py
|
Nightwish555/flybirds
|
49de598a52d349ee407d24e6339dfedd8c356cf5
|
[
"MIT"
] | 7
|
2022-01-18T05:13:20.000Z
|
2022-03-12T06:45:40.000Z
|
flybirds/core/plugin/plugins/default/step/verify.py
|
Nightwish555/flybirds
|
49de598a52d349ee407d24e6339dfedd8c356cf5
|
[
"MIT"
] | 37
|
2021-12-31T09:23:42.000Z
|
2022-03-31T04:54:50.000Z
|
# -*- coding: utf-8 -*-
"""
Element verification
"""
import re
import flybirds.core.global_resource as gr
import flybirds.core.plugin.plugins.default.ui_driver.poco.poco_ele\
as poco_ele
import flybirds.core.plugin.plugins.default.ui_driver.poco.poco_findsnap\
as poco_find_snap
import flybirds.core.plugin.plugins.default.ui_driver.poco.poco_verify\
as poco_verify
import flybirds.utils.dsl_helper as dsl_helper
def wait_text_exist(context, param):
    """Wait until an element whose text matches *param* exists.

    Honors an optional "fuzzyMatch" flag (regex text match) and a
    per-call "timeout"; otherwise the frame-level wait_ele_timeout is
    used. When snap mode is enabled the lookup goes through the snap
    finder instead of a poco selector string.
    """
    poco_instance = gr.get_value("pocoInstance")
    param_dict = dsl_helper.params_to_dic(param)
    selector_str = param_dict["selector"]
    use_snap = gr.get_frame_config_value("use_snap", False)
    fuzzy = "fuzzyMatch" in param_dict.keys()
    config = None
    if use_snap:
        config = {("textMatches" if fuzzy else "text"): selector_str}
    else:
        selector_str = ("textMatches=" if fuzzy else "text=") + selector_str
    optional = {}
    if "timeout" in param_dict.keys():
        optional["timeout"] = float(param_dict["timeout"])
    else:
        optional["timeout"] = gr.get_frame_config_value("wait_ele_timeout", 10)
    if use_snap and config:
        poco_find_snap.find_ele_by_snap(poco_instance, config, optional)
    else:
        poco_ele.wait_exists(poco_instance, selector_str, optional)
def text_not_exist(context, param):
    """Assert that no element with the given (optionally fuzzy) text exists."""
    poco_instance = gr.get_value("pocoInstance")
    param_dict = dsl_helper.params_to_dic(param)
    prefix = "textMatches=" if "fuzzyMatch" in param_dict.keys() else "text="
    selector_str = prefix + param_dict["selector"]
    poco_ele.not_exist(poco_instance, selector_str, {})
def wait_text_disappear(context, param):
    """Wait until the element with the given (optionally fuzzy) text
    disappears, using wait_ele_disappear as the default timeout."""
    poco_instance = gr.get_value("pocoInstance")
    param_dict = dsl_helper.params_to_dic(param)
    prefix = "textMatches=" if "fuzzyMatch" in param_dict.keys() else "text="
    selector_str = prefix + param_dict["selector"]
    optional = {}
    if "timeout" in param_dict.keys():
        optional["timeout"] = float(param_dict["timeout"])
    else:
        optional["timeout"] = gr.get_frame_config_value(
            "wait_ele_disappear", 10
        )
    poco_ele.wait_disappear(poco_instance, selector_str, optional)
def wait_ele_exit(context, param):
    """Wait until the element described by *param* exists.

    param may carry "path" or "multiSelector" (mutually exclusive,
    path wins) and an optional "timeout"; snap mode is only used when
    neither path nor multiSelector is given.
    """
    poco_instance = gr.get_value("pocoInstance")
    param_dict = dsl_helper.params_to_dic(param)
    selector_str = param_dict["selector"]
    optional = {}
    config = None
    use_snap = gr.get_frame_config_value("use_snap", False)
    if "path" in param_dict.keys():
        optional["path"] = param_dict["path"]
    elif "multiSelector" in param_dict.keys():
        optional["multiSelector"] = param_dict["multiSelector"]
    elif use_snap:
        # snap lookup keyed by element name, only without path/multiSelector
        config = {"name": selector_str}
    if "timeout" in param_dict.keys():
        optional["timeout"] = float(param_dict["timeout"])
    else:
        optional["timeout"] = gr.get_frame_config_value("wait_ele_timeout", 10)
    if use_snap and config:
        print("Use snap to determine the existence of elements")
        poco_find_snap.find_ele_by_snap(poco_instance, config, optional)
    else:
        poco_ele.wait_exists(poco_instance, selector_str, optional)
def ele_not_exit(context, param):
    """Assert that the element described by *param* is not present."""
    poco_instance = gr.get_value("pocoInstance")
    param_dict = dsl_helper.params_to_dic(param)
    optional = {}
    # path takes precedence over multiSelector, as in the other steps
    for key in ("path", "multiSelector"):
        if key in param_dict.keys():
            optional[key] = param_dict[key]
            break
    poco_ele.not_exist(poco_instance, param_dict["selector"], optional)
def wait_ele_disappear(context, param):
    """Wait until the element described by *param* disappears.

    Supports "path"/"multiSelector" (path wins) and an optional
    "timeout"; defaults to the frame-level wait_ele_disappear timeout.
    """
    poco_instance = gr.get_value("pocoInstance")
    param_dict = dsl_helper.params_to_dic(param)
    selector_str = param_dict["selector"]
    optional = {}
    if "path" in param_dict.keys():
        optional["path"] = param_dict["path"]
    elif "multiSelector" in param_dict.keys():
        optional["multiSelector"] = param_dict["multiSelector"]
    if "timeout" in param_dict.keys():
        optional["timeout"] = float(param_dict["timeout"])
    else:
        optional["timeout"] = gr.get_frame_config_value(
            "wait_ele_disappear", 10
        )
    poco_ele.wait_disappear(poco_instance, selector_str, optional)
def ele_text_equal(context, param1, param2):
    """Verify the text of the element selected by *param1* equals the
    expected text from *param2*, ignoring whitespace and zero-width
    spaces on both sides.

    param2 may name a project "dealMethod" that post-processes the
    expected text before comparison.
    """
    poco_instance = gr.get_value("pocoInstance")
    # compile once and reuse (the original recompiled the same pattern)
    whitespace = re.compile(r"\s+")
    param1_dict = dsl_helper.params_to_dic(param1)
    selector_str = param1_dict["selector"]
    optional = {}
    if "path" in param1_dict.keys():
        optional["path"] = param1_dict["path"]
    elif "multiSelector" in param1_dict.keys():
        optional["multiSelector"] = param1_dict["multiSelector"]
    if "timeout" in param1_dict.keys():
        optional["timeout"] = float(param1_dict["timeout"])
    else:
        optional["timeout"] = gr.get_frame_config_value("wait_ele_timeout", 10)
    params_deal_module = None
    deal_method = None
    param2_dict = dsl_helper.params_to_dic(param2, "tText")
    if "dealMethod" in param2_dict.keys():
        deal_method = param2_dict["dealMethod"]
        params_deal_module = gr.get_value("projectScript").params_deal
    target_str = param2_dict["tText"]
    # strip zero-width spaces then all whitespace before comparing
    handled_target_str = re.sub(whitespace, "", target_str.replace(u"\u200b", ""))
    handled_selector_str = re.sub(
        whitespace, "", selector_str.replace(u"\u200b", "")
    )
    if deal_method is not None:
        deal_method_fun = getattr(params_deal_module, deal_method)
        handled_target_str = deal_method_fun(handled_target_str)
        handled_target_str = re.sub(whitespace, "", handled_target_str)
    # Fix: removed the unreachable `if False:` snap-verification branch
    # and the unused `config` variable from the original.
    poco_verify.ele_text_is(
        poco_instance,
        handled_selector_str,
        handled_target_str,
        optional,
        deal_method,
        params_deal_module,
    )
def ele_text_container(context, param1, param2):
    """Verify the text of the element selected by *param1* CONTAINS the
    expected text from *param2* (whitespace/zero-width stripped from the
    expected side only).

    param2 may name a project "dealMethod" that post-processes the
    expected text before comparison.
    """
    poco_instance = gr.get_value("pocoInstance")
    param1_dict = dsl_helper.params_to_dic(param1)
    selector_str = param1_dict["selector"]
    optional = {}
    if "path" in param1_dict.keys():
        optional["path"] = param1_dict["path"]
    elif "multiSelector" in param1_dict.keys():
        optional["multiSelector"] = param1_dict["multiSelector"]
    if "timeout" in param1_dict.keys():
        optional["timeout"] = float(param1_dict["timeout"])
    else:
        optional["timeout"] = gr.get_frame_config_value("wait_ele_timeout", 10)
    deal_method = None
    params_deal_module = None
    param2_dict = dsl_helper.params_to_dic(param2, "tText")
    if "dealMethod" in param2_dict.keys():
        deal_method = param2_dict["dealMethod"]
        params_deal_module = gr.get_value("projectScript").params_deal
    target_str = param2_dict["tText"]
    pattern = re.compile(r"\s+")
    # strip zero-width spaces then all whitespace from the expected text
    handled_target_str = re.sub(pattern, "", target_str.replace(u"\u200b", ""))
    if not (deal_method is None):
        deal_method_fun = getattr(params_deal_module, deal_method)
        handled_target_str = deal_method_fun(handled_target_str)
        pattern = re.compile(r"\s+")
        handled_target_str = re.sub(pattern, "", handled_target_str)
    poco_verify.ele_text_contains(
        poco_instance,
        selector_str,
        handled_target_str,
        optional,
        deal_method,
        params_deal_module,
    )
def wait_ele_appear(context, param):
    """Wait for the element to appear; defaults to the (longer)
    page_render_timeout rather than the element wait timeout."""
    poco_instance = gr.get_value("pocoInstance")
    param_dict = dsl_helper.params_to_dic(param)
    optional = {}
    if "path" in param_dict.keys():
        optional["path"] = param_dict["path"]
    elif "multiSelector" in param_dict.keys():
        optional["multiSelector"] = param_dict["multiSelector"]
    if "timeout" in param_dict.keys():
        timeout = float(param_dict["timeout"])
    else:
        timeout = gr.get_frame_config_value("page_render_timeout", 30)
    optional["timeout"] = timeout
    poco_ele.wait_exists(poco_instance, param_dict["selector"], optional)
def exist_ele(context, param):
    """
    Compatible with the old version of the statement, it will be discarded in
    the future.
    """
    # Legacy format: "selector[,timeout]" as a plain comma-separated string.
    poco_instance = gr.get_value("pocoInstance")
    parts = param.split(",")
    optional = {}
    if len(parts) >= 2:
        optional["timeout"] = float(parts[1])
    else:
        optional["timeout"] = gr.get_frame_config_value(
            "wait_ele_timeout", 10
        )
    poco_ele.wait_exists(poco_instance, parts[0], optional)
| 34.781609
| 79
| 0.673386
| 1,147
| 9,078
| 5.003487
| 0.103749
| 0.067433
| 0.052971
| 0.041819
| 0.863914
| 0.860777
| 0.839519
| 0.835511
| 0.833769
| 0.799442
| 0
| 0.008505
| 0.209958
| 9,078
| 260
| 80
| 34.915385
| 0.79169
| 0.02137
| 0
| 0.785047
| 0
| 0
| 0.128515
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046729
| false
| 0
| 0.028037
| 0
| 0.074766
| 0.004673
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67084c5219077c9b45f7cbc020d2ad5a22212cf9
| 6,142
|
py
|
Python
|
library/clients/endpoints.py
|
robertoconto/newrelic-experimental-nr-account-migration
|
6b10d748f96ef77bfcda73e030598718c4504b68
|
[
"Apache-2.0"
] | 8
|
2020-10-27T17:01:57.000Z
|
2021-12-01T13:20:20.000Z
|
library/clients/endpoints.py
|
robertoconto/newrelic-experimental-nr-account-migration
|
6b10d748f96ef77bfcda73e030598718c4504b68
|
[
"Apache-2.0"
] | 22
|
2020-10-07T13:58:02.000Z
|
2021-09-16T17:57:36.000Z
|
library/clients/endpoints.py
|
robertoconto/newrelic-experimental-nr-account-migration
|
6b10d748f96ef77bfcda73e030598718c4504b68
|
[
"Apache-2.0"
] | 5
|
2020-11-05T07:10:43.000Z
|
2021-08-20T19:14:24.000Z
|
import os
import library.migrationlogger as m_logger
class Endpoints:
    """Factory that selects the US or EU New Relic endpoint set by region name."""

    logger = m_logger.get_logger(os.path.basename(__file__))
    REGION_US = "us"
    REGION_EU = "eu"

    @classmethod
    def of(cls, region=REGION_US):
        """Return the endpoint set for *region* (case-insensitive).

        Logs an error and falls through (returning None) for an unknown
        region, preserving the original behavior.
        """
        normalized = region.lower()
        if normalized == cls.REGION_US:
            return USEndpoints()
        if normalized == cls.REGION_EU:
            return EUEndpoints()
        cls.logger.error("Incorrect region specified. Region can be either us or eu")
class USEndpoints:
    """US-region New Relic REST/GraphQL/Synthetics/Insights endpoint URLs.

    Mirrors EUEndpoints field-for-field; only the hostnames differ
    (api.newrelic.com vs api.eu.newrelic.com, etc.).
    """
    GRAPHQL_URL = 'https://api.newrelic.com/graphql'
    SHOW_APM_APP_URL = 'https://api.newrelic.com/v2/applications/'
    GET_APM_APP_URL = 'https://api.newrelic.com/v2/applications.json'
    GET_BROWSER_APP_URL = 'https://api.newrelic.com/v2/browser_applications.json'
    SHOW_MOBILE_APP_URL = 'https://api.newrelic.com/v2/mobile_applications/'
    SHOW_APM_KT_URL = 'https://api.newrelic.com/v2/key_transactions/'
    GET_APM_KT_URL = 'https://api.newrelic.com/v2/key_transactions.json'
    PUT_LABEL_URL = 'https://api.newrelic.com/v2/labels.json'
    GET_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards.json'
    SHOW_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards/'
    DEL_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards/'
    MONITORS_URL = 'https://synthetics.newrelic.com/synthetics/api/v3/monitors/'
    MONITORS_LABEL_URL = 'https://synthetics.newrelic.com/synthetics/api/v4/monitors/'
    # %s placeholder is filled with the account id by callers
    INSIGHTS_URL = 'https://insights-api.newrelic.com/v1/accounts/%s/query'
    SEC_CREDENTIALS_URL = 'https://synthetics.newrelic.com/synthetics/api/v1/secure-credentials'
    ALERTS_CHANNEL_URL = 'https://api.newrelic.com/v2/alerts_channels.json'
    ALERT_POLICIES_URL = 'https://api.newrelic.com/v2/alerts_policies.json'
    ALERT_POLICY_CHANNELS_URL = 'https://api.newrelic.com/v2/alerts_policy_channels.json'
    DEL_ALERTS_URL = 'https://api.newrelic.com/v2/alerts_policies/'
    DEL_CHANNELS_URL = 'https://api.newrelic.com/v2/alerts_channels/'
    GET_APP_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_conditions.json'
    APP_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_conditions/'
    CREATE_APP_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_conditions/policies/'
    GET_SYNTH_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_synthetics_conditions.json'
    CREATE_SYNTHETICS_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_synthetics_conditions/policies/'
    LOC_FAILURE_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_location_failure_conditions/policies/'
    NRQL_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_nrql_conditions.json'
    CREATE_NRQL_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_nrql_conditions/policies/'
    EXTSVC_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_external_service_conditions.json'
    CREATE_EXTSVC_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_external_service_conditions/policies/'
    INFRA_CONDITIONS_URL = 'https://infra-api.newrelic.com/v2/alerts/conditions'
    CREATE_INFRA_CONDITION_URL = 'https://infra-api.newrelic.com/v2/alerts/conditions'
    ENTITY_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_entity_conditions'
    ALERT_VIOLATIONS_URL = 'https://api.newrelic.com/v2/alerts_violations.json'
class EUEndpoints:
    """New Relic EU-region API endpoint URL constants.

    Mirrors the US-region endpoint constants defined earlier in this file,
    substituting the ``*.eu.newrelic.com`` hosts. Conventions visible in the
    values: URLs ending in ``/`` expect an ID (or sub-path) to be appended by
    the caller; URLs ending in ``.json`` are REST API v2 collection endpoints.

    Note: the class body was flattened to column 0 in the source dump, which
    is a syntax error; indentation is restored here. All URL string values
    are unchanged.
    """

    # NerdGraph (GraphQL) API.
    GRAPHQL_URL = 'https://api.eu.newrelic.com/graphql'

    # APM, browser, and mobile applications.
    SHOW_APM_APP_URL = 'https://api.eu.newrelic.com/v2/applications/'
    GET_APM_APP_URL = 'https://api.eu.newrelic.com/v2/applications.json'
    GET_BROWSER_APP_URL = 'https://api.eu.newrelic.com/v2/browser_applications.json'
    SHOW_MOBILE_APP_URL = 'https://api.eu.newrelic.com/v2/mobile_applications/'

    # Key transactions and labels.
    SHOW_APM_KT_URL = 'https://api.eu.newrelic.com/v2/key_transactions/'
    GET_APM_KT_URL = 'https://api.eu.newrelic.com/v2/key_transactions.json'
    PUT_LABEL_URL = 'https://api.eu.newrelic.com/v2/labels.json'

    # Dashboards (show/delete share the same base path; the HTTP verb differs).
    GET_DASHBOARDS_URL = 'https://api.eu.newrelic.com/v2/dashboards.json'
    SHOW_DASHBOARDS_URL = 'https://api.eu.newrelic.com/v2/dashboards/'
    DEL_DASHBOARDS_URL = 'https://api.eu.newrelic.com/v2/dashboards/'

    # Synthetics monitors (v3) and monitor labels (v4).
    MONITORS_URL = 'https://synthetics.eu.newrelic.com/synthetics/api/v3/monitors/'
    MONITORS_LABEL_URL = 'https://synthetics.eu.newrelic.com/synthetics/api/v4/monitors/'

    # Insights query endpoint; '%s' is filled with the account ID via %-formatting.
    INSIGHTS_URL = 'https://insights-api.eu.newrelic.com/v1/accounts/%s/query'

    # Synthetics secure credentials store.
    SEC_CREDENTIALS_URL = 'https://synthetics.eu.newrelic.com/synthetics/api/v1/secure-credentials'

    # Alerts: channels, policies, and policy<->channel association.
    ALERTS_CHANNEL_URL = 'https://api.eu.newrelic.com/v2/alerts_channels.json'
    ALERT_POLICIES_URL = 'https://api.eu.newrelic.com/v2/alerts_policies.json'
    ALERT_POLICY_CHANNELS_URL = 'https://api.eu.newrelic.com/v2/alerts_policy_channels.json'
    DEL_ALERTS_URL = 'https://api.eu.newrelic.com/v2/alerts_policies/'
    DEL_CHANNELS_URL = 'https://api.eu.newrelic.com/v2/alerts_channels/'

    # Alert conditions by kind: APM app, Synthetics, location-failure, NRQL,
    # and external-service conditions.
    GET_APP_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_conditions.json'
    APP_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_conditions/'
    CREATE_APP_CONDITION_URL = 'https://api.eu.newrelic.com/v2/alerts_conditions/policies/'
    GET_SYNTH_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_synthetics_conditions.json'
    CREATE_SYNTHETICS_CONDITION_URL = 'https://api.eu.newrelic.com/v2/alerts_synthetics_conditions/policies/'
    LOC_FAILURE_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_location_failure_conditions/policies/'
    NRQL_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_nrql_conditions.json'
    CREATE_NRQL_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_nrql_conditions/policies/'
    EXTSVC_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_external_service_conditions.json'
    CREATE_EXTSVC_CONDITION_URL = 'https://api.eu.newrelic.com/v2/alerts_external_service_conditions/policies/'

    # Infrastructure alert conditions live on a separate host (infra-api).
    INFRA_CONDITIONS_URL = 'https://infra-api.eu.newrelic.com/v2/alerts/conditions'
    CREATE_INFRA_CONDITION_URL = 'https://infra-api.eu.newrelic.com/v2/alerts/conditions'
    ENTITY_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_entity_conditions'

    # Alert violations (incident events).
    ALERT_VIOLATIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_violations.json'
| 65.340426
| 111
| 0.760501
| 864
| 6,142
| 5.130787
| 0.101852
| 0.122716
| 0.170088
| 0.162869
| 0.921272
| 0.918791
| 0.909542
| 0.904805
| 0.880893
| 0.835777
| 0
| 0.011937
| 0.099805
| 6,142
| 93
| 112
| 66.043011
| 0.789835
| 0
| 0
| 0
| 0
| 0
| 0.599642
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011905
| false
| 0
| 0.02381
| 0
| 0.940476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.