Dataset columns and types (three example rows follow; each row is shown as its metadata fields, the file content, and the remaining derived columns):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
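Most of the qsc_code_* columns are surface statistics computed from the content string. The dump does not include the extraction code, so the sketch below is only an illustration of plausible definitions for a few of the simpler signals; the column names are taken from the schema above, but the formulas are assumptions.

```python
# Illustrative sketch only: plausible definitions for a few of the simpler
# qsc_code_* columns. The dataset's actual extraction code is not part of this
# dump, so treat these formulas as assumptions.
def simple_code_signals(content: str) -> dict:
    lines = content.splitlines()
    words = content.split()
    num_chars = len(content)
    return {
        "qsc_code_num_chars": num_chars,
        "qsc_code_num_words": len(words),
        "qsc_code_mean_word_length": sum(len(w) for w in words) / len(words) if words else 0.0,
        "qsc_code_frac_words_unique": len(set(words)) / len(words) if words else 0.0,
        "qsc_code_num_lines": len(lines),
        "qsc_code_num_chars_line_max": max((len(line) for line in lines), default=0),
        "qsc_code_num_chars_line_mean": num_chars / len(lines) if lines else 0.0,
        "qsc_code_frac_chars_whitespace": sum(c.isspace() for c in content) / num_chars if num_chars else 0.0,
        "qsc_code_frac_chars_digital": sum(c.isdigit() for c in content) / num_chars if num_chars else 0.0,
    }
```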
Row 1:
hexsha: 967f0eaa095f29547d8db0e33285387cfb0d92a9
size: 17,794
ext: py
lang: Python
max_stars_repo_path: vsts/vsts/work_item_tracking_process/v4_0/work_item_tracking_process_client.py
max_stars_repo_name: kenkuo/azure-devops-python-api
max_stars_repo_head_hexsha: 9e920bd25e938fa89ff7f60153e5b9e113ca839d
max_stars_repo_licenses: ["MIT"]
max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: vsts/vsts/work_item_tracking_process/v4_0/work_item_tracking_process_client.py
max_issues_repo_name: kenkuo/azure-devops-python-api
max_issues_repo_head_hexsha: 9e920bd25e938fa89ff7f60153e5b9e113ca839d
max_issues_repo_licenses: ["MIT"]
max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: vsts/vsts/work_item_tracking_process/v4_0/work_item_tracking_process_client.py
max_forks_repo_name: kenkuo/azure-devops-python-api
max_forks_repo_head_hexsha: 9e920bd25e938fa89ff7f60153e5b9e113ca839d
max_forks_repo_licenses: ["MIT"]
max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...vss_client import VssClient
from . import models
class WorkItemTrackingClient(VssClient):
"""WorkItemTracking
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(WorkItemTrackingClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = None
def get_behavior(self, process_id, behavior_ref_name, expand=None):
"""GetBehavior.
[Preview API]
:param str process_id:
:param str behavior_ref_name:
:param str expand:
:rtype: :class:`<WorkItemBehavior> <work-item-tracking.v4_0.models.WorkItemBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e',
version='4.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemBehavior', response)
def get_behaviors(self, process_id, expand=None):
"""GetBehaviors.
[Preview API]
:param str process_id:
:param str expand:
:rtype: [WorkItemBehavior]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e',
version='4.0-preview.1',
route_values=route_values,
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[WorkItemBehavior]', response)
def get_fields(self, process_id):
"""GetFields.
[Preview API]
:param str process_id:
:rtype: [FieldModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
response = self._send(http_method='GET',
location_id='7a0e7a1a-0b34-4ae0-9744-0aaffb7d0ed1',
version='4.0-preview.1',
route_values=route_values,
returns_collection=True)
return self._deserialize('[FieldModel]', response)
def get_work_item_type_fields(self, process_id, wit_ref_name):
"""GetWorkItemTypeFields.
[Preview API]
:param str process_id:
:param str wit_ref_name:
:rtype: [FieldModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
version='4.0-preview.1',
route_values=route_values,
returns_collection=True)
return self._deserialize('[FieldModel]', response)
def create_process(self, create_request):
"""CreateProcess.
[Preview API]
:param :class:`<CreateProcessModel> <work-item-tracking.v4_0.models.CreateProcessModel>` create_request:
:rtype: :class:`<ProcessModel> <work-item-tracking.v4_0.models.ProcessModel>`
"""
content = self._serialize.body(create_request, 'CreateProcessModel')
response = self._send(http_method='POST',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='4.0-preview.1',
content=content)
return self._deserialize('ProcessModel', response)
def delete_process(self, process_type_id):
"""DeleteProcess.
[Preview API]
:param str process_type_id:
"""
route_values = {}
if process_type_id is not None:
route_values['processTypeId'] = self._serialize.url('process_type_id', process_type_id, 'str')
self._send(http_method='DELETE',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='4.0-preview.1',
route_values=route_values)
def get_process_by_id(self, process_type_id, expand=None):
"""GetProcessById.
[Preview API]
:param str process_type_id:
:param str expand:
:rtype: :class:`<ProcessModel> <work-item-tracking.v4_0.models.ProcessModel>`
"""
route_values = {}
if process_type_id is not None:
route_values['processTypeId'] = self._serialize.url('process_type_id', process_type_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='4.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ProcessModel', response)
def get_processes(self, expand=None):
"""GetProcesses.
[Preview API]
:param str expand:
:rtype: [ProcessModel]
"""
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='4.0-preview.1',
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[ProcessModel]', response)
def update_process(self, update_request, process_type_id):
"""UpdateProcess.
[Preview API]
:param :class:`<UpdateProcessModel> <work-item-tracking.v4_0.models.UpdateProcessModel>` update_request:
:param str process_type_id:
:rtype: :class:`<ProcessModel> <work-item-tracking.v4_0.models.ProcessModel>`
"""
route_values = {}
if process_type_id is not None:
route_values['processTypeId'] = self._serialize.url('process_type_id', process_type_id, 'str')
content = self._serialize.body(update_request, 'UpdateProcessModel')
response = self._send(http_method='PATCH',
location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af',
version='4.0-preview.1',
route_values=route_values,
content=content)
return self._deserialize('ProcessModel', response)
def add_work_item_type_rule(self, field_rule, process_id, wit_ref_name):
"""AddWorkItemTypeRule.
[Preview API]
:param :class:`<FieldRuleModel> <work-item-tracking.v4_0.models.FieldRuleModel>` field_rule:
:param str process_id:
:param str wit_ref_name:
:rtype: :class:`<FieldRuleModel> <work-item-tracking.v4_0.models.FieldRuleModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(field_rule, 'FieldRuleModel')
response = self._send(http_method='POST',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='4.0-preview.1',
route_values=route_values,
content=content)
return self._deserialize('FieldRuleModel', response)
def delete_work_item_type_rule(self, process_id, wit_ref_name, rule_id):
"""DeleteWorkItemTypeRule.
[Preview API]
:param str process_id:
:param str wit_ref_name:
:param str rule_id:
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if rule_id is not None:
route_values['ruleId'] = self._serialize.url('rule_id', rule_id, 'str')
self._send(http_method='DELETE',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='4.0-preview.1',
route_values=route_values)
def get_work_item_type_rule(self, process_id, wit_ref_name, rule_id):
"""GetWorkItemTypeRule.
[Preview API]
:param str process_id:
:param str wit_ref_name:
:param str rule_id:
:rtype: :class:`<FieldRuleModel> <work-item-tracking.v4_0.models.FieldRuleModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if rule_id is not None:
route_values['ruleId'] = self._serialize.url('rule_id', rule_id, 'str')
response = self._send(http_method='GET',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='4.0-preview.1',
route_values=route_values)
return self._deserialize('FieldRuleModel', response)
def get_work_item_type_rules(self, process_id, wit_ref_name):
"""GetWorkItemTypeRules.
[Preview API]
:param str process_id:
:param str wit_ref_name:
:rtype: [FieldRuleModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='4.0-preview.1',
route_values=route_values,
returns_collection=True)
return self._deserialize('[FieldRuleModel]', response)
def update_work_item_type_rule(self, field_rule, process_id, wit_ref_name, rule_id):
"""UpdateWorkItemTypeRule.
[Preview API]
:param :class:`<FieldRuleModel> <work-item-tracking.v4_0.models.FieldRuleModel>` field_rule:
:param str process_id:
:param str wit_ref_name:
:param str rule_id:
:rtype: :class:`<FieldRuleModel> <work-item-tracking.v4_0.models.FieldRuleModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if rule_id is not None:
route_values['ruleId'] = self._serialize.url('rule_id', rule_id, 'str')
content = self._serialize.body(field_rule, 'FieldRuleModel')
response = self._send(http_method='PUT',
location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
version='4.0-preview.1',
route_values=route_values,
content=content)
return self._deserialize('FieldRuleModel', response)
def get_state_definition(self, process_id, wit_ref_name, state_id):
"""GetStateDefinition.
[Preview API]
:param str process_id:
:param str wit_ref_name:
:param str state_id:
:rtype: :class:`<WorkItemStateResultModel> <work-item-tracking.v4_0.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
response = self._send(http_method='GET',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='4.0-preview.1',
route_values=route_values)
return self._deserialize('WorkItemStateResultModel', response)
def get_state_definitions(self, process_id, wit_ref_name):
"""GetStateDefinitions.
[Preview API]
:param str process_id:
:param str wit_ref_name:
:rtype: [WorkItemStateResultModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='31015d57-2dff-4a46-adb3-2fb4ee3dcec9',
version='4.0-preview.1',
route_values=route_values,
returns_collection=True)
return self._deserialize('[WorkItemStateResultModel]', response)
def get_work_item_type(self, process_id, wit_ref_name, expand=None):
"""GetWorkItemType.
[Preview API]
:param str process_id:
:param str wit_ref_name:
:param str expand:
:rtype: :class:`<WorkItemTypeModel> <work-item-tracking.v4_0.models.WorkItemTypeModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7',
version='4.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemTypeModel', response)
def get_work_item_types(self, process_id, expand=None):
"""GetWorkItemTypes.
[Preview API]
:param str process_id:
:param str expand:
:rtype: [WorkItemTypeModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7',
version='4.0-preview.1',
route_values=route_values,
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[WorkItemTypeModel]', response)
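The content above is an auto-generated client for the Azure DevOps (VSTS) work item tracking process REST API. Purely as a hedged usage sketch, not something contained in this record: the credential class comes from msrest, and the organization URL and token below are placeholders.

```python
# Hypothetical usage sketch for the generated client defined above.
# BasicAuthentication is from msrest; the URL and personal access token are
# placeholders, not values taken from this dataset row.
from msrest.authentication import BasicAuthentication

creds = BasicAuthentication('', '<personal-access-token>')  # placeholder PAT
client = WorkItemTrackingClient(base_url='https://fabrikam.visualstudio.com', creds=creds)

# List the processes defined for the account and print the deserialized models.
for process in client.get_processes():
    print(process)
```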
Row 1 (remaining columns):
avg_line_length: 47.450667 | max_line_length: 112 | alphanum_fraction: 0.58323
qsc_code_num_words_quality_signal: 1,885 | qsc_code_num_chars_quality_signal: 17,794 | qsc_code_mean_word_length_quality_signal: 5.226525 | qsc_code_frac_words_unique_quality_signal: 0.102387
qsc_code_frac_chars_top_2grams_quality_signal: 0.087089 | qsc_code_frac_chars_top_3grams_quality_signal: 0.045676 | qsc_code_frac_chars_top_4grams_quality_signal: 0.042631
qsc_code_frac_chars_dupe_5grams_quality_signal: 0.818819 | qsc_code_frac_chars_dupe_6grams_quality_signal: 0.801462 | qsc_code_frac_chars_dupe_7grams_quality_signal: 0.758526 | qsc_code_frac_chars_dupe_8grams_quality_signal: 0.727162 | qsc_code_frac_chars_dupe_9grams_quality_signal: 0.711937 | qsc_code_frac_chars_dupe_10grams_quality_signal: 0.707065
qsc_code_frac_chars_replacement_symbols_quality_signal: 0 | qsc_code_frac_chars_digital_quality_signal: 0.033676 | qsc_code_frac_chars_whitespace_quality_signal: 0.295774
qsc_code_size_file_byte_quality_signal: 17,794 | qsc_code_num_lines_quality_signal: 374 | qsc_code_num_chars_line_max_quality_signal: 113 | qsc_code_num_chars_line_mean_quality_signal: 47.57754
qsc_code_frac_chars_alphabet_quality_signal: 0.752534 | qsc_code_frac_chars_comments_quality_signal: 0.190345 | qsc_code_cate_xml_start_quality_signal: 0 | qsc_code_frac_lines_dupe_lines_quality_signal: 0.799127 | qsc_code_cate_autogen_quality_signal: 0
qsc_code_frac_lines_long_string_quality_signal: 0 | qsc_code_frac_chars_string_length_quality_signal: 0.152397 | qsc_code_frac_chars_long_word_length_quality_signal: 0.051562 | qsc_code_frac_lines_string_concat_quality_signal: 0 | qsc_code_cate_encoded_data_quality_signal: 0
qsc_code_frac_chars_hex_words_quality_signal: 0 | qsc_code_frac_lines_prompt_comments_quality_signal: 0 | qsc_code_frac_lines_assert_quality_signal: 0
qsc_codepython_cate_ast_quality_signal: 1 | qsc_codepython_frac_lines_func_ratio_quality_signal: 0.082969 | qsc_codepython_cate_var_zero_quality_signal: false | qsc_codepython_frac_lines_pass_quality_signal: 0 | qsc_codepython_frac_lines_import_quality_signal: 0.0131 | qsc_codepython_frac_lines_simplefunc_quality_signal: 0 | qsc_codepython_score_lines_no_logic_quality_signal: 0.174672 | qsc_codepython_frac_lines_print_quality_signal: 0
qsc_code_num_words: 0 | qsc_code_num_chars: 0 | qsc_code_mean_word_length: 0 | qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: 0 | qsc_code_frac_chars_top_3grams: 0 | qsc_code_frac_chars_top_4grams: 0
qsc_code_frac_chars_dupe_5grams: 1 | qsc_code_frac_chars_dupe_6grams: 1 | qsc_code_frac_chars_dupe_7grams: 1 | qsc_code_frac_chars_dupe_8grams: 1 | qsc_code_frac_chars_dupe_9grams: 1 | qsc_code_frac_chars_dupe_10grams: 1
qsc_code_frac_chars_replacement_symbols: 0 | qsc_code_frac_chars_digital: 0 | qsc_code_frac_chars_whitespace: 0
qsc_code_size_file_byte: 0 | qsc_code_num_lines: 0 | qsc_code_num_chars_line_max: 0 | qsc_code_num_chars_line_mean: 0
qsc_code_frac_chars_alphabet: 0 | qsc_code_frac_chars_comments: 0 | qsc_code_cate_xml_start: 0 | qsc_code_frac_lines_dupe_lines: 1 | qsc_code_cate_autogen: 0
qsc_code_frac_lines_long_string: 0 | qsc_code_frac_chars_string_length: 0 | qsc_code_frac_chars_long_word_length: 0 | qsc_code_frac_lines_string_concat: null | qsc_code_cate_encoded_data: 0
qsc_code_frac_chars_hex_words: 0 | qsc_code_frac_lines_prompt_comments: 0 | qsc_code_frac_lines_assert: 0
qsc_codepython_cate_ast: 0 | qsc_codepython_frac_lines_func_ratio: 0 | qsc_codepython_cate_var_zero: 0 | qsc_codepython_frac_lines_pass: 0 | qsc_codepython_frac_lines_import: 0 | qsc_codepython_frac_lines_simplefunc: 0 | qsc_codepython_score_lines_no_logic: 0 | qsc_codepython_frac_lines_print: 0
effective: 0 | hits: 7
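Signals like these are usually consumed by filtering heuristics. The file above is highly repetitive boilerplate (qsc_code_frac_chars_dupe_10grams_quality_signal is about 0.71 and qsc_code_frac_lines_dupe_lines_quality_signal about 0.80), which is exactly what duplication-based filters target. A minimal sketch of such a filter follows; the threshold values are invented for illustration and are not taken from this dataset.

```python
# Hypothetical filter over one record (a dict keyed by the column names above).
# All cut-off values below are invented for illustration.
def keep_record(row: dict) -> bool:
    return (
        row["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.5
        and row["qsc_code_frac_lines_dupe_lines_quality_signal"] < 0.5
        and row["qsc_code_num_chars_line_max_quality_signal"] < 1000
        and row["qsc_code_frac_chars_alphabet_quality_signal"] > 0.25
    )
```

Under these invented cut-offs the record above would be dropped for heavy n-gram duplication.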
Row 2:
hexsha: 968b965eb3cab86f6869a9d584575518f06675f2
size: 22,435
ext: py
lang: Python
max_stars_repo_path: utils/scripts/OOOlevelGen/src/sprite_templates/ShoveIt.py
max_stars_repo_name: fullscreennl/bullettime
max_stars_repo_head_hexsha: 8967449cdf926aaed6bb7ec217d92e0689fb0c3c
max_stars_repo_licenses: ["MIT"]
max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: utils/scripts/OOOlevelGen/src/sprite_templates/ShoveIt.py
max_issues_repo_name: fullscreennl/bullettime
max_issues_repo_head_hexsha: 8967449cdf926aaed6bb7ec217d92e0689fb0c3c
max_issues_repo_licenses: ["MIT"]
max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: utils/scripts/OOOlevelGen/src/sprite_templates/ShoveIt.py
max_forks_repo_name: fullscreennl/bullettime
max_forks_repo_head_hexsha: 8967449cdf926aaed6bb7ec217d92e0689fb0c3c
max_forks_repo_licenses: ["MIT"]
max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
import MonsterBuilder
from sprites import *
def create(lb,xpos):
xml = """ <level>
<!-- BEGIN Monster construction -->
<!--
<sprite shape="rect" type="Enemy.EnemySprite" x="215" y="69" width="153" height="69" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt2" sheet="6" firstframe="ShoveIt_body.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="105" y="91" width="134" height="38" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt16" sheet="6" firstframe="ShoveIt_piston.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="-1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="153" y="40" width="79" height="79" angle="0" restitution="0.2" static="false" friction="0.5" density="20" sheet="6" firstframe="ShoveIt_wheel.png" setName="ShoveIt0" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="-1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="268" y="39" width="79" height="79" angle="0" restitution="0.2" static="false" friction="0.5" density="10" sheet="6" firstframe="ShoveIt_wheel.png" setName="ShoveIt1" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="276" y="157" width="16" height="108" angle="6" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt5" sheet="6" firstframe="ShoveIt_roofsupport.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="201" y="155" width="9" height="107" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt6" sheet="6" firstframe="ShoveIt_roofsupport.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="245" y="218" width="104" height="20" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt7" sheet="6" firstframe="ShoveIt_roof.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="25" y="94" width="49" height="140" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt18" sheet="6" firstframe="ShoveIt_scraper.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="377" y="105" width="34" height="34" angle="0" restitution="0.2" static="false" friction="0.5" density="5" sheet="6" firstframe="ShoveIt_corpsehead.png" setName="ShoveIt22" classname="ShoveItBrain" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="431" y="102" width="60" height="29" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt23" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="470" y="71" width="11" height="40" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt24" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="469" y="133" width="10" height="41" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt25" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="472" y="35" width="8" height="43" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt26" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="468" y="174" width="7" height="38" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt27" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="411" y="70" width="12" height="34" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt28" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="407" y="138" width="12" height="35" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt29" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="405" y="178" width="9" height="38" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt30" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="411" y="37" width="11" height="40" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt31" sheet="6" firstframe="redrect.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite type="Joints.RevoluteJoint" id="3" body1="ShoveIt0" body2="ShoveIt2" motor_speed="50.0" torque="1000.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="170" by="96" b2_Xoffset="-45" b2_Yoffset="27" ax="153" ay="40" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite type="Joints.RevoluteJoint" id="4" body1="ShoveIt1" body2="ShoveIt2" motor_speed="-1.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="248" by="94" b2_Xoffset="33" b2_Yoffset="25" ax="268" ay="39" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite type="Joints.DistanceJoint" id="9" body1="ShoveIt2" body2="ShoveIt6" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-21" b1_Yoffset="29" b2_Xoffset="-2" b2_Yoffset="-47" bx="199" by="108" ax="194" ay="98"/>
<sprite type="Joints.DistanceJoint" id="10" body1="ShoveIt7" body2="ShoveIt6" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-47" b1_Yoffset="2" b2_Xoffset="-2" b2_Yoffset="49" bx="199" by="204" ax="198" ay="220"/>
<sprite type="Joints.DistanceJoint" id="11" body1="ShoveIt5" body2="ShoveIt7" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-6" b1_Yoffset="49" b2_Xoffset="22" b2_Yoffset="-2" bx="267" by="216" ax="270" ay="206"/>
<sprite type="Joints.DistanceJoint" id="12" body1="ShoveIt2" body2="ShoveIt5" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="65" b1_Yoffset="32" b2_Xoffset="3" b2_Yoffset="-46" bx="279" by="111" ax="280" ay="101"/>
<sprite type="Joints.DistanceJoint" id="13" body1="ShoveIt2" body2="ShoveIt7" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="66" b1_Yoffset="31" b2_Xoffset="-47" b2_Yoffset="-4" bx="198" by="214" ax="281" ay="100"/>
<sprite type="Joints.DistanceJoint" id="14" body1="ShoveIt2" body2="ShoveIt7" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-20" b1_Yoffset="30" b2_Xoffset="23" b2_Yoffset="-2" bx="268" by="216" ax="195" ay="99"/>
<sprite type="Joints.RevoluteJoint" id="17" body1="ShoveIt16" body2="ShoveIt2" motor_speed="50.0" torque="1000.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="199" by="69" b2_Xoffset="-16" b2_Yoffset="0" ax="152" ay="93" b1_Xoffset="47" b1_Yoffset="2"/>
<sprite type="Joints.DistanceJoint" id="19" body1="ShoveIt16" body2="ShoveIt18" damping="0.2" freq="30" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="2" b1_Yoffset="0" b2_Xoffset="16" b2_Yoffset="-63" bx="41" by="31" ax="107" ay="91"/>
<sprite type="Joints.DistanceJoint" id="20" body1="ShoveIt18" body2="ShoveIt16" damping="0.2" freq="30" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="18" b1_Yoffset="-1" b2_Xoffset="-50" b2_Yoffset="2" bx="55" by="93" ax="43" ay="93"/>
<sprite type="Joints.DistanceJoint" id="21" body1="ShoveIt18" body2="ShoveIt16" damping="0.2" freq="30" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="17" b1_Yoffset="66" b2_Xoffset="3" b2_Yoffset="1" bx="108" by="92" ax="42" ay="160"/>
<sprite type="Joints.DistanceJoint" id="32" body1="ShoveIt22" body2="ShoveIt2" damping="0.2" freq="2" texture_type="line" texture="rect.png" texture_width="20" b1_Xoffset="-12" b1_Yoffset="-1" b2_Xoffset="66" b2_Yoffset="30" bx="281" by="99" ax="365" ay="104"/>
<sprite type="Joints.DistanceJoint" id="33" body1="ShoveIt29" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-3" b1_Yoffset="-14" b2_Xoffset="-27" b2_Yoffset="14" bx="404" by="116" ax="404" ay="124"/>
<sprite type="Joints.DistanceJoint" id="34" body1="ShoveIt28" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-2" b1_Yoffset="16" b2_Xoffset="-27" b2_Yoffset="-9" bx="404" by="93" ax="409" ay="86"/>
<sprite type="Joints.DistanceJoint" id="35" body1="ShoveIt23" body2="ShoveIt22" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-28" b1_Yoffset="1" b2_Xoffset="15" b2_Yoffset="-1" bx="392" by="104" ax="403" ay="103"/>
<sprite type="Joints.DistanceJoint" id="36" body1="ShoveIt25" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-2" b1_Yoffset="-17" b2_Xoffset="26" b2_Yoffset="13" bx="457" by="115" ax="467" ay="116"/>
<sprite type="Joints.DistanceJoint" id="37" body1="ShoveIt24" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-1" b1_Yoffset="15" b2_Xoffset="26" b2_Yoffset="-11" bx="457" by="91" ax="469" ay="86"/>
<sprite type="Joints.DistanceJoint" id="38" body1="ShoveIt31" body2="ShoveIt28" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-2" b1_Yoffset="14" b2_Xoffset="-1" b2_Yoffset="-12" bx="410" by="58" ax="409" ay="51"/>
<sprite type="Joints.DistanceJoint" id="39" body1="ShoveIt30" body2="ShoveIt29" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="0" b1_Yoffset="-15" b2_Xoffset="-2" b2_Yoffset="14" bx="405" by="152" ax="405" ay="163"/>
<sprite type="Joints.DistanceJoint" id="40" body1="ShoveIt27" body2="ShoveIt25" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="0" b1_Yoffset="-19" b2_Xoffset="-1" b2_Yoffset="16" bx="468" by="149" ax="468" ay="155"/>
<sprite type="Joints.DistanceJoint" id="41" body1="ShoveIt26" body2="ShoveIt24" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-2" b1_Yoffset="12" b2_Xoffset="0" b2_Yoffset="-17" bx="470" by="54" ax="470" ay="47"/>
-->
<!-- END Monster construction -->
<!-- BEGIN Monster construction -->
<sprite shape="rect" type="Enemy.EnemySprite" x="215" y="69" width="153" height="69" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt2" sheet="6" firstframe="ShoveIt_body.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="105" y="91" width="134" height="38" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt16" sheet="6" firstframe="ShoveIt_piston.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="-1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="153" y="40" width="79" height="79" angle="0" restitution="0.2" static="false" friction="0.5" density="20" sheet="6" firstframe="ShoveIt_wheel.png" setName="ShoveIt0" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="-1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="268" y="39" width="79" height="79" angle="0" restitution="0.2" static="false" friction="0.5" density="10" sheet="6" firstframe="ShoveIt_wheel.png" setName="ShoveIt1" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="276" y="157" width="16" height="108" angle="6" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt5" sheet="6" firstframe="ShoveIt_roofsupport.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="201" y="155" width="9" height="107" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt6" sheet="6" firstframe="ShoveIt_roofsupport.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="245" y="218" width="104" height="20" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt7" sheet="6" firstframe="ShoveIt_roof.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="25" y="94" width="49" height="140" angle="0" restitution="0.2" static="false" friction="0.5" density="1" setName="ShoveIt18" sheet="6" firstframe="ShoveIt_scraper.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="377" y="105" width="34" height="34" angle="0" restitution="0.2" static="false" friction="0.5" density="5" sheet="6" firstframe="ShoveIt_corpsehead.png" setName="ShoveIt22" classname="ShoveItBrain" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="432" y="102" width="60" height="29" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt23" sheet="6" firstframe="ShoveIt_corpsebody.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="470" y="71" width="11" height="40" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt24" sheet="6" firstframe="ShoveIt_corpseupperbodypart.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="468" y="133" width="10" height="41" angle="180" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt25" sheet="6" firstframe="ShoveIt_corpseupperbodypart.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="472" y="35" width="8" height="43" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt26" sheet="6" firstframe="ShoveIt_corpselowerleg.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="469" y="174" width="7" height="38" angle="180" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt27" sheet="6" firstframe="ShoveIt_corpselowerleg.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="411" y="70" width="12" height="34" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt28" sheet="6" firstframe="ShoveIt_corpseupperbodypart.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="406" y="138" width="12" height="35" angle="-180" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt29" sheet="6" firstframe="ShoveIt_corpseupperbodypart.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="407" y="177" width="10" height="34" angle="-180" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt30" sheet="6" firstframe="ShoveIt_corpselowerarm.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="411" y="37" width="11" height="40" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="ShoveIt31" sheet="6" firstframe="ShoveIt_corpselowerarm.png" classname="ShoveItLimb" spritedata="ShoveIt" groupIndex="1"/>
<sprite type="Joints.RevoluteJoint" id="3" body1="ShoveIt0" body2="ShoveIt2" motor_speed="50.0" torque="1000.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="170" by="96" b2_Xoffset="-45" b2_Yoffset="27" ax="153" ay="40" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite type="Joints.RevoluteJoint" id="4" body1="ShoveIt1" body2="ShoveIt2" motor_speed="-1.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="248" by="94" b2_Xoffset="33" b2_Yoffset="25" ax="268" ay="39" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite type="Joints.DistanceJoint" id="9" body1="ShoveIt2" body2="ShoveIt6" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-21" b1_Yoffset="29" b2_Xoffset="-2" b2_Yoffset="-47" bx="199" by="108" ax="194" ay="98"/>
<sprite type="Joints.DistanceJoint" id="10" body1="ShoveIt7" body2="ShoveIt6" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-47" b1_Yoffset="2" b2_Xoffset="-2" b2_Yoffset="49" bx="199" by="204" ax="198" ay="220"/>
<sprite type="Joints.DistanceJoint" id="11" body1="ShoveIt5" body2="ShoveIt7" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-6" b1_Yoffset="49" b2_Xoffset="22" b2_Yoffset="-2" bx="267" by="216" ax="270" ay="206"/>
<sprite type="Joints.DistanceJoint" id="12" body1="ShoveIt2" body2="ShoveIt5" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="65" b1_Yoffset="32" b2_Xoffset="3" b2_Yoffset="-46" bx="279" by="111" ax="280" ay="101"/>
<sprite type="Joints.DistanceJoint" id="13" body1="ShoveIt2" body2="ShoveIt7" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="66" b1_Yoffset="31" b2_Xoffset="-47" b2_Yoffset="-4" bx="198" by="214" ax="281" ay="100"/>
<sprite type="Joints.DistanceJoint" id="14" body1="ShoveIt2" body2="ShoveIt7" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-20" b1_Yoffset="30" b2_Xoffset="23" b2_Yoffset="-2" bx="268" by="216" ax="195" ay="99"/>
<sprite type="Joints.RevoluteJoint" id="17" body1="ShoveIt16" body2="ShoveIt2" motor_speed="50.0" torque="1000.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="199" by="69" b2_Xoffset="-16" b2_Yoffset="0" ax="152" ay="93" b1_Xoffset="47" b1_Yoffset="2"/>
<sprite type="Joints.DistanceJoint" id="19" body1="ShoveIt16" body2="ShoveIt18" damping="0.2" freq="30" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="2" b1_Yoffset="0" b2_Xoffset="16" b2_Yoffset="-63" bx="41" by="31" ax="107" ay="91"/>
<sprite type="Joints.DistanceJoint" id="20" body1="ShoveIt18" body2="ShoveIt16" damping="0.2" freq="30" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="18" b1_Yoffset="-1" b2_Xoffset="-50" b2_Yoffset="2" bx="55" by="93" ax="43" ay="93"/>
<sprite type="Joints.DistanceJoint" id="21" body1="ShoveIt18" body2="ShoveIt16" damping="0.2" freq="30" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="17" b1_Yoffset="66" b2_Xoffset="3" b2_Yoffset="1" bx="108" by="92" ax="42" ay="160"/>
<sprite type="Joints.DistanceJoint" id="32" body1="ShoveIt22" body2="ShoveIt2" damping="0.2" freq="2" texture_type="line" texture="rect.png" texture_width="20" b1_Xoffset="-12" b1_Yoffset="-1" b2_Xoffset="66" b2_Yoffset="30" bx="281" by="99" ax="365" ay="104"/>
<sprite type="Joints.DistanceJoint" id="33" body1="ShoveIt29" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="0" b1_Yoffset="-14" b2_Xoffset="-27" b2_Yoffset="13" bx="405" by="115" ax="406" ay="124"/>
<sprite type="Joints.DistanceJoint" id="34" body1="ShoveIt28" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-2" b1_Yoffset="16" b2_Xoffset="-27" b2_Yoffset="-9" bx="405" by="93" ax="409" ay="86"/>
<sprite type="Joints.DistanceJoint" id="35" body1="ShoveIt23" body2="ShoveIt22" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-28" b1_Yoffset="1" b2_Xoffset="15" b2_Yoffset="-1" bx="392" by="104" ax="404" ay="103"/>
<sprite type="Joints.DistanceJoint" id="36" body1="ShoveIt25" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-1" b1_Yoffset="-16" b2_Xoffset="26" b2_Yoffset="13" bx="458" by="115" ax="467" ay="117"/>
<sprite type="Joints.DistanceJoint" id="37" body1="ShoveIt24" body2="ShoveIt23" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-1" b1_Yoffset="15" b2_Xoffset="26" b2_Yoffset="-11" bx="458" by="91" ax="469" ay="86"/>
<sprite type="Joints.DistanceJoint" id="38" body1="ShoveIt31" body2="ShoveIt28" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-2" b1_Yoffset="14" b2_Xoffset="-1" b2_Yoffset="-12" bx="410" by="58" ax="409" ay="51"/>
<sprite type="Joints.DistanceJoint" id="39" body1="ShoveIt30" body2="ShoveIt29" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="0" b1_Yoffset="-15" b2_Xoffset="1" b2_Yoffset="13" bx="407" by="151" ax="407" ay="162"/>
<sprite type="Joints.DistanceJoint" id="40" body1="ShoveIt27" body2="ShoveIt25" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="0" b1_Yoffset="-19" b2_Xoffset="1" b2_Yoffset="15" bx="469" by="148" ax="469" ay="155"/>
<sprite type="Joints.DistanceJoint" id="41" body1="ShoveIt26" body2="ShoveIt24" damping="0.2" freq="20" texture_type="none" texture="rect.png" texture_width="20" b1_Xoffset="-2" b1_Yoffset="12" b2_Xoffset="0" b2_Yoffset="-17" bx="470" by="54" ax="470" ay="47"/>
<!-- END Monster construction -->
</level>
"""
MonsterBuilder.createFromXMLString(lb,xpos,xml)
lb.addObject(Enemy.EnemySprite(x=(238+xpos), y=134,width=55,height=55,angle='0',restitution=0.8,static='false',friction=0.5,density=1,classname='BlobSprite',firstframe='monsterblob.png' ))
Row 2 (remaining columns):
avg_line_length: 217.815534 | max_line_length: 314 | alphanum_fraction: 0.717985
qsc_code_num_words_quality_signal: 3,474 | qsc_code_num_chars_quality_signal: 22,435 | qsc_code_mean_word_length_quality_signal: 4.546056 | qsc_code_frac_words_unique_quality_signal: 0.0711
qsc_code_frac_chars_top_2grams_quality_signal: 0.009371 | qsc_code_frac_chars_top_3grams_quality_signal: 0.044577 | qsc_code_frac_chars_top_4grams_quality_signal: 0.069778
qsc_code_frac_chars_dupe_5grams_quality_signal: 0.973089 | qsc_code_frac_chars_dupe_6grams_quality_signal: 0.970177 | qsc_code_frac_chars_dupe_7grams_quality_signal: 0.970177 | qsc_code_frac_chars_dupe_8grams_quality_signal: 0.959032 | qsc_code_frac_chars_dupe_9grams_quality_signal: 0.955677 | qsc_code_frac_chars_dupe_10grams_quality_signal: 0.955677
qsc_code_frac_chars_replacement_symbols_quality_signal: 0 | qsc_code_frac_chars_digital_quality_signal: 0.107967 | qsc_code_frac_chars_whitespace_quality_signal: 0.067395
qsc_code_size_file_byte_quality_signal: 22,435 | qsc_code_num_lines_quality_signal: 103 | qsc_code_num_chars_line_max_quality_signal: 315 | qsc_code_num_chars_line_mean_quality_signal: 217.815534
qsc_code_frac_chars_alphabet_quality_signal: 0.646848 | qsc_code_frac_chars_comments_quality_signal: 0 | qsc_code_cate_xml_start_quality_signal: 0 | qsc_code_frac_lines_dupe_lines_quality_signal: 0.553191 | qsc_code_cate_autogen_quality_signal: 0
qsc_code_frac_lines_long_string_quality_signal: 0.851064 | qsc_code_frac_chars_string_length_quality_signal: 0.986851 | qsc_code_frac_chars_long_word_length_quality_signal: 0.186798 | qsc_code_frac_lines_string_concat_quality_signal: 0 | qsc_code_cate_encoded_data_quality_signal: 0
qsc_code_frac_chars_hex_words_quality_signal: 0 | qsc_code_frac_lines_prompt_comments_quality_signal: 0 | qsc_code_frac_lines_assert_quality_signal: 0
qsc_codepython_cate_ast_quality_signal: 1 | qsc_codepython_frac_lines_func_ratio_quality_signal: 0.010638 | qsc_codepython_cate_var_zero_quality_signal: false | qsc_codepython_frac_lines_pass_quality_signal: 0 | qsc_codepython_frac_lines_import_quality_signal: 0.021277 | qsc_codepython_frac_lines_simplefunc_quality_signal: 0 | qsc_codepython_score_lines_no_logic_quality_signal: 0.031915 | qsc_codepython_frac_lines_print_quality_signal: 0
qsc_code_num_words: 0 | qsc_code_num_chars: 0 | qsc_code_mean_word_length: 0 | qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: 0 | qsc_code_frac_chars_top_3grams: 0 | qsc_code_frac_chars_top_4grams: 0
qsc_code_frac_chars_dupe_5grams: 1 | qsc_code_frac_chars_dupe_6grams: 1 | qsc_code_frac_chars_dupe_7grams: 1 | qsc_code_frac_chars_dupe_8grams: 1 | qsc_code_frac_chars_dupe_9grams: 1 | qsc_code_frac_chars_dupe_10grams: 1
qsc_code_frac_chars_replacement_symbols: 0 | qsc_code_frac_chars_digital: 0 | qsc_code_frac_chars_whitespace: 0
qsc_code_size_file_byte: 0 | qsc_code_num_lines: 0 | qsc_code_num_chars_line_max: 0 | qsc_code_num_chars_line_mean: 1
qsc_code_frac_chars_alphabet: 0 | qsc_code_frac_chars_comments: 0 | qsc_code_cate_xml_start: 0 | qsc_code_frac_lines_dupe_lines: 0 | qsc_code_cate_autogen: 0
qsc_code_frac_lines_long_string: 1 | qsc_code_frac_chars_string_length: 1 | qsc_code_frac_chars_long_word_length: 0 | qsc_code_frac_lines_string_concat: null | qsc_code_cate_encoded_data: 0
qsc_code_frac_chars_hex_words: 0 | qsc_code_frac_lines_prompt_comments: 0 | qsc_code_frac_lines_assert: 0
qsc_codepython_cate_ast: 0 | qsc_codepython_frac_lines_func_ratio: 0 | qsc_codepython_cate_var_zero: 0 | qsc_codepython_frac_lines_pass: 0 | qsc_codepython_frac_lines_import: 0 | qsc_codepython_frac_lines_simplefunc: 0 | qsc_codepython_score_lines_no_logic: 0 | qsc_codepython_frac_lines_print: 0
effective: 0 | hits: 9
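The frac_chars_dupe_*grams family is what most sharply separates these two files from typical hand-written code: roughly 0.70 to 0.82 for the generated REST client in row 1, and above 0.95 for the sprite template above, whose level XML appears twice nearly verbatim. Assuming the signal means the fraction of word characters covered by n-grams that occur more than once, which is an assumption about this dataset rather than something stated in the dump, a minimal sketch:

```python
from collections import Counter

# Sketch of a frac_chars_dupe_ngrams-style measure: the fraction of characters
# in the document's words that fall inside a word n-gram occurring more than
# once. This is an assumed definition for illustration, not the dataset's code.
def frac_chars_dupe_ngrams(content: str, n: int) -> float:
    words = content.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    covered = [False] * len(words)
    for i, gram in enumerate(ngrams):
        if counts[gram] > 1:
            for j in range(i, i + n):
                covered[j] = True
    dup_chars = sum(len(w) for w, hit in zip(words, covered) if hit)
    total_chars = sum(len(w) for w in words)
    return dup_chars / total_chars if total_chars else 0.0
```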
Row 3:
hexsha: 96b09af355b875cd229a66cdb5f65e137f05cd24
size: 78,460
ext: py
lang: Python
max_stars_repo_path: sdl2/sdlgfx.py
max_stars_repo_name: smcv/py-sdl2
max_stars_repo_head_hexsha: 209095d858b461c6314f7b7b96b2051ec1656d20
max_stars_repo_licenses: ["CC0-1.0"]
max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: sdl2/sdlgfx.py
max_issues_repo_name: smcv/py-sdl2
max_issues_repo_head_hexsha: 209095d858b461c6314f7b7b96b2051ec1656d20
max_issues_repo_licenses: ["CC0-1.0"]
max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: sdl2/sdlgfx.py
max_forks_repo_name: smcv/py-sdl2
max_forks_repo_head_hexsha: 209095d858b461c6314f7b7b96b2051ec1656d20
max_forks_repo_licenses: ["CC0-1.0"]
max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
import os
from ctypes import Structure, POINTER, c_int, c_float, c_void_p, c_char, \
c_char_p, c_double
from ctypes import POINTER as _P
from .dll import DLL, SDLFunc
from .stdinc import Uint8, Uint32, Sint16
from .render import SDL_Renderer
from .surface import SDL_Surface
# NOTE: This module is currently missing wrappers for the image filtering
# functions in SDL2_imageFilter.h. However, because we have Pillow on Python
# this isn't really a pressing concern. Time permitting, these functions may
# be wrapped at a later date for the sake of completeness.
__all__ = [
# Structs
"FPSManager",
# Defines
"FPS_UPPER_LIMIT", "FPS_LOWER_LIMIT", "FPS_DEFAULT",
"SDL2_GFXPRIMITIVES_MAJOR", "SDL2_GFXPRIMITIVES_MAJOR",
"SDL2_GFXPRIMITIVES_MICRO", "SMOOTHING_OFF", "SMOOTHING_ON",
# Functions
"SDL_initFramerate", "SDL_getFramerate",
"SDL_setFramerate", "SDL_getFramecount", "SDL_framerateDelay",
"pixelColor", "pixelRGBA", "hlineColor",
"hlineRGBA", "vlineColor", "vlineRGBA", "rectangleColor",
"rectangleRGBA", "roundedRectangleColor", "roundedRectangleRGBA",
"boxColor", "boxRGBA", "roundedBoxColor", "roundedBoxRGBA",
"lineColor", "lineRGBA", "aalineColor", "aalineRGBA",
"thickLineColor", "thickLineRGBA", "circleColor", "circleRGBA",
"arcColor", "arcRGBA", "aacircleColor", "aacircleRGBA",
"filledCircleColor", "filledCircleRGBA", "ellipseColor",
"ellipseRGBA", "aaellipseColor", "aaellipseRGBA",
"filledEllipseColor", "filledEllipseRGBA", "pieColor", "pieRGBA",
"filledPieColor", "filledPieRGBA", "trigonColor", "trigonRGBA",
"aatrigonColor", "aatrigonRGBA", "filledTrigonColor",
"filledTrigonRGBA", "polygonColor", "polygonRGBA", "aapolygonColor",
"aapolygonRGBA", "filledPolygonColor", "filledPolygonRGBA",
"texturedPolygon", "bezierColor", "bezierRGBA",
"gfxPrimitivesSetFont", "gfxPrimitivesSetFontRotation",
"characterColor", "characterRGBA", "stringColor", "stringRGBA",
"rotozoomSurface", "rotozoomSurfaceXY", "rotozoomSurfaceSize",
"rotozoomSurfaceSizeXY", "zoomSurface", "zoomSurfaceSize", "shrinkSurface",
"rotateSurface90Degrees",
# Python Functions
"get_dll_file"
]
try:
dll = DLL("SDL2_gfx", ["SDL2_gfx", "SDL2_gfx-1.0"],
os.getenv("PYSDL2_DLL_PATH"))
except RuntimeError as exc:
raise ImportError(exc)
def get_dll_file():
"""Gets the file name of the loaded SDL2_gfx library."""
return dll.libfile
_bind = dll.bind_function
# Constants, enums, type definitions, and macros
SDL2_GFXPRIMITIVES_MAJOR = 1
SDL2_GFXPRIMITIVES_MINOR = 0
SDL2_GFXPRIMITIVES_MICRO = 4
FPS_UPPER_LIMIT = 200
FPS_LOWER_LIMIT = 1
FPS_DEFAULT = 30
SMOOTHING_OFF = 0
SMOOTHING_ON = 1
class FPSManager(Structure):
"""A structure holding the state and timing of the framerate manager.
This class can be used with other SDL_gfx functions to set a custom
framerate within a given rendering loop. When used with
:func:`SDL_framerateDelay`, it uses its initial frame onset time
(:attr:`baseticks`) and the duration per frame to try to present frames
at consistent intervals from that initial point.
.. note::
This method of frame pacing may not play nicely with vsync in SDL2.
Attributes:
framecount (int): The number of frames counted by the manager since
being initialized.
rateticks (float): The time delay (in ms) between each frame.
baseticks (int): The milliseconds since SDL initialization at which the
manager was initialized with :func:`SDL_initFramerate`. Used
internally as the initial frame onset time.
lastticks (int): The milliseconds since SDL initialization at which the
previous frame was displayed.
rate (int): The framerate (in Hz) of the manager.
"""
_fields_ = [("framecount", Uint32),
("rateticks", c_float),
("baseticks", Uint32),
("lastticks", Uint32),
("rate", Uint32)
]
# Raw ctypes function definitions
_funcdefs = [
SDLFunc("SDL_initFramerate", [_P(FPSManager)]),
SDLFunc("SDL_setFramerate", [_P(FPSManager), Uint32], c_int),
SDLFunc("SDL_getFramerate", [_P(FPSManager)], c_int),
SDLFunc("SDL_getFramecount", [_P(FPSManager)], Uint32),
SDLFunc("SDL_framerateDelay", [_P(FPSManager)], Uint32),
SDLFunc("pixelColor", [_P(SDL_Renderer), Sint16, Sint16, Uint32], c_int),
SDLFunc("pixelRGBA", [_P(SDL_Renderer), Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("hlineColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("hlineRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("vlineColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("vlineRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("rectangleColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("rectangleRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("roundedRectangleColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("roundedRectangleRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("boxColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("boxRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("roundedBoxColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("roundedBoxRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("lineColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("lineRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("aalineColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("aalineRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("thickLineColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint32], c_int),
SDLFunc("thickLineRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("circleColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("circleRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("arcColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("arcRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("aacircleColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("aacircleRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("filledCircleColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("filledCircleRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("ellipseColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("ellipseRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("aaellipseColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("aaellipseRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("filledEllipseColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("filledEllipseRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("pieColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("pieRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("filledPieColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("filledPieRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("trigonColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("trigonRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("aatrigonColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("aatrigonRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("filledTrigonColor", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Sint16, Uint32], c_int),
SDLFunc("filledTrigonRGBA", [_P(SDL_Renderer), Sint16, Sint16, Sint16, Sint16, Sint16, Sint16, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("polygonColor", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, Uint32], c_int),
SDLFunc("polygonRGBA", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("aapolygonColor", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, Uint32], c_int),
SDLFunc("aapolygonRGBA", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("filledPolygonColor", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, Uint32], c_int),
SDLFunc("filledPolygonRGBA", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("texturedPolygon", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, _P(SDL_Surface), c_int, c_int], c_int),
SDLFunc("bezierColor", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, c_int, Uint32], c_int),
SDLFunc("bezierRGBA", [_P(SDL_Renderer), _P(Sint16), _P(Sint16), c_int, c_int, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("gfxPrimitivesSetFont", [c_void_p, Uint32, Uint32]),
SDLFunc("gfxPrimitivesSetFontRotation", [Uint32]),
SDLFunc("characterColor", [_P(SDL_Renderer), Sint16, Sint16, c_char, Uint32], c_int),
SDLFunc("characterRGBA", [_P(SDL_Renderer), Sint16, Sint16, c_char, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("stringColor", [_P(SDL_Renderer), Sint16, Sint16, c_char_p, Uint32], c_int),
SDLFunc("stringRGBA", [_P(SDL_Renderer), Sint16, Sint16, c_char_p, Uint8, Uint8, Uint8, Uint8], c_int),
SDLFunc("rotozoomSurface", [_P(SDL_Surface), c_double, c_double, c_int], _P(SDL_Surface)),
SDLFunc("rotozoomSurfaceXY", [_P(SDL_Surface), c_double, c_double, c_double, c_int], _P(SDL_Surface)),
SDLFunc("rotozoomSurfaceSize", [c_int, c_int, c_double, c_double, _P(c_int), _P(c_int)]),
SDLFunc("rotozoomSurfaceSizeXY", [c_int, c_int, c_double, c_double, c_double, _P(c_int), _P(c_int)]),
SDLFunc("zoomSurface", [_P(SDL_Surface), c_double, c_double, c_int], _P(SDL_Surface)),
SDLFunc("zoomSurfaceSize", [c_int, c_int, c_double, c_double, _P(c_int), _P(c_int)]),
SDLFunc("shrinkSurface", [_P(SDL_Surface), c_int, c_int], _P(SDL_Surface)),
SDLFunc("rotateSurface90Degrees", [_P(SDL_Surface), c_int], _P(SDL_Surface)),
]
_funcs = {}
for f in _funcdefs:
_funcs[f.name] = _bind(f.name, f.args, f.returns, f.added)
# Python wrapper functions
def SDL_initFramerate(manager):
"""Initializes a framerate manager.
Calling this function on an :class:`FPSManager` initializes it with a
default framerate of 30 Hz and prepares it for counting and timing frames.
If the manager was already initialized, calling this function will reset
its framecount, initial frame onset time, and framerate.
Args:
manager (:obj:`sdlgfx.FPSManager`): The framerate manager to initialize.
"""
return _funcs["SDL_initFramerate"](manager)
def SDL_setFramerate(manager, rate):
"""Sets the framerate of a framerate manager.
Sets a new framerate for the manager, resetting both the framecount and the
the initial frame onset time. Framerates must be between ``FPS_LOWER_LIMIT``
(1) and ``FPS_UPPER_LIMIT`` (200), inclusive, to be accepted.
Args:
manager (:obj:`sdlgfx.FPSManager`): The framerate manager to configure.
rate (int): The new framerate in Hz.
Returns:
int: 0 on success, or -1 if an error occurred.
"""
return _funcs["SDL_setFramerate"](manager, rate)
def SDL_getFramerate(manager):
"""Gets the current framerate for a framerate manager.
Args:
manager (:obj:`sdlgfx.FPSManager`): The framerate manager for which the
currently set framerate will be retrieved.
Returns:
int: The framerate (in Hz) currently set for the manager, or -1 if an
error occurred.
"""
return _funcs["SDL_getFramerate"](manager)
def SDL_getFramecount(manager):
"""Gets the current number of frames counted by a framerate manager.
.. note::
This value is reset whenever a frame is dropped (i.e. the rendering
loop takes longer than the set interval between frames) or the framerate
is changed.
Args:
manager (:obj:`sdlgfx.FPSManager`): The framerate manager for which the
current framecount will be retrieved.
Returns:
int: The current framecount of the manager, or -1 if an error occurred.
"""
return _funcs["SDL_getFramecount"](manager)
def SDL_framerateDelay(manager):
"""Delays execution until the next frame occurs.
This function waits for the next frame onset (as determined by the rate set
by :func:`SDL_setFramerate`) to keep frame pacing consistent. This should be
called once per loop within the program's main rendering loop.
If the rendering loop takes longer than the set framerate, the delay will be
zero and the framecount and initial frame onset time will be reset.
Args:
manager (:obj:`sdlgfx.FPSManager`): The framerate manager to use for
frame pacing.
Returns:
int: The number of milliseconds elapsed since the previous call to this
function, or 0 if an error occurred.
"""
return _funcs["SDL_framerateDelay"](manager)
def pixelColor(renderer, x, y, color):
"""Draws a single pixel to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X (horizontal) coordinate of the pixel.
y (int): The Y (vertical) coordinate of the pixel.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
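For example (a minimal sketch, assuming ``renderer`` is an existing
:obj:`SDL_Renderer`), a semi-transparent green pixel could be drawn at
(10, 20) like this::
    color = 0x00FF0080  # R=0x00, G=0xFF, B=0x00, A=0x80
    pixelColor(renderer, 10, 20, color)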
"""
return _funcs["pixelColor"](renderer, x, y, color)
def pixelRGBA(renderer, x, y, r, g, b, a):
"""Draws a single pixel to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X (horizontal) coordinate of the pixel.
y (int): The Y (vertical) coordinate of the pixel.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["pixelRGBA"](renderer, x, y, r, g, b, a)
def hlineColor(renderer, x1, x2, y, color):
"""Draws a horizontal line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y (int): The Y (vertical) coordinate of the points of the line.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["hlineColor"](renderer, x1, x2, y, color)
def hlineRGBA(renderer, x1, x2, y, r, g, b, a):
"""Draws a horizontal line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y (int): The Y coordinate of the points of the line.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["hlineRGBA"](renderer, x1, x2, y, r, g, b, a)
def vlineColor(renderer, x, y1, y2, color):
"""Draws a vertical line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the points of the line.
y1 (int): The Y coordinate of the first point of the line.
y2 (int): The Y coordinate of the second point of the line.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["vlineColor"](renderer, x, y1, y2, color)
def vlineRGBA(renderer, x, y1, y2, r, g, b, a):
"""Draws a vertical line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the points of the line.
y1 (int): The Y coordinate of the first point of the line.
y2 (int): The Y coordinate of the second point of the line.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["vlineRGBA"](renderer, x, y1, y2, r, g, b, a)
def rectangleColor(renderer, x1, y1, x2, y2, color):
"""Draws an unfilled rectangle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["rectangleColor"](renderer, x1, y1, x2, y2, color)
def rectangleRGBA(renderer, x1, y1, x2, y2, r, g, b, a):
"""Draws an unfilled rectangle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["rectangleRGBA"](renderer, x1, y1, x2, y2, r, g, b, a)
def roundedRectangleColor(renderer, x1, y1, x2, y2, rad, color):
"""Draws an unfilled rectangle with rounded corners to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
rad (int): The radius of the arc of the rounded corners.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["roundedRectangleColor"](renderer, x1, y1, x2, y2, rad, color)
def roundedRectangleRGBA(renderer, x1, y1, x2, y2, rad, r, g, b, a):
"""Draws an unfilled rectangle with rounded corners to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
rad (int): The radius of the arc of the rounded corners.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["roundedRectangleRGBA"](renderer, x1, y1, x2, y2, rad, r, g, b, a)
def boxColor(renderer, x1, y1, x2, y2, color):
"""Draws a filled rectangle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["boxColor"](renderer, x1, y1, x2, y2, color)
def boxRGBA(renderer, x1, y1, x2, y2, r, g, b, a):
"""Draws a filled rectangle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["boxRGBA"](renderer, x1, y1, x2, y2, r, g, b, a)
def roundedBoxColor(renderer, x1, y1, x2, y2, rad, color):
"""Draws a filled rectangle with rounded corners to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
rad (int): The radius of the arc of the rounded corners.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["roundedBoxColor"](renderer, x1, y1, x2, y2, rad, color)
def roundedBoxRGBA(renderer, x1, y1, x2, y2, rad, r, g, b, a):
"""Draws a filled rectangle with rounded corners to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the top-right point of the rectangle.
y1 (int): The Y coordinate of the top-right point of the rectangle.
x2 (int): The X coordinate of the bottom-left point of the rectangle.
y2 (int): The Y coordinate of the bottom-left point of the rectangle.
rad (int): The radius of the arc of the rounded corners.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["roundedBoxRGBA"](renderer, x1, y1, x2, y2, rad, r, g, b, a)
def lineColor(renderer, x1, y1, x2, y2, color):
"""Draws a line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
y1 (int): The Y coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y2 (int): The Y coordinate of the second point of the line.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["lineColor"](renderer, x1, y1, x2, y2, color)
def lineRGBA(renderer, x1, y1, x2, y2, r, g, b, a):
"""Draws a line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
y1 (int): The Y coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y2 (int): The Y coordinate of the second point of the line.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["lineRGBA"](renderer, x1, y1, x2, y2, r, g, b, a)
def aalineColor(renderer, x1, y1, x2, y2, color):
"""Draws an anti-aliased line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
y1 (int): The Y coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y2 (int): The Y coordinate of the second point of the line.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aalineColor"](renderer, x1, y1, x2, y2, color)
def aalineRGBA(renderer, x1, y1, x2, y2, r, g, b, a):
"""Draws an anti-aliased line to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
y1 (int): The Y coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y2 (int): The Y coordinate of the second point of the line.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aalineRGBA"](renderer, x1, y1, x2, y2, r, g, b, a)
def thickLineColor(renderer, x1, y1, x2, y2, width, color):
"""Draws a line with a given thickness to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
y1 (int): The Y coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y2 (int): The Y coordinate of the second point of the line.
width (int): The thickness of the line in pixels (from 1 to 255).
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["thickLineColor"](renderer, x1, y1, x2, y2, width, color)
def thickLineRGBA(renderer, x1, y1, x2, y2, width, r, g, b, a):
"""Draws a line with a given thickness to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): The X coordinate of the first point of the line.
y1 (int): The Y coordinate of the first point of the line.
x2 (int): The X coordinate of the second point of the line.
y2 (int): The Y coordinate of the second point of the line.
width (int): The thickness of the line in pixels (from 1 to 255).
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["thickLineRGBA"](renderer, x1, y1, x2, y2, width, r, g, b, a)
def circleColor(renderer, x, y, rad, color):
"""Draws an unfilled circle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["circleColor"](renderer, x, y, rad, color)
def circleRGBA(renderer, x, y, rad, r, g, b, a):
"""Draws an unfilled circle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["circleRGBA"](renderer, x, y, rad, r, g, b, a)
def arcColor(renderer, x, y, rad, start, end, color):
"""Draws an arc to the renderer with a given color.
The start and end of the arc are defined in units of degrees, with 0 being
the bottom of the arc circle and increasing counter-clockwise (e.g. 90 being
the rightmost point of the circle).
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
start (int): The start of the arc (in degrees).
end (int): The end of the arc (in degrees).
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["arcColor"](renderer, x, y, rad, start, end, color)
def arcRGBA(renderer, x, y, rad, start, end, r, g, b, a):
"""Draws an arc to the renderer with a given color.
The start and end of the arc are defined in units of degrees, with 0 being
the bottom of the arc circle and increasing counter-clockwise (e.g. 90 being
the rightmost point of the circle).
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
start (int): The start of the arc (in degrees).
end (int): The end of the arc (in degrees).
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["arcRGBA"](renderer, x, y, rad, start, end, r, g, b, a)
def aacircleColor(renderer, x, y, rad, color):
"""Draws an anti-aliased unfilled circle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aacircleColor"](renderer, x, y, rad, color)
def aacircleRGBA(renderer, x, y, rad, r, g, b, a):
"""Draws an anti-aliased unfilled circle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aacircleRGBA"](renderer, x, y, rad, r, g, b, a)
def filledCircleColor(renderer, x, y, rad, color):
"""Draws a filled circle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledCircleColor"](renderer, x, y, rad, color)
def filledCircleRGBA(renderer, x, y, rad, r, g, b, a):
"""Draws a filled circle to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the circle.
y (int): The Y coordinate of the center of the circle.
rad (int): The radius (in pixels) of the circle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledCircleRGBA"](renderer, x, y, rad, r, g, b, a)
def ellipseColor(renderer, x, y, rx, ry, color):
"""Draws an unfilled ellipse to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the ellipse.
y (int): The Y coordinate of the center of the ellipse.
rx (int): The x-axis radius (i.e. width) of the ellipse.
ry (int): The y-axis radius (i.e. height) of the ellipse.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["ellipseColor"](renderer, x, y, rx, ry, color)
def ellipseRGBA(renderer, x, y, rx, ry, r, g, b, a):
"""Draws an unfilled ellipse to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the ellipse.
y (int): The Y coordinate of the center of the ellipse.
rx (int): The x-axis radius (i.e. width) of the ellipse.
ry (int): The y-axis radius (i.e. height) of the ellipse.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["ellipseRGBA"](renderer, x, y, rx, ry, r, g, b, a)
def aaellipseColor(renderer, x, y, rx, ry, color):
"""Draws an anti-aliased unfilled ellipse to the renderer in a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the ellipse.
y (int): The Y coordinate of the center of the ellipse.
rx (int): The x-axis radius (i.e. width) of the ellipse.
ry (int): The y-axis radius (i.e. height) of the ellipse.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aaellipseColor"](renderer, x, y, rx, ry, color)
def aaellipseRGBA(renderer, x, y, rx, ry, r, g, b, a):
"""Draws an anti-aliased unfilled ellipse to the renderer in a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the ellipse.
y (int): The Y coordinate of the center of the ellipse.
rx (int): The x-axis radius (i.e. width) of the ellipse.
ry (int): The y-axis radius (i.e. height) of the ellipse.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aaellipseRGBA"](renderer, x, y, rx, ry, r, g, b, a)
def filledEllipseColor(renderer, x, y, rx, ry, color):
"""Draws a filled ellipse to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the ellipse.
y (int): The Y coordinate of the center of the ellipse.
rx (int): The x-axis radius (i.e. width) of the ellipse.
ry (int): The y-axis radius (i.e. height) of the ellipse.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledEllipseColor"](renderer, x, y, rx, ry, color)
def filledEllipseRGBA(renderer, x, y, rx, ry, r, g, b, a):
"""Draws a filled ellipse to the renderer with a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the ellipse.
y (int): The Y coordinate of the center of the ellipse.
rx (int): The x-axis radius (i.e. width) of the ellipse.
ry (int): The y-axis radius (i.e. height) of the ellipse.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledEllipseRGBA"](renderer, x, y, rx, ry, r, g, b, a)
def pieColor(renderer, x, y, rad, start, end, color):
"""Draws an unfilled pie slice (i.e. circle segment) to the renderer.
The start and end of the pie are defined in units of degrees, with 0 being
the bottom of the circle and increasing counter-clockwise (e.g. 90 being
the rightmost point of the circle).
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the pie (circle).
y (int): The Y coordinate of the center of the pie (circle).
rad (int): The radius (in pixels) of the pie.
start (int): Start of the pie slice (in degrees).
end (int): End of the pie slice (in degrees).
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["pieColor"](renderer, x, y, rad, start, end, color)
def pieRGBA(renderer, x, y, rad, start, end, r, g, b, a):
"""Draws an unfilled pie slice (i.e. circle segment) to the renderer.
The start and end of the pie are defined in units of degrees, with 0 being
the bottom of the circle and increasing counter-clockwise (e.g. 90 being
the rightmost point of the circle).
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the pie (circle).
y (int): The Y coordinate of the center of the pie (circle).
rad (int): The radius (in pixels) of the pie.
start (int): Start of the pie slice (in degrees).
end (int): End of the pie slice (in degrees).
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["pieRGBA"](renderer, x, y, rad, start, end, r, g, b, a)
def filledPieColor(renderer, x, y, rad, start, end, color):
"""Draws a filled pie slice (i.e. circle segment) to the renderer.
The start and end of the pie are defined in units of degrees, with 0 being
the bottom of the circle and increasing counter-clockwise (e.g. 90 being
the rightmost point of the circle).
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the pie (circle).
y (int): The Y coordinate of the center of the pie (circle).
rad (int): The radius (in pixels) of the pie.
start (int): Start of the pie slice (in degrees).
end (int): End of the pie slice (in degrees).
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledPieColor"](renderer, x, y, rad, start, end, color)
def filledPieRGBA(renderer, x, y, rad, start, end, r, g, b, a):
"""Draws a filled pie slice (i.e. circle segment) to the renderer.
The start and end of the pie are defined in units of degrees, with 0 being
the bottom of the circle and increasing counter-clockwise (e.g. 90 being
the rightmost point of the circle).
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the center of the pie (circle).
y (int): The Y coordinate of the center of the pie (circle).
rad (int): The radius (in pixels) of the pie.
start (int): Start of the pie slice (in degrees).
end (int): End of the pie slice (in degrees).
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledPieRGBA"](renderer, x, y, rad, start, end, r, g, b, a)
def trigonColor(renderer, x1, y1, x2, y2, x3, y3, color):
"""Draws a trigon (i.e. triangle outline) to the renderer in a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): X coordinate of the first point of the triangle.
y1 (int): Y coordinate of the first point of the triangle.
x2 (int): X coordinate of the second point of the triangle.
y2 (int): Y coordinate of the second point of the triangle.
x3 (int): X coordinate of the third point of the triangle.
y3 (int): Y coordinate of the third point of the triangle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["trigonColor"](renderer, x1, y1, x2, y2, x3, y3, color)
def trigonRGBA(renderer, x1, y1, x2, y2, x3, y3, r, g, b, a):
"""Draws a trigon (i.e. triangle outline) to the renderer in a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): X coordinate of the first point of the triangle.
y1 (int): Y coordinate of the first point of the triangle.
x2 (int): X coordinate of the second point of the triangle.
y2 (int): Y coordinate of the second point of the triangle.
x3 (int): X coordinate of the third point of the triangle.
y3 (int): Y coordinate of the third point of the triangle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["trigonRGBA"](renderer, x1, y1, x2, y2, x3, y3, r, g, b, a)
def aatrigonColor(renderer, x1, y1, x2, y2, x3, y3, color):
"""Draws an anti-aliased trigon (i.e. triangle outline) to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): X coordinate of the first point of the triangle.
y1 (int): Y coordinate of the first point of the triangle.
x2 (int): X coordinate of the second point of the triangle.
y2 (int): Y coordinate of the second point of the triangle.
x3 (int): X coordinate of the third point of the triangle.
y3 (int): Y coordinate of the third point of the triangle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aatrigonColor"](renderer, x1, y1, x2, y2, x3, y3, color)
def aatrigonRGBA(renderer, x1, y1, x2, y2, x3, y3, r, g, b, a):
"""Draws an anti-aliased trigon (i.e. triangle outline) to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): X coordinate of the first point of the triangle.
y1 (int): Y coordinate of the first point of the triangle.
x2 (int): X coordinate of the second point of the triangle.
y2 (int): Y coordinate of the second point of the triangle.
x3 (int): X coordinate of the third point of the triangle.
y3 (int): Y coordinate of the third point of the triangle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aatrigonRGBA"](renderer, x1, y1, x2, y2, x3, y3, r, g, b, a)
def filledTrigonColor(renderer, x1, y1, x2, y2, x3, y3, color):
"""Draws a filled triangle to the renderer in a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): X coordinate of the first point of the triangle.
y1 (int): Y coordinate of the first point of the triangle.
x2 (int): X coordinate of the second point of the triangle.
y2 (int): Y coordinate of the second point of the triangle.
x3 (int): X coordinate of the third point of the triangle.
y3 (int): Y coordinate of the third point of the triangle.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledTrigonColor"](renderer, x1, y1, x2, y2, x3, y3, color)
def filledTrigonRGBA(renderer, x1, y1, x2, y2, x3, y3, r, g, b, a):
"""Draws a filled triangle to the renderer in a given color.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x1 (int): X coordinate of the first point of the triangle.
y1 (int): Y coordinate of the first point of the triangle.
x2 (int): X coordinate of the second point of the triangle.
y2 (int): Y coordinate of the second point of the triangle.
x3 (int): X coordinate of the third point of the triangle.
y3 (int): Y coordinate of the third point of the triangle.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledTrigonRGBA"](renderer, x1, y1, x2, y2, x3, y3, r, g, b, a)
def polygonColor(renderer, vx, vy, n, color):
"""Draws an unfilled polygon to the renderer in a given color.
Vertices are specified as ``ctypes.c_int16`` arrays, with two arrays of
equal size defining the x and y coordinates of the points making up the
polygon. To create these vertex arrays in Python, you can create lists and
cast them to ctypes arrays which can be passed directly to the function::
x_coords = [5, 5, 15, 15]
y_coords = [5, 10, 10, 5]
vx = (ctypes.c_int16 * len(x_coords))(*x_coords)
vy = (ctypes.c_int16 * len(y_coords))(*y_coords)
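These arrays can then be passed to the function along with the vertex
count (``renderer`` here is assumed to be an existing
:obj:`SDL_Renderer`)::
    polygonColor(renderer, vx, vy, len(x_coords), 0x0000FFFF)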
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the polygon's vertices.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the polygon's vertices.
n (int): The number of vertices in the polygon.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["polygonColor"](renderer, vx, vy, n, color)
def polygonRGBA(renderer, vx, vy, n, r, g, b, a):
"""Draws an unfilled polygon to the renderer in a given color.
See :func:`polygonColor` for more information on usage.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the polygon's vertices.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the polygon's vertices.
n (int): The number of vertices in the polygon.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["polygonRGBA"](renderer, vx, vy, n, r, g, b, a)
def aapolygonColor(renderer, vx, vy, n, color):
"""Draws an anti-aliased unfilled polygon to the renderer in a given color.
See :func:`polygonColor` for more information on usage.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the polygon's vertices.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the polygon's vertices.
n (int): The number of vertices in the polygon.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aapolygonColor"](renderer, vx, vy, n, color)
def aapolygonRGBA(renderer, vx, vy, n, r, g, b, a):
"""Draws an anti-aliased unfilled polygon to the renderer in a given color.
See :func:`polygonColor` for more information on usage.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the polygon's vertices.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the polygon's vertices.
n (int): The number of vertices in the polygon.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["aapolygonRGBA"](renderer, vx, vy, n, r, g, b, a)
def filledPolygonColor(renderer, vx, vy, n, color):
"""Draws a filled polygon to the renderer in a given color.
See :func:`polygonColor` for more information on usage.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the polygon's vertices.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the polygon's vertices.
n (int): The number of vertices in the polygon.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledPolygonColor"](renderer, vx, vy, n, color)
def filledPolygonRGBA(renderer, vx, vy, n, r, g, b, a):
"""Draws a filled polygon to the renderer in a given color.
See :func:`polygonColor` for more information on usage.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the polygon's vertices.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the polygon's vertices.
n (int): The number of vertices in the polygon.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["filledPolygonRGBA"](renderer, vx, vy, n, r, g, b, a)
def texturedPolygon(renderer, vx, vy, n, texture, texture_dx, texture_dy):
"""Draws a polygon to the renderer with a given texture.
The location of the texture is relative to the top-left corner of the
renderer, as opposed to being relative to the polygon itself. As such,
both the vertex coordinates and texture coordinates need to be adjusted
equally to render a polygon with the same texture placement at a different
location.
The texture must be associated with the same renderer used to draw the
polygon.
See :func:`polygonColor` for more information on usage.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the polygon's vertices.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the polygon's vertices.
n (int): The number of vertices in the polygon.
texture (:obj:`SDL_Texture`): The texture with which to fill the
polygon.
texture_dx (int): The X offset of the texture relative to the top-left
corner of the renderer.
texture_dy (int): The Y offset of the texture relative to the top-left
corner of the renderer.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["texturedPolygon"](
renderer, vx, vy, n, texture, texture_dx, texture_dy
)
def bezierColor(renderer, vx, vy, n, s, color):
"""Draws a Bezier curve to the renderer in a given color.
The first and last vertex are the start and end points of the Bezier,
respectively, with the points in between defining the control points of the
curve. For example, a 3rd order (i.e. cubic) Bezier would be defined using
4 vertices, with the two middle vertices being the control points.
See :func:`polygonColor` for more information on creating the vertex arrays
for this function.
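As a minimal sketch (coordinates are arbitrary, ``ctypes`` is assumed to
be imported, and ``renderer`` is an existing :obj:`SDL_Renderer`), a cubic
Bezier with two control points could be drawn like this::
    x_coords = [10, 40, 80, 110]   # start, control 1, control 2, end
    y_coords = [100, 20, 180, 100]
    vx = (ctypes.c_int16 * 4)(*x_coords)
    vy = (ctypes.c_int16 * 4)(*y_coords)
    bezierColor(renderer, vx, vy, 4, 20, 0xFF0000FF)  # 20 steps, solid red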
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the points of the Bezier curve.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the points of the Bezier curve.
n (int): The number of points in the bezier curve (minimum of 3).
s (int): The number of interpolation steps to use when drawing the
curve (minimum of 2). The higher the value, the smoother the curve.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["bezierColor"](renderer, vx, vy, n, s, color)
def bezierRGBA(renderer, vx, vy, n, s, r, g, b, a):
"""Draws a Bezier curve to the renderer in a given color.
See :func:`bezierColor` for more details on usage.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
vx (POINTER(:obj:`~ctypes.c_int16`)): Array containing the X coordinates
of the points of the Bezier curve.
vy (POINTER(:obj:`~ctypes.c_int16`)): Array containing the Y coordinates
of the points of the Bezier curve.
n (int): The number of points in the bezier curve (minimum of 3).
s (int): The number of interpolation steps to use when drawing the
curve (minimum of 2). The higher the value, the smoother the curve.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["bezierRGBA"](renderer, vx, vy, n, s, r, g, b, a)
def gfxPrimitivesSetFont(fontdata, cw, ch):
"""Sets or resets the current global GFX font.
The SDL_gfx library uses its own special format for bitmap fonts. Basically,
fonts are byte arrays where each glyph is made up of the same number of
bytes (as defined by the ``cw`` and ``ch`` arguments). The bytes are used as
a binary bitmask with 1s indicating the pixels of the character and 0s
indicating the transparent background. For example, the following is the
definition of the capital H glyph in the default font:
.. code-block:: c
/*
* 72 0x48 'H'
*/
0xc6, /* 11000110 */
0xc6, /* 11000110 */
0xc6, /* 11000110 */
0xfe, /* 11111110 */
0xc6, /* 11000110 */
0xc6, /* 11000110 */
0xc6, /* 11000110 */
0x00, /* 00000000 */
Each font must contain glyphs for all 256 possible 8-bit character codes. Since this is
a pretty painful format for defining your own fonts, you can load and use
any of the predefined SDL_gfx fonts from the following link:
https://github.com/ferzkopp/SDL_gfx/tree/master/Fonts
If no font has been set, SDL_gfx defaults to rendering with a built-in 8x8
pixel font.
.. note::
If anyone comes up with a way of converting standard bitmap fonts into
the SDL_gfx format, please let us know! That would be incredibly cool
and handy.
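As a rough sketch (assuming ``ctypes`` is imported and ``font_bytes``
already holds the raw bytes of an 8x8 SDL_gfx font), the font data could
be set like this::
    fontdata = (ctypes.c_uint8 * len(font_bytes))(*font_bytes)
    gfxPrimitivesSetFont(ctypes.cast(fontdata, ctypes.c_void_p), 8, 8)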
Args:
fontdata (:obj:`~ctypes.c_void_p`): A pointer to the start of the array
containing the new global font data, or a null pointer to reset the
global font to the default 8x8 font.
cw (int): The width (in bytes) of each character of the font. Ignored if
``fontdata`` is null.
ch (int): The height (in bytes) of each character of the font. Ignored
if ``fontdata`` is null.
"""
return _funcs["gfxPrimitivesSetFont"](fontdata, cw, ch)
def gfxPrimitivesSetFontRotation(rotation):
"""Sets the global character rotation for GFX font rendering.
Characters can only be rotated in 90 degree increments. Calling this
function will reset the character cache.
Args:
rotation (int): The number of clockwise 90-degree rotations to apply to
font characters when rendering text.
"""
return _funcs["gfxPrimitivesSetFontRotation"](rotation)
def characterColor(renderer, x, y, c, color):
"""Draws a single character with the current GFX font to the renderer.
Python characters can be converted to ASCII integers for use with this
function using the built-in :func:`ord` function (e.g. ``ord(u"A")``).
If the rendering color has any transparency, blending will be enabled.
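For example (assuming ``renderer`` is an existing :obj:`SDL_Renderer`),
a solid white letter A could be drawn at (16, 16) like this::
    characterColor(renderer, 16, 16, ord(u"A"), 0xFFFFFFFF)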
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the upper-left corner of the character.
y (int): The Y coordinate of the upper-left corner of the character.
c (int): The ASCII number (from 0 to 255) of the character.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["characterColor"](renderer, x, y, c, color)
def characterRGBA(renderer, x, y, c, r, g, b, a):
"""Draws a single character with the current GFX font to the renderer.
See :func:`characterColor` for more usage information.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the upper-left corner of the character.
y (int): The Y coordinate of the upper-left corner of the character.
c (int): The ASCII number (from 0 to 255) of the character.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["characterRGBA"](renderer, x, y, c, r, g, b, a)
def stringColor(renderer, x, y, s, color):
"""Draws an ASCII string with the current GFX font to the renderer.
If the rendering color has any transparency, blending will be enabled.
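For example (assuming ``renderer`` is an existing :obj:`SDL_Renderer`),
a line of solid green text could be drawn at (8, 8) like this::
    stringColor(renderer, 8, 8, b"Hello, world!", 0x00FF00FF)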
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the upper-left corner of the string.
y (int): The Y coordinate of the upper-left corner of the string.
s (bytes): The ASCII-encoded bytestring of text to render.
color (int): The color to draw with as a 32-bit ``0xRRGGBBAA`` integer
(e.g. ``0xFF0000FF`` for solid red).
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["stringColor"](renderer, x, y, s, color)
def stringRGBA(renderer, x, y, s, r, g, b, a):
"""Draws an ASCII string with the current GFX font to the renderer.
If the rendering color has any transparency, blending will be enabled.
Args:
renderer (:obj:`SDL_Renderer`): The renderer to draw on.
x (int): The X coordinate of the upper-left corner of the string.
y (int): The Y coordinate of the upper-left corner of the string.
s (bytes): The ASCII-encoded bytestring of text to render.
r (int): The red value (from 0 to 255) of the color to draw with.
g (int): The green value (from 0 to 255) of the color to draw with.
b (int): The blue value (from 0 to 255) of the color to draw with.
a (int): The alpha value (from 0 to 255) of the color to draw with.
Returns:
int: 0 on success, or -1 on failure.
"""
return _funcs["stringRGBA"](renderer, x, y, s, r, g, b, a)
def rotozoomSurface(src, angle, zoom, smooth):
"""Rotates & zooms a surface.
Rotates and zooms an :obj:`SDL_Surface` to a new output surface, with
optional anti-aliasing. If the surface is not 8-bit or 32-bit RGBA/ABGR, it
will be converted into a 32-bit RGBA format on the fly.
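As a minimal sketch (assuming ``surf`` is an existing :obj:`SDL_Surface`),
the surface could be rotated 45 degrees and doubled in size with
anti-aliasing like this::
    rotated = rotozoomSurface(surf, 45.0, 2.0, 1)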
Args:
src (:obj:`SDL_Surface`): The surface to rotate and zoom.
angle (float): The angle to rotate the surface (in degrees).
zoom (float): The scaling factor for the surface.
smooth (int): If set to 1, the output image will be anti-aliased. If set
to 0, no anti-aliasing will be performed. Must be either 0 or 1.
Returns:
:obj:`SDL_Surface`: A new output surface with zoom & rotation applied.
"""
return _funcs["rotozoomSurface"](src, angle, zoom, smooth)
def rotozoomSurfaceXY(src, angle, zoomx, zoomy, smooth):
"""Rotates & zooms a surface with different x & y scaling factors.
Rotates and zooms an :obj:`SDL_Surface` to a new output surface, with
optional anti-aliasing. If the surface is not 8-bit or 32-bit RGBA/ABGR, it
will be converted into a 32-bit RGBA format on the fly.
Args:
src (:obj:`SDL_Surface`): The surface to rotate and zoom.
angle (float): The angle to rotate the surface (in degrees).
zoomx (float): The x-axis (horizontal) scaling factor.
zoomy (float): The y-axis (vertical) scaling factor.
smooth (int): If set to 1, the output image will be anti-aliased. If set
to 0, no anti-aliasing will be performed. Must be either 0 or 1.
Returns:
:obj:`SDL_Surface`: A new output surface with zoom & rotation applied.
"""
return _funcs["rotozoomSurfaceXY"](src, angle, zoomx, zoomy, smooth)
def rotozoomSurfaceSize(width, height, angle, zoom, dstwidth, dstheight):
"""Returns the output surface size of a :func:`rotozoomSurface` call.
This function outputs the calculated height and width by reference to the
``dstwidth`` and ``dstheight`` arguments, and does not return any value
itself.
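For example (a minimal sketch for a 640x480 source surface, assuming
``ctypes`` is imported)::
    dstwidth, dstheight = ctypes.c_int(0), ctypes.c_int(0)
    rotozoomSurfaceSize(
        640, 480, 45.0, 2.0, ctypes.byref(dstwidth), ctypes.byref(dstheight)
    )
    new_size = (dstwidth.value, dstheight.value)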
Args:
width (int): The width (in pixels) of the source surface.
height (int): The height (in pixels) of the source surface.
angle (float): The angle to rotate the surface (in degrees).
zoom (float): The scaling factor for the surface.
dstwidth (byref(`c_int`)): A reference to the ctypes int where the
calculated width of the output surface will be stored.
dstheight (byref(`c_int`)): A reference to the ctypes int where the
calculated height of the output surface will be stored.
"""
return _funcs["rotozoomSurfaceSize"](
width, height, angle, zoom, dstwidth, dstheight
)
def rotozoomSurfaceSizeXY(width, height, angle, zoomx, zoomy, dstwidth, dstheight):
"""Returns the output surface size of a :func:`rotozoomSurfaceXY` call.
This function outputs the calculated height and width by reference to the
``dstwidth`` and ``dstheight`` arguments, and does not return any value
itself.
Args:
width (int): The width (in pixels) of the source surface.
height (int): The height (in pixels) of the source surface.
angle (float): The angle to rotate the surface (in degrees).
zoomx (float): The x-axis (horizontal) scaling factor.
zoomy (float): The y-axis (vertical) scaling factor.
dstwidth (byref(`c_int`)): A reference to the ctypes int where the
calculated width of the output surface will be stored.
dstheight (byref(`c_int`)): A reference to the ctypes int where the
calculated height of the output surface will be stored.
"""
return _funcs["rotozoomSurfaceSizeXY"](
width, height, angle, zoomx, zoomy, dstwidth, dstheight
)
def zoomSurface(src, zoomx, zoomy, smooth):
"""Zooms a surface with different x & y scaling factors.
This function renders to a new surface, with optional anti-aliasing. If a
zoom factor is negative, the image will be flipped along that axis. If the
surface is not 8-bit or 32-bit RGBA/ABGR, it will be converted into a 32-bit
RGBA format on the fly.
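For example (assuming ``surf`` is an existing :obj:`SDL_Surface`), the
surface could be mirrored horizontally at its original size like this::
    flipped = zoomSurface(surf, -1.0, 1.0, 0)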
Args:
src (:obj:`SDL_Surface`): The surface to zoom.
zoomx (float): The x-axis (horizontal) zoom factor.
zoomy (float): The y-axis (vertical) zoom factor.
smooth (int): If set to 1, the output image will be anti-aliased. If set
to 0, no anti-aliasing will be performed. Must be either 0 or 1.
Returns:
:obj:`SDL_Surface`: A new output surface with zoom applied.
"""
return _funcs["zoomSurface"](src, zoomx, zoomy, smooth)
def zoomSurfaceSize(width, height, zoomx, zoomy, dstwidth, dstheight):
"""Returns the output surface size of a :func:`zoomSurface` call.
This function outputs the calculated height and width by reference to the
``dstwidth`` and ``dstheight`` arguments, and does not return any value
itself.
Args:
width (int): The width (in pixels) of the source surface.
height (int): The height (in pixels) of the source surface.
zoomx (float): The x-axis (horizontal) scaling factor.
zoomy (float): The y-axis (vertical) scaling factor.
dstwidth (byref(`c_int`)): A reference to the ctypes int where the
calculated width of the output surface will be stored.
dstheight (byref(`c_int`)): A reference to the ctypes int where the
calculated height of the output surface will be stored.
"""
return _funcs["zoomSurfaceSize"](width, height, zoomx, zoomy, dstwidth, dstheight)
def shrinkSurface(src, factorx, factory):
"""Shrinks a surface by an integer ratio using averaging.
This function renders to a new surface, meaning that the original surface is
not modified. The output surface is anti-aliased by averaging the source
RGBA information. If the surface is not 8-bit or 32-bit RGBA/ABGR, it will
be converted into a 32-bit RGBA format on the fly.
Args:
src (:obj:`SDL_Surface`): The surface to shrink.
factorx (int): The x-axis (horizontal) shrink factor (e.g. 2 = 2x smaller).
factory (int): The y-axis (vertical) shrink factor (e.g. 2 = 2x smaller).
Returns:
:obj:`SDL_Surface`: The new shrunken surface.
"""
return _funcs["shrinkSurface"](src, factorx, factory)
def rotateSurface90Degrees(src, numClockwiseTurns):
"""Rotates an SDL surface clockwise in increments of 90 degrees.
This is faster than the rotozoom functions, since no scanning or interpolation
takes place. Input surface must be 8-bit, 16-bit, 24-bit, or 32-bit.
Args:
src (:obj:`SDL_Surface`): The source surface to rotate.
numClockwiseTurns (int): The number of clockwise 90 degree rotations to
apply to the source.
Returns:
:obj:`SDL_Surface`: The new rotated surface, or `None` if the source
surface was not a compatible format.
"""
return _funcs["rotateSurface90Degrees"](src, numClockwiseTurns)
| 43.905988
| 135
| 0.65223
| 12,010
| 78,460
| 4.214738
| 0.051374
| 0.052451
| 0.045042
| 0.038721
| 0.835931
| 0.826053
| 0.807424
| 0.786819
| 0.768506
| 0.743772
| 0
| 0.033693
| 0.247973
| 78,460
| 1,786
| 136
| 43.930571
| 0.824198
| 0.698878
| 0
| 0
| 0
| 0
| 0.164069
| 0.018137
| 0
| 0
| 0
| 0
| 0
| 1
| 0.254355
| false
| 0
| 0.027875
| 0
| 0.543554
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
73a953f7372357f2643602840c3fd54084cafc7b
| 16,711
|
py
|
Python
|
mayan/apps/documents/tests/test_document_version_views.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 4
|
2021-09-02T00:16:30.000Z
|
2021-09-09T22:25:15.000Z
|
mayan/apps/documents/tests/test_document_version_views.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 86
|
2021-09-01T23:53:02.000Z
|
2021-09-20T02:25:10.000Z
|
mayan/apps/documents/tests/test_document_version_views.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 70
|
2021-09-01T12:54:51.000Z
|
2022-02-16T00:53:18.000Z
|
from mayan.apps.file_caching.events import event_cache_partition_purged
from mayan.apps.file_caching.models import CachePartitionFile
from mayan.apps.file_caching.permissions import permission_cache_partition_purge
from mayan.apps.file_caching.tests.mixins import CachePartitionViewTestMixin
from mayan.apps.messaging.events import event_message_created
from mayan.apps.messaging.models import Message
from mayan.apps.storage.events import event_download_file_created
from mayan.apps.storage.models import DownloadFile
from ..events import (
event_document_version_edited, event_document_version_exported,
event_document_viewed
)
from ..permissions import (
permission_document_version_edit, permission_document_version_export,
permission_document_version_print, permission_document_version_view
)
from .base import (
GenericDocumentViewTestCase, GenericTransactionDocumentViewTestCase
)
from .mixins.document_version_mixins import (
DocumentVersionTestMixin, DocumentVersionViewTestMixin
)
class DocumentVersionViewTestCase(
DocumentVersionTestMixin, DocumentVersionViewTestMixin,
GenericDocumentViewTestCase
):
def test_document_version_active_view_no_permission(self):
self._create_test_document_version()
self.test_document.versions.first().active_set()
self._clear_events()
response = self._request_test_document_version_active_view()
self.assertEqual(response.status_code, 404)
self.test_document_version.refresh_from_db()
self.assertFalse(self.test_document_version.active)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_active_view_with_access(self):
self._create_test_document_version()
self.test_document.versions.first().active_set()
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_edit
)
self._clear_events()
response = self._request_test_document_version_active_view()
self.assertEqual(response.status_code, 302)
self.test_document_version.refresh_from_db()
self.assertTrue(self.test_document_version.active)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, self.test_document)
self.assertEqual(events[0].actor, self.test_document_version)
self.assertEqual(events[0].target, self.test_document_version)
self.assertEqual(events[0].verb, event_document_version_edited.id)
def test_trashed_document_version_active_view_with_access(self):
self._create_test_document_version()
self.test_document.versions.first().active_set()
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_edit
)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_version_active_view()
self.assertEqual(response.status_code, 404)
self.test_document_version.refresh_from_db()
self.assertFalse(self.test_document_version.active)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_edit_view_no_permission(self):
document_version_comment = self.test_document_version.comment
self._clear_events()
response = self._request_test_document_version_edit_view()
self.assertEqual(response.status_code, 404)
self.test_document_version.refresh_from_db()
self.assertEqual(
self.test_document_version.comment,
document_version_comment
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_edit_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_edit
)
document_version_comment = self.test_document_version.comment
self._clear_events()
response = self._request_test_document_version_edit_view()
self.assertEqual(response.status_code, 302)
self.test_document_version.refresh_from_db()
self.assertNotEqual(
self.test_document_version.comment,
document_version_comment
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, self.test_document)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_document_version)
self.assertEqual(events[0].verb, event_document_version_edited.id)
def test_trashed_document_version_edit_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_edit
)
document_version_comment = self.test_document_version.comment
self.test_document.delete()
self._clear_events()
response = self._request_test_document_version_edit_view()
self.assertEqual(response.status_code, 404)
self.test_document_version.refresh_from_db()
self.assertEqual(
self.test_document_version.comment,
document_version_comment
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_list_view_no_permission(self):
self._clear_events()
response = self._request_test_document_version_list_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_list_view_with_access(self):
self.grant_access(
obj=self.test_document,
permission=permission_document_version_view
)
self._clear_events()
response = self._request_test_document_version_list_view()
self.assertContains(
response=response, status_code=200,
text=str(self.test_document_version)
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_trashed_document_version_list_view_with_access(self):
self.grant_access(
obj=self.test_document,
permission=permission_document_version_view
)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_version_list_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_preview_view_no_permission(self):
self._clear_events()
response = self._request_test_document_version_preview_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_preview_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_view
)
self._clear_events()
response = self._request_test_document_version_preview_view()
self.assertContains(
response=response, status_code=200,
text=str(self.test_document_version)
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, self.test_document_version)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_document)
self.assertEqual(events[0].verb, event_document_viewed.id)
def test_trashed_document_version_preview_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_view
)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_version_preview_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_print_form_view_no_permission(self):
self._clear_events()
response = self._request_test_document_version_print_form_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_print_form_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_print
)
self._clear_events()
response = self._request_test_document_version_print_form_view()
self.assertEqual(response.status_code, 200)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_trashed_document_version_print_form_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_print
)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_version_print_form_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_print_view_no_permission(self):
self._clear_events()
response = self._request_test_document_version_print_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_print_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_print
)
self._clear_events()
response = self._request_test_document_version_print_view()
self.assertEqual(response.status_code, 200)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_trashed_document_version_print_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_print
)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_version_print_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
class DocumentVersionExportViewTestCase(
DocumentVersionTestMixin, DocumentVersionViewTestMixin,
GenericTransactionDocumentViewTestCase
):
"""
Use a transaction test case to test the transaction.on_commit code
    of the export task. Convert back to a normal test case and use
    `captureOnCommitCallbacks` once upgraded to Django 3.2:
https://github.com/django/django/commit/e906ff6fca291fc0bfa0d52f05817ee9dae0335d
"""
def test_document_version_export_view_no_permission(self):
download_file_count = DownloadFile.objects.count()
self._clear_events()
response = self._request_test_document_version_export_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
DownloadFile.objects.count(), download_file_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_export_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_export
)
download_file_count = DownloadFile.objects.count()
self._clear_events()
response = self._request_test_document_version_export_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
DownloadFile.objects.count(), download_file_count + 1
)
test_download_file = DownloadFile.objects.first()
test_message = Message.objects.first()
events = self._get_test_events()
self.assertEqual(events.count(), 3)
self.assertEqual(events[0].action_object, self.test_document_version)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, test_download_file)
self.assertEqual(events[0].verb, event_download_file_created.id)
self.assertEqual(events[1].action_object, test_download_file)
self.assertEqual(events[1].actor, self._test_case_user)
self.assertEqual(events[1].target, self.test_document_version)
self.assertEqual(events[1].verb, event_document_version_exported.id)
self.assertEqual(events[2].action_object, None)
self.assertEqual(events[2].actor, test_message)
self.assertEqual(events[2].target, test_message)
self.assertEqual(events[2].verb, event_message_created.id)
def test_trashed_document_version_export_view_with_access(self):
self.grant_access(
obj=self.test_document_version,
permission=permission_document_version_export
)
download_file_count = DownloadFile.objects.count()
self.test_document.delete()
self._clear_events()
response = self._request_test_document_version_export_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
DownloadFile.objects.count(), download_file_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
class DocumentVersionCachePurgeViewTestCase(
CachePartitionViewTestMixin, GenericDocumentViewTestCase
):
def test_document_version_cache_purge_no_permission(self):
self.test_object = self.test_document_version
self._inject_test_object_content_type()
self.test_document_version.version_pages.first().generate_image()
test_document_version_cache_partitions = self.test_document_version.get_cache_partitions()
cache_partition_version_count = CachePartitionFile.objects.filter(
partition__in=test_document_version_cache_partitions
).count()
self._clear_events()
response = self._request_test_object_file_cache_partition_purge_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
CachePartitionFile.objects.filter(
partition__in=test_document_version_cache_partitions
).count(), cache_partition_version_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_version_cache_purge_with_access(self):
self.test_object = self.test_document_version
self._inject_test_object_content_type()
self.grant_access(
obj=self.test_document_version,
permission=permission_cache_partition_purge
)
self.test_document_version.version_pages.first().generate_image()
test_document_version_cache_partitions = self.test_document_version.get_cache_partitions()
cache_partition_version_count = CachePartitionFile.objects.filter(
partition__in=test_document_version_cache_partitions
).count()
cache_partitions = self.test_document_version.get_cache_partitions()
self._clear_events()
response = self._request_test_object_file_cache_partition_purge_view()
self.assertEqual(response.status_code, 302)
self.assertNotEqual(
CachePartitionFile.objects.filter(
partition__in=test_document_version_cache_partitions
).count(), cache_partition_version_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 2)
self.assertEqual(events[0].action_object, self.test_document_version)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, cache_partitions[0])
self.assertEqual(events[0].verb, event_cache_partition_purged.id)
self.assertEqual(events[1].action_object, self.test_document_version)
self.assertEqual(events[1].actor, self._test_case_user)
self.assertEqual(events[1].target, cache_partitions[1])
self.assertEqual(events[1].verb, event_cache_partition_purged.id)
| 34.598344
| 98
| 0.713901
| 1,888
| 16,711
| 5.889301
| 0.066208
| 0.172677
| 0.1555
| 0.093084
| 0.855742
| 0.841173
| 0.806997
| 0.798093
| 0.786761
| 0.773001
| 0
| 0.011328
| 0.207648
| 16,711
| 482
| 99
| 34.670124
| 0.828412
| 0.016157
| 0
| 0.707246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257971
| 1
| 0.066667
| false
| 0
| 0.034783
| 0
| 0.110145
| 0.049275
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73d7fe9e79fd38dba8f2c1e49962a92f84b6c1da
| 195,497
|
py
|
Python
|
rivendell/mitre/attackNavigator.py
|
ezaspy/elrond
|
3e358f20112be403b895d873a7e3892ce4181d8b
|
[
"MIT"
] | 1
|
2021-03-29T08:05:31.000Z
|
2021-03-29T08:05:31.000Z
|
rivendell/mitre/attackNavigator.py
|
ezaspy/elrond
|
3e358f20112be403b895d873a7e3892ce4181d8b
|
[
"MIT"
] | 17
|
2020-11-24T11:00:38.000Z
|
2021-05-18T18:20:21.000Z
|
rivendell/mitre/attackNavigator.py
|
ezaspy/elrond
|
3e358f20112be403b895d873a7e3892ce4181d8b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3 -tt
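# The nav_pairs mapping below hardcodes one pre-serialised ATT&CK Navigator
# "techniques" entry per technique ID, repeated once for each tactic the
# technique belongs to. A minimal sketch of how a single fragment could be
# built programmatically (hypothetical helper, not used by this module):
def _example_nav_entry(technique_id, tactic):
    # Render one Navigator technique entry in the same shape as the hardcoded
    # strings in nav_pairs below.
    return (
        '{\n            "techniqueID": "%s",\n            "tactic": "%s",\n'
        '            "color": "#00ACB4",\n            "comment": "",\n'
        '            "enabled": true,\n            "metadata": [],\n'
        '            "showSubtechniques": false\n        },\n        '
    ) % (technique_id, tactic)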
def doAttackNavigator(case, nav_list, eachtechnique):
nav_pairs = {
"T1001": "{\n \"techniqueID\": \"T1001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1001.001": "{\n \"techniqueID\": \"T1001.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1001.002": "{\n \"techniqueID\": \"T1001.002\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1001.003": "{\n \"techniqueID\": \"T1001.003\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003": "{\n \"techniqueID\": \"T1003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.001": "{\n \"techniqueID\": \"T1003.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.002": "{\n \"techniqueID\": \"T1003.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.003": "{\n \"techniqueID\": \"T1003.003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.004": "{\n \"techniqueID\": \"T1003.004\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.005": "{\n \"techniqueID\": \"T1003.005\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.006": "{\n \"techniqueID\": \"T1003.006\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.007": "{\n \"techniqueID\": \"T1003.007\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1003.008": "{\n \"techniqueID\": \"T1003.008\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1005": "{\n \"techniqueID\": \"T1005\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1006": "{\n \"techniqueID\": \"T1006\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1007": "{\n \"techniqueID\": \"T1007\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1008": "{\n \"techniqueID\": \"T1008\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1010": "{\n \"techniqueID\": \"T1010\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1011": "{\n \"techniqueID\": \"T1011\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1011.001": "{\n \"techniqueID\": \"T1011.001\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1012": "{\n \"techniqueID\": \"T1012\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1014": "{\n \"techniqueID\": \"T1014\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1016": "{\n \"techniqueID\": \"T1016\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1016.001": "{\n \"techniqueID\": \"T1016.001\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1018": "{\n \"techniqueID\": \"T1018\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1020": "{\n \"techniqueID\": \"T1020\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1020.001": "{\n \"techniqueID\": \"T1020.001\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1021": "{\n \"techniqueID\": \"T1021\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1021.001": "{\n \"techniqueID\": \"T1021.001\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1021.002": "{\n \"techniqueID\": \"T1021.002\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1021.003": "{\n \"techniqueID\": \"T1021.003\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1021.004": "{\n \"techniqueID\": \"T1021.004\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1021.005": "{\n \"techniqueID\": \"T1021.005\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1021.006": "{\n \"techniqueID\": \"T1021.006\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1025": "{\n \"techniqueID\": \"T1025\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1027": "{\n \"techniqueID\": \"T1027\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1027.001": "{\n \"techniqueID\": \"T1027.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1027.002": "{\n \"techniqueID\": \"T1027.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1027.003": "{\n \"techniqueID\": \"T1027.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1027.004": "{\n \"techniqueID\": \"T1027.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1027.005": "{\n \"techniqueID\": \"T1027.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1029": "{\n \"techniqueID\": \"T1029\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1030": "{\n \"techniqueID\": \"T1030\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1033": "{\n \"techniqueID\": \"T1033\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1036": "{\n \"techniqueID\": \"T1036\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1036.001": "{\n \"techniqueID\": \"T1036.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1036.002": "{\n \"techniqueID\": \"T1036.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1036.003": "{\n \"techniqueID\": \"T1036.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1036.004": "{\n \"techniqueID\": \"T1036.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1036.005": "{\n \"techniqueID\": \"T1036.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1036.006": "{\n \"techniqueID\": \"T1036.006\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1037": "{\n \"techniqueID\": \"T1037\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1037.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1037.001": "{\n \"techniqueID\": \"T1037.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1037.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1037.002": "{\n \"techniqueID\": \"T1037.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1037.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1037.003": "{\n \"techniqueID\": \"T1037.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1037.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1037.004": "{\n \"techniqueID\": \"T1037.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1037.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1037.005": "{\n \"techniqueID\": \"T1037.005\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1037.005\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1039": "{\n \"techniqueID\": \"T1039\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1040": "{\n \"techniqueID\": \"T1040\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1040\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1041": "{\n \"techniqueID\": \"T1041\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1046": "{\n \"techniqueID\": \"T1046\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1047": "{\n \"techniqueID\": \"T1047\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1048": "{\n \"techniqueID\": \"T1048\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1048.001": "{\n \"techniqueID\": \"T1048.001\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1048.002": "{\n \"techniqueID\": \"T1048.002\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1048.003": "{\n \"techniqueID\": \"T1048.003\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1049": "{\n \"techniqueID\": \"T1049\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1052": "{\n \"techniqueID\": \"T1052\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1052.001": "{\n \"techniqueID\": \"T1052.001\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053": "{\n \"techniqueID\": \"T1053\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053.001": "{\n \"techniqueID\": \"T1053.001\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053.002": "{\n \"techniqueID\": \"T1053.002\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053.003": "{\n \"techniqueID\": \"T1053.003\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053.004": "{\n \"techniqueID\": \"T1053.004\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053.005": "{\n \"techniqueID\": \"T1053.005\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.005\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.005\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053.006": "{\n \"techniqueID\": \"T1053.006\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.006\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.006\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1053.007": "{\n \"techniqueID\": \"T1053.007\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.007\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1053.007\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055": "{\n \"techniqueID\": \"T1055\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.001": "{\n \"techniqueID\": \"T1055.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.002": "{\n \"techniqueID\": \"T1055.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.003": "{\n \"techniqueID\": \"T1055.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.004": "{\n \"techniqueID\": \"T1055.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.005": "{\n \"techniqueID\": \"T1055.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.005\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.008": "{\n \"techniqueID\": \"T1055.008\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.008\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.009": "{\n \"techniqueID\": \"T1055.009\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.009\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.011": "{\n \"techniqueID\": \"T1055.011\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.011\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.012": "{\n \"techniqueID\": \"T1055.012\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.012\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.013": "{\n \"techniqueID\": \"T1055.013\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.013\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1055.014": "{\n \"techniqueID\": \"T1055.014\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1055.014\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1056": "{\n \"techniqueID\": \"T1056\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1056\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1056.001": "{\n \"techniqueID\": \"T1056.001\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1056.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1056.002": "{\n \"techniqueID\": \"T1056.002\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1056.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1056.003": "{\n \"techniqueID\": \"T1056.003\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1056.003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1056.004": "{\n \"techniqueID\": \"T1056.004\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1056.004\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1057": "{\n \"techniqueID\": \"T1057\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059": "{\n \"techniqueID\": \"T1059\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.001": "{\n \"techniqueID\": \"T1059.001\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.002": "{\n \"techniqueID\": \"T1059.002\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.003": "{\n \"techniqueID\": \"T1059.003\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.004": "{\n \"techniqueID\": \"T1059.004\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.005": "{\n \"techniqueID\": \"T1059.005\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.006": "{\n \"techniqueID\": \"T1059.006\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.007": "{\n \"techniqueID\": \"T1059.007\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1059.008": "{\n \"techniqueID\": \"T1059.008\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1068": "{\n \"techniqueID\": \"T1068\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1069": "{\n \"techniqueID\": \"T1069\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1069.001": "{\n \"techniqueID\": \"T1069.001\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1069.002": "{\n \"techniqueID\": \"T1069.002\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1069.003": "{\n \"techniqueID\": \"T1069.003\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1070": "{\n \"techniqueID\": \"T1070\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1070.001": "{\n \"techniqueID\": \"T1070.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1070.002": "{\n \"techniqueID\": \"T1070.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1070.003": "{\n \"techniqueID\": \"T1070.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1070.004": "{\n \"techniqueID\": \"T1070.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1070.005": "{\n \"techniqueID\": \"T1070.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1070.006": "{\n \"techniqueID\": \"T1070.006\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1071": "{\n \"techniqueID\": \"T1071\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1071.001": "{\n \"techniqueID\": \"T1071.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1071.002": "{\n \"techniqueID\": \"T1071.002\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1071.003": "{\n \"techniqueID\": \"T1071.003\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1071.004": "{\n \"techniqueID\": \"T1071.004\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1072": "{\n \"techniqueID\": \"T1072\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1072\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1074": "{\n \"techniqueID\": \"T1074\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1074.001": "{\n \"techniqueID\": \"T1074.001\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1074.002": "{\n \"techniqueID\": \"T1074.002\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078": "{\n \"techniqueID\": \"T1078\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.001\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078": "{\n \"techniqueID\": \"T1078\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.001": "{\n \"techniqueID\": \"T1078.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.001\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.001": "{\n \"techniqueID\": \"T1078.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.002": "{\n \"techniqueID\": \"T1078.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.002\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.002": "{\n \"techniqueID\": \"T1078.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.003": "{\n \"techniqueID\": \"T1078.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.003\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.003": "{\n \"techniqueID\": \"T1078.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.004": "{\n \"techniqueID\": \"T1078.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.004\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1078.004": "{\n \"techniqueID\": \"T1078.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1078.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1080": "{\n \"techniqueID\": \"T1080\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1082": "{\n \"techniqueID\": \"T1082\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1083": "{\n \"techniqueID\": \"T1083\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1087": "{\n \"techniqueID\": \"T1087\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1087.001": "{\n \"techniqueID\": \"T1087.001\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1087.002": "{\n \"techniqueID\": \"T1087.002\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1087.003": "{\n \"techniqueID\": \"T1087.003\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1087.004": "{\n \"techniqueID\": \"T1087.004\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1090": "{\n \"techniqueID\": \"T1090\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1090.001": "{\n \"techniqueID\": \"T1090.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1090.002": "{\n \"techniqueID\": \"T1090.002\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1090.003": "{\n \"techniqueID\": \"T1090.003\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1090.004": "{\n \"techniqueID\": \"T1090.004\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1091": "{\n \"techniqueID\": \"T1091\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1091\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1092": "{\n \"techniqueID\": \"T1092\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1095": "{\n \"techniqueID\": \"T1095\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1098": "{\n \"techniqueID\": \"T1098\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1098.001": "{\n \"techniqueID\": \"T1098.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1098.002": "{\n \"techniqueID\": \"T1098.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1098.003": "{\n \"techniqueID\": \"T1098.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1098.004": "{\n \"techniqueID\": \"T1098.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1102": "{\n \"techniqueID\": \"T1102\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1102.001": "{\n \"techniqueID\": \"T1102.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1102.002": "{\n \"techniqueID\": \"T1102.002\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1102.003": "{\n \"techniqueID\": \"T1102.003\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1104": "{\n \"techniqueID\": \"T1104\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1105": "{\n \"techniqueID\": \"T1105\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1106": "{\n \"techniqueID\": \"T1106\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1110": "{\n \"techniqueID\": \"T1110\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1110.001": "{\n \"techniqueID\": \"T1110.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1110.002": "{\n \"techniqueID\": \"T1110.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1110.003": "{\n \"techniqueID\": \"T1110.003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1110.004": "{\n \"techniqueID\": \"T1110.004\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1111": "{\n \"techniqueID\": \"T1111\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1112": "{\n \"techniqueID\": \"T1112\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1113": "{\n \"techniqueID\": \"T1113\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1114": "{\n \"techniqueID\": \"T1114\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1114.001": "{\n \"techniqueID\": \"T1114.001\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1114.002": "{\n \"techniqueID\": \"T1114.002\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1114.003": "{\n \"techniqueID\": \"T1114.003\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1115": "{\n \"techniqueID\": \"T1115\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1119": "{\n \"techniqueID\": \"T1119\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1120": "{\n \"techniqueID\": \"T1120\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1123": "{\n \"techniqueID\": \"T1123\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1124": "{\n \"techniqueID\": \"T1124\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1125": "{\n \"techniqueID\": \"T1125\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1127": "{\n \"techniqueID\": \"T1127\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1127.001": "{\n \"techniqueID\": \"T1127.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1129": "{\n \"techniqueID\": \"T1129\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1132": "{\n \"techniqueID\": \"T1132\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1132.001": "{\n \"techniqueID\": \"T1132.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1132.002": "{\n \"techniqueID\": \"T1132.002\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1133": "{\n \"techniqueID\": \"T1133\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1133\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1134": "{\n \"techniqueID\": \"T1134\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1134.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1134.001": "{\n \"techniqueID\": \"T1134.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1134.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1134.002": "{\n \"techniqueID\": \"T1134.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1134.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1134.003": "{\n \"techniqueID\": \"T1134.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1134.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1134.004": "{\n \"techniqueID\": \"T1134.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1134.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1134.005": "{\n \"techniqueID\": \"T1134.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1134.005\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1135": "{\n \"techniqueID\": \"T1135\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1136": "{\n \"techniqueID\": \"T1136\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1136.001": "{\n \"techniqueID\": \"T1136.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1136.002": "{\n \"techniqueID\": \"T1136.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1136.003": "{\n \"techniqueID\": \"T1136.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1137": "{\n \"techniqueID\": \"T1137\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1137.001": "{\n \"techniqueID\": \"T1137.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1137.002": "{\n \"techniqueID\": \"T1137.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1137.003": "{\n \"techniqueID\": \"T1137.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1137.004": "{\n \"techniqueID\": \"T1137.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1137.005": "{\n \"techniqueID\": \"T1137.005\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1137.006": "{\n \"techniqueID\": \"T1137.006\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1140": "{\n \"techniqueID\": \"T1140\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1176": "{\n \"techniqueID\": \"T1176\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1185": "{\n \"techniqueID\": \"T1185\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1187": "{\n \"techniqueID\": \"T1187\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1189": "{\n \"techniqueID\": \"T1189\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1190": "{\n \"techniqueID\": \"T1190\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1195": "{\n \"techniqueID\": \"T1195\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1195.001": "{\n \"techniqueID\": \"T1195.001\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1195.002": "{\n \"techniqueID\": \"T1195.002\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1195.003": "{\n \"techniqueID\": \"T1195.003\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1197": "{\n \"techniqueID\": \"T1197\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1197\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1199": "{\n \"techniqueID\": \"T1199\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1200": "{\n \"techniqueID\": \"T1200\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1201": "{\n \"techniqueID\": \"T1201\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1202": "{\n \"techniqueID\": \"T1202\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1203": "{\n \"techniqueID\": \"T1203\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1204": "{\n \"techniqueID\": \"T1204\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1204.001": "{\n \"techniqueID\": \"T1204.001\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1204.002": "{\n \"techniqueID\": \"T1204.002\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1204.003": "{\n \"techniqueID\": \"T1204.003\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1205": "{\n \"techniqueID\": \"T1205\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1205.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1205": "{\n \"techniqueID\": \"T1205\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1205.001": "{\n \"techniqueID\": \"T1205.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1205.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1205.001": "{\n \"techniqueID\": \"T1205.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1207": "{\n \"techniqueID\": \"T1207\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1210": "{\n \"techniqueID\": \"T1210\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1211": "{\n \"techniqueID\": \"T1211\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1212": "{\n \"techniqueID\": \"T1212\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1213": "{\n \"techniqueID\": \"T1213\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1213.001": "{\n \"techniqueID\": \"T1213.001\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1213.002": "{\n \"techniqueID\": \"T1213.002\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1216": "{\n \"techniqueID\": \"T1216\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1216.001": "{\n \"techniqueID\": \"T1216.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1217": "{\n \"techniqueID\": \"T1217\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218": "{\n \"techniqueID\": \"T1218\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.001": "{\n \"techniqueID\": \"T1218.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.002": "{\n \"techniqueID\": \"T1218.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.003": "{\n \"techniqueID\": \"T1218.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.004": "{\n \"techniqueID\": \"T1218.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.005": "{\n \"techniqueID\": \"T1218.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.007": "{\n \"techniqueID\": \"T1218.007\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.008": "{\n \"techniqueID\": \"T1218.008\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.009": "{\n \"techniqueID\": \"T1218.009\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.010": "{\n \"techniqueID\": \"T1218.010\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.011": "{\n \"techniqueID\": \"T1218.011\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1218.012": "{\n \"techniqueID\": \"T1218.012\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1219": "{\n \"techniqueID\": \"T1219\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1220": "{\n \"techniqueID\": \"T1220\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1221": "{\n \"techniqueID\": \"T1221\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1222": "{\n \"techniqueID\": \"T1222\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1222.001": "{\n \"techniqueID\": \"T1222.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1222.002": "{\n \"techniqueID\": \"T1222.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1480": "{\n \"techniqueID\": \"T1480\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1480.001": "{\n \"techniqueID\": \"T1480.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1482": "{\n \"techniqueID\": \"T1482\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1484": "{\n \"techniqueID\": \"T1484\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1484.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1484.001": "{\n \"techniqueID\": \"T1484.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1484.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1484.002": "{\n \"techniqueID\": \"T1484.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1484.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1485": "{\n \"techniqueID\": \"T1485\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1486": "{\n \"techniqueID\": \"T1486\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1489": "{\n \"techniqueID\": \"T1489\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1490": "{\n \"techniqueID\": \"T1490\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1491": "{\n \"techniqueID\": \"T1491\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1491.001": "{\n \"techniqueID\": \"T1491.001\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1491.002": "{\n \"techniqueID\": \"T1491.002\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1495": "{\n \"techniqueID\": \"T1495\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1496": "{\n \"techniqueID\": \"T1496\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1497": "{\n \"techniqueID\": \"T1497\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1497\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1497.001": "{\n \"techniqueID\": \"T1497.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1497.001\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1497.002": "{\n \"techniqueID\": \"T1497.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1497.002\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1497.003": "{\n \"techniqueID\": \"T1497.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1497.003\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1498": "{\n \"techniqueID\": \"T1498\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1498.001": "{\n \"techniqueID\": \"T1498.001\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1498.002": "{\n \"techniqueID\": \"T1498.002\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1499": "{\n \"techniqueID\": \"T1499\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1499.001": "{\n \"techniqueID\": \"T1499.001\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1499.002": "{\n \"techniqueID\": \"T1499.002\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1499.003": "{\n \"techniqueID\": \"T1499.003\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1499.004": "{\n \"techniqueID\": \"T1499.004\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1505": "{\n \"techniqueID\": \"T1505\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1505.001": "{\n \"techniqueID\": \"T1505.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1505.002": "{\n \"techniqueID\": \"T1505.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1505.003": "{\n \"techniqueID\": \"T1505.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1518": "{\n \"techniqueID\": \"T1518\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1518.001": "{\n \"techniqueID\": \"T1518.001\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1525": "{\n \"techniqueID\": \"T1525\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1526": "{\n \"techniqueID\": \"T1526\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1528": "{\n \"techniqueID\": \"T1528\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1529": "{\n \"techniqueID\": \"T1529\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1530": "{\n \"techniqueID\": \"T1530\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1531": "{\n \"techniqueID\": \"T1531\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1534": "{\n \"techniqueID\": \"T1534\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1535": "{\n \"techniqueID\": \"T1535\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1537": "{\n \"techniqueID\": \"T1537\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1538": "{\n \"techniqueID\": \"T1538\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1539": "{\n \"techniqueID\": \"T1539\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1542": "{\n \"techniqueID\": \"T1542\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1542.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1542.001": "{\n \"techniqueID\": \"T1542.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1542.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1542.002": "{\n \"techniqueID\": \"T1542.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1542.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1542.003": "{\n \"techniqueID\": \"T1542.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1542.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1542.004": "{\n \"techniqueID\": \"T1542.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1542.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1542.005": "{\n \"techniqueID\": \"T1542.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1542.005\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1543": "{\n \"techniqueID\": \"T1543\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1543\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1543.001": "{\n \"techniqueID\": \"T1543.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1543.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1543.002": "{\n \"techniqueID\": \"T1543.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1543.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1543.003": "{\n \"techniqueID\": \"T1543.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1543.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1543.004": "{\n \"techniqueID\": \"T1543.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1543.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546": "{\n \"techniqueID\": \"T1546\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.001": "{\n \"techniqueID\": \"T1546.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.002": "{\n \"techniqueID\": \"T1546.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.003": "{\n \"techniqueID\": \"T1546.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.004": "{\n \"techniqueID\": \"T1546.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.005": "{\n \"techniqueID\": \"T1546.005\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.005\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.006": "{\n \"techniqueID\": \"T1546.006\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.006\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.007": "{\n \"techniqueID\": \"T1546.007\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.007\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.008": "{\n \"techniqueID\": \"T1546.008\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.008\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.009": "{\n \"techniqueID\": \"T1546.009\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.009\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.010": "{\n \"techniqueID\": \"T1546.010\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.010\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.011": "{\n \"techniqueID\": \"T1546.011\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.011\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.012": "{\n \"techniqueID\": \"T1546.012\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.012\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.013": "{\n \"techniqueID\": \"T1546.013\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.013\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.014": "{\n \"techniqueID\": \"T1546.014\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.014\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1546.015": "{\n \"techniqueID\": \"T1546.015\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1546.015\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547": "{\n \"techniqueID\": \"T1547\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.001": "{\n \"techniqueID\": \"T1547.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.002": "{\n \"techniqueID\": \"T1547.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.003": "{\n \"techniqueID\": \"T1547.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.004": "{\n \"techniqueID\": \"T1547.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.005": "{\n \"techniqueID\": \"T1547.005\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.005\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.006": "{\n \"techniqueID\": \"T1547.006\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.006\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.007": "{\n \"techniqueID\": \"T1547.007\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.007\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.008": "{\n \"techniqueID\": \"T1547.008\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.008\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.009": "{\n \"techniqueID\": \"T1547.009\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.009\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.010": "{\n \"techniqueID\": \"T1547.010\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.010\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.011": "{\n \"techniqueID\": \"T1547.011\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.011\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.012": "{\n \"techniqueID\": \"T1547.012\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.012\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.013": "{\n \"techniqueID\": \"T1547.013\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.013\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1547.014": "{\n \"techniqueID\": \"T1547.014\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1547.014\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1548": "{\n \"techniqueID\": \"T1548\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1548\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1548.001": "{\n \"techniqueID\": \"T1548.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1548.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1548.002": "{\n \"techniqueID\": \"T1548.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1548.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1548.003": "{\n \"techniqueID\": \"T1548.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1548.003\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1548.004": "{\n \"techniqueID\": \"T1548.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1548.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1550": "{\n \"techniqueID\": \"T1550\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1550\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1550.001": "{\n \"techniqueID\": \"T1550.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1550.001\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1550.002": "{\n \"techniqueID\": \"T1550.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1550.002\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1550.003": "{\n \"techniqueID\": \"T1550.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1550.003\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1550.004": "{\n \"techniqueID\": \"T1550.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1550.004\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552": "{\n \"techniqueID\": \"T1552\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552.001": "{\n \"techniqueID\": \"T1552.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552.002": "{\n \"techniqueID\": \"T1552.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552.003": "{\n \"techniqueID\": \"T1552.003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552.004": "{\n \"techniqueID\": \"T1552.004\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552.005": "{\n \"techniqueID\": \"T1552.005\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552.006": "{\n \"techniqueID\": \"T1552.006\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1552.007": "{\n \"techniqueID\": \"T1552.007\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1553": "{\n \"techniqueID\": \"T1553\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1553.001": "{\n \"techniqueID\": \"T1553.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1553.002": "{\n \"techniqueID\": \"T1553.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1553.003": "{\n \"techniqueID\": \"T1553.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1553.004": "{\n \"techniqueID\": \"T1553.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1553.005": "{\n \"techniqueID\": \"T1553.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1553.006": "{\n \"techniqueID\": \"T1553.006\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1554": "{\n \"techniqueID\": \"T1554\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1555": "{\n \"techniqueID\": \"T1555\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1555.001": "{\n \"techniqueID\": \"T1555.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1555.002": "{\n \"techniqueID\": \"T1555.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1555.003": "{\n \"techniqueID\": \"T1555.003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1555.004": "{\n \"techniqueID\": \"T1555.004\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1555.005": "{\n \"techniqueID\": \"T1555.005\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556": "{\n \"techniqueID\": \"T1556\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1556.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556": "{\n \"techniqueID\": \"T1556\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.001": "{\n \"techniqueID\": \"T1556.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1556.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.001": "{\n \"techniqueID\": \"T1556.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.002": "{\n \"techniqueID\": \"T1556.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1556.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.002": "{\n \"techniqueID\": \"T1556.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.003": "{\n \"techniqueID\": \"T1556.003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1556.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.003": "{\n \"techniqueID\": \"T1556.003\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.004": "{\n \"techniqueID\": \"T1556.004\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1556.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1556.004": "{\n \"techniqueID\": \"T1556.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1557": "{\n \"techniqueID\": \"T1557\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1557.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1557.001": "{\n \"techniqueID\": \"T1557.001\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1557.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1557.002": "{\n \"techniqueID\": \"T1557.002\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1557.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1558": "{\n \"techniqueID\": \"T1558\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1558.001": "{\n \"techniqueID\": \"T1558.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1558.002": "{\n \"techniqueID\": \"T1558.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1558.003": "{\n \"techniqueID\": \"T1558.003\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1558.004": "{\n \"techniqueID\": \"T1558.004\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1559": "{\n \"techniqueID\": \"T1559\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1559.001": "{\n \"techniqueID\": \"T1559.001\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1559.002": "{\n \"techniqueID\": \"T1559.002\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1560": "{\n \"techniqueID\": \"T1560\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1560.001": "{\n \"techniqueID\": \"T1560.001\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1560.002": "{\n \"techniqueID\": \"T1560.002\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1560.003": "{\n \"techniqueID\": \"T1560.003\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1561": "{\n \"techniqueID\": \"T1561\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1561.001": "{\n \"techniqueID\": \"T1561.001\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1561.002": "{\n \"techniqueID\": \"T1561.002\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562": "{\n \"techniqueID\": \"T1562\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562.001": "{\n \"techniqueID\": \"T1562.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562.002": "{\n \"techniqueID\": \"T1562.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562.003": "{\n \"techniqueID\": \"T1562.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562.004": "{\n \"techniqueID\": \"T1562.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562.006": "{\n \"techniqueID\": \"T1562.006\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562.007": "{\n \"techniqueID\": \"T1562.007\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1562.008": "{\n \"techniqueID\": \"T1562.008\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1563": "{\n \"techniqueID\": \"T1563\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1563.001": "{\n \"techniqueID\": \"T1563.001\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1563.002": "{\n \"techniqueID\": \"T1563.002\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564": "{\n \"techniqueID\": \"T1564\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564.001": "{\n \"techniqueID\": \"T1564.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564.002": "{\n \"techniqueID\": \"T1564.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564.003": "{\n \"techniqueID\": \"T1564.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564.004": "{\n \"techniqueID\": \"T1564.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564.005": "{\n \"techniqueID\": \"T1564.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564.006": "{\n \"techniqueID\": \"T1564.006\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1564.007": "{\n \"techniqueID\": \"T1564.007\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1565": "{\n \"techniqueID\": \"T1565\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1565.001": "{\n \"techniqueID\": \"T1565.001\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1565.002": "{\n \"techniqueID\": \"T1565.002\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1565.003": "{\n \"techniqueID\": \"T1565.003\",\n \"tactic\": \"impact\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1566": "{\n \"techniqueID\": \"T1566\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1566.001": "{\n \"techniqueID\": \"T1566.001\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1566.002": "{\n \"techniqueID\": \"T1566.002\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1566.003": "{\n \"techniqueID\": \"T1566.003\",\n \"tactic\": \"initial-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1567": "{\n \"techniqueID\": \"T1567\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1567.001": "{\n \"techniqueID\": \"T1567.001\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1567.002": "{\n \"techniqueID\": \"T1567.002\",\n \"tactic\": \"exfiltration\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1568": "{\n \"techniqueID\": \"T1568\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1568.001": "{\n \"techniqueID\": \"T1568.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1568.002": "{\n \"techniqueID\": \"T1568.002\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1568.003": "{\n \"techniqueID\": \"T1568.003\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1569": "{\n \"techniqueID\": \"T1569\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1569.001": "{\n \"techniqueID\": \"T1569.001\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1569.002": "{\n \"techniqueID\": \"T1569.002\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1570": "{\n \"techniqueID\": \"T1570\",\n \"tactic\": \"lateral-movement\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1571": "{\n \"techniqueID\": \"T1571\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1572": "{\n \"techniqueID\": \"T1572\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1573": "{\n \"techniqueID\": \"T1573\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1573.001": "{\n \"techniqueID\": \"T1573.001\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1573.002": "{\n \"techniqueID\": \"T1573.002\",\n \"tactic\": \"command-and-control\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574": "{\n \"techniqueID\": \"T1574\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574": "{\n \"techniqueID\": \"T1574\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.001": "{\n \"techniqueID\": \"T1574.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.001\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.001": "{\n \"techniqueID\": \"T1574.001\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.002": "{\n \"techniqueID\": \"T1574.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.002\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.002": "{\n \"techniqueID\": \"T1574.002\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.004": "{\n \"techniqueID\": \"T1574.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.004\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.004": "{\n \"techniqueID\": \"T1574.004\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.005": "{\n \"techniqueID\": \"T1574.005\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.005\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.005": "{\n \"techniqueID\": \"T1574.005\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.006": "{\n \"techniqueID\": \"T1574.006\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.006\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.006": "{\n \"techniqueID\": \"T1574.006\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.007": "{\n \"techniqueID\": \"T1574.007\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.007\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.007": "{\n \"techniqueID\": \"T1574.007\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.008": "{\n \"techniqueID\": \"T1574.008\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.008\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.008": "{\n \"techniqueID\": \"T1574.008\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.009": "{\n \"techniqueID\": \"T1574.009\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.009\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.009": "{\n \"techniqueID\": \"T1574.009\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.010": "{\n \"techniqueID\": \"T1574.010\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.010\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.010": "{\n \"techniqueID\": \"T1574.010\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.011": "{\n \"techniqueID\": \"T1574.011\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.011\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.011": "{\n \"techniqueID\": \"T1574.011\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.012": "{\n \"techniqueID\": \"T1574.012\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1574.012\",\n \"tactic\": \"persistence\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1574.012": "{\n \"techniqueID\": \"T1574.012\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1578": "{\n \"techniqueID\": \"T1578\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1578.001": "{\n \"techniqueID\": \"T1578.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1578.002": "{\n \"techniqueID\": \"T1578.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1578.003": "{\n \"techniqueID\": \"T1578.003\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1578.004": "{\n \"techniqueID\": \"T1578.004\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1580": "{\n \"techniqueID\": \"T1580\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1599": "{\n \"techniqueID\": \"T1599\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1599.001": "{\n \"techniqueID\": \"T1599.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1600": "{\n \"techniqueID\": \"T1600\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1600.001": "{\n \"techniqueID\": \"T1600.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1600.002": "{\n \"techniqueID\": \"T1600.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1601": "{\n \"techniqueID\": \"T1601\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1601.001": "{\n \"techniqueID\": \"T1601.001\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1601.002": "{\n \"techniqueID\": \"T1601.002\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1602": "{\n \"techniqueID\": \"T1602\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1602.001": "{\n \"techniqueID\": \"T1602.001\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1602.002": "{\n \"techniqueID\": \"T1602.002\",\n \"tactic\": \"collection\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1606": "{\n \"techniqueID\": \"T1606\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1606.001": "{\n \"techniqueID\": \"T1606.001\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1606.002": "{\n \"techniqueID\": \"T1606.002\",\n \"tactic\": \"credential-access\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1609": "{\n \"techniqueID\": \"T1609\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1610": "{\n \"techniqueID\": \"T1610\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n {\n \"techniqueID\": \"T1610\",\n \"tactic\": \"execution\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1611": "{\n \"techniqueID\": \"T1611\",\n \"tactic\": \"privilege-escalation\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1612": "{\n \"techniqueID\": \"T1612\",\n \"tactic\": \"defense-evasion\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1613": "{\n \"techniqueID\": \"T1613\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n ",
"T1614": "{\n \"techniqueID\": \"T1614\",\n \"tactic\": \"discovery\",\n \"color\": \"#00ACB4\",\n \"comment\": \"\",\n \"enabled\": true,\n \"metadata\": [],\n \"showSubtechniques\": false\n },\n "}
    for technique, content in nav_pairs.items():
        if eachtechnique == technique:
            nav_list.append(content)
    # De-duplicate the collected entries before returning; set() does not preserve
    # order, which is acceptable here because each entry is a self-contained fragment.
    navlist = list(set(nav_list))
    return navlist
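# Illustrative follow-up (an assumption, not part of the original module): the strings
# collected above are comma-terminated JSON fragments, so an ATT&CK Navigator layer can
# be assembled by concatenating them inside a "techniques" array. The helper below and
# its layer-level field values (name, domain, version) are hypothetical.
def build_navigator_layer(technique_entries, layer_name="Generated layer"):
    import json  # local import so this sketch stays self-contained
    body = "".join(technique_entries).rstrip()
    if body.endswith(","):
        body = body[:-1]  # drop the trailing comma left by the last fragment
    layer_text = (
        '{ "name": "%s", "domain": "enterprise-attack", '
        '"versions": {"layer": "4.2"}, "techniques": [ %s ] }' % (layer_name, body)
    )
    return json.loads(layer_text)  # parsing confirms the assembled text is valid JSON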
| 384.836614
| 888
| 0.363208
| 15,129
| 195,497
| 4.69304
| 0.015665
| 0.021943
| 0.107829
| 0.116815
| 0.894649
| 0.890875
| 0.870312
| 0.870312
| 0.870312
| 0.870312
| 0
| 0.072495
| 0.380277
| 195,497
| 507
| 889
| 385.595661
| 0.513545
| 0.000128
| 0
| 0
| 0
| 0
| 0.554082
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001976
| false
| 0.001976
| 0
| 0
| 0.003953
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb4ee86d477a43f75cceb78f76487633e36df375
| 288
|
py
|
Python
|
cla_backend/apps/checker/tests/api/test_category_api.py
|
uk-gov-mirror/ministryofjustice.cla_backend
|
4d524c10e7bd31f085d9c5f7bf6e08a6bb39c0a6
|
[
"MIT"
] | 3
|
2019-10-02T15:31:03.000Z
|
2022-01-13T10:15:53.000Z
|
cla_backend/apps/checker/tests/api/test_category_api.py
|
uk-gov-mirror/ministryofjustice.cla_backend
|
4d524c10e7bd31f085d9c5f7bf6e08a6bb39c0a6
|
[
"MIT"
] | 206
|
2015-01-02T16:50:11.000Z
|
2022-02-16T20:16:05.000Z
|
cla_backend/apps/checker/tests/api/test_category_api.py
|
uk-gov-mirror/ministryofjustice.cla_backend
|
4d524c10e7bd31f085d9c5f7bf6e08a6bb39c0a6
|
[
"MIT"
] | 6
|
2015-03-23T23:08:42.000Z
|
2022-02-15T17:04:44.000Z
|
from rest_framework.test import APITestCase
from legalaid.tests.views.test_base import CLACheckerAuthBaseApiTestMixin
from legalaid.tests.views.mixins.category_api import CategoryAPIMixin
class CategoryTestCase(CLACheckerAuthBaseApiTestMixin, CategoryAPIMixin, APITestCase):
pass
| 28.8
| 86
| 0.868056
| 29
| 288
| 8.517241
| 0.62069
| 0.097166
| 0.137652
| 0.178138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086806
| 288
| 9
| 87
| 32
| 0.939164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
fb93f08ea1499ed878b8a7c8d4a72f2d6e533823
| 4,607
|
py
|
Python
|
dianping_2018032809/driver.py
|
mannuan/pyspider_script
|
f4c988912e1099eacd0322b4e9c3a87eaaaa526f
|
[
"Apache-2.0"
] | 9
|
2018-08-28T07:53:43.000Z
|
2019-07-09T07:55:52.000Z
|
dianping_2018032809/driver.py
|
mannuan/pyspider_script
|
f4c988912e1099eacd0322b4e9c3a87eaaaa526f
|
[
"Apache-2.0"
] | null | null | null |
dianping_2018032809/driver.py
|
mannuan/pyspider_script
|
f4c988912e1099eacd0322b4e9c3a87eaaaa526f
|
[
"Apache-2.0"
] | null | null | null |
#-*- coding:utf-8 -*-
import time,random,re,sys,json
from pymongo import MongoClient
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.action_chains import ActionChains
from pyvirtualdisplay import Display
def getPhantomJsWebDriver():
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = (
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11) AppleWebKit/601.1.27 (KHTML, like Gecko) Version/8.1 Safari/601.1.27")
service_args=[]
# service_args.append('--load-images=no')
# service_args.append('--disk-cache=yes')
# service_args.append('--ignore-ssl-errors=true')
driver = webdriver.PhantomJS(desired_capabilities=dcap,service_args=service_args)
driver.set_page_load_timeout(15)
    driver.set_script_timeout(15)  # both the page-load and script timeouts must be set for the limit to take effect
    driver.implicitly_wait(15)  # implicit wait for element look-ups
return driver
def getPhantomJsMobileDriver():
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = (
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13C75 Safari/601.1")
service_args=[]
# service_args.append('--load-images=no')
# service_args.append('--disk-cache=yes')
# service_args.append('--ignore-ssl-errors=true')
driver = webdriver.PhantomJS(desired_capabilities=dcap,service_args=service_args)
driver.set_page_load_timeout(15)
    driver.set_script_timeout(15)  # both the page-load and script timeouts must be set for the limit to take effect
    driver.implicitly_wait(15)  # implicit wait for element look-ups
return driver
def getChromeWebDriver():
options = webdriver.ChromeOptions()
options.add_argument(
'user-agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/27.0.1453.93 Chrome/27.0.1453.93 Safari/537.36"')
service_args=[]
# service_args.append('--load-images=no')
# service_args.append('--disk-cache=yes')
# service_args.append('--ignore-ssl-errors=true')
driver = webdriver.Chrome(chrome_options=options,service_args=service_args)
driver.set_page_load_timeout(15)
    driver.set_script_timeout(15)  # both the page-load and script timeouts must be set for the limit to take effect
    driver.implicitly_wait(15)  # implicit wait for element look-ups
return driver
def getChromeMobileDriver():
options = webdriver.ChromeOptions()
options.add_argument('lang=zh_CN.UTF-8')
options.add_argument('user-agent="Mozilla/5.0 (iPhone; CPU iPhone OS 9_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13C75 Safari/601.1"')
service_args=[]
# service_args.append('--load-images=no')
# service_args.append('--disk-cache=yes')
# service_args.append('--ignore-ssl-errors=true')
driver = webdriver.Chrome(chrome_options=options,service_args=service_args)
driver.set_page_load_timeout(15)
    driver.set_script_timeout(15)  # both the page-load and script timeouts must be set for the limit to take effect
    driver.implicitly_wait(15)  # implicit wait for element look-ups
return driver
def getChromeHeaderlessWebDriver():
display = Display(visible=0, size=(800, 600))
display.start()
options = webdriver.ChromeOptions()
options.add_argument(
'user-agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/27.0.1453.93 Chrome/27.0.1453.93 Safari/537.36"')
# chrome_options.add_argument('lang=zh_CN.UTF-8')
# chrome_options.add_argument('--headless')
service_args=[]
# service_args.append('--load-images=no')
# service_args.append('--disk-cache=yes')
# service_args.append('--ignore-ssl-errors=true')
driver = webdriver.Chrome(chrome_options=options,service_args=service_args)
driver.set_page_load_timeout(15)
    driver.set_script_timeout(15)  # both the page-load and script timeouts must be set for the limit to take effect
    driver.implicitly_wait(15)  # implicit wait for element look-ups
return driver
def getChromeHeaderlessMobileDriver():
display = Display(visible=0, size=(800, 600))
display.start()
options = webdriver.ChromeOptions()
# options.add_argument('lang=zh_CN.UTF-8')
# options.add_argument('--headless')
options.add_argument('user-agent="Mozilla/5.0 (iPhone; CPU iPhone OS 9_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13C75 Safari/601.1"')
service_args=[]
# service_args.append('--load-images=no')
# service_args.append('--disk-cache=yes')
# service_args.append('--ignore-ssl-errors=true')
driver = webdriver.Chrome(chrome_options=options,service_args=service_args)
driver.set_page_load_timeout(15)
    driver.set_script_timeout(15)  # both the page-load and script timeouts must be set for the limit to take effect
    driver.implicitly_wait(15)  # implicit wait for element look-ups
return driver
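# Minimal usage sketch (an assumption, not part of the original script): pick one of the
# factory functions above, fetch a page, and always release the browser in a finally
# block. PhantomJS support was deprecated in later Selenium releases, so the Chrome
# variants are generally the safer choice.
if __name__ == '__main__':
    driver = getChromeHeaderlessWebDriver()
    try:
        driver.get('https://www.dianping.com')  # example target, matching the project name
        print(driver.title)
    except TimeoutException:
        print('page load timed out after 15 seconds')
    finally:
        driver.quit()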
| 45.613861
| 173
| 0.73562
| 627
| 4,607
| 5.247209
| 0.183413
| 0.120365
| 0.093009
| 0.080243
| 0.822188
| 0.822188
| 0.822188
| 0.822188
| 0.81307
| 0.81307
| 0
| 0.053513
| 0.131973
| 4,607
| 101
| 174
| 45.613861
| 0.769192
| 0.226612
| 0
| 0.760563
| 0
| 0.084507
| 0.261264
| 0.056673
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084507
| false
| 0
| 0.112676
| 0
| 0.28169
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb9f3d443c3682487286df6e81bf97ba26a490f6
| 24,319
|
py
|
Python
|
sub_uts_BO/systems.py
|
panos108/MBDoE-total
|
5691d2d1615667b94cbf4cf107df543fe8148650
|
[
"MIT"
] | 1
|
2021-09-30T08:48:31.000Z
|
2021-09-30T08:48:31.000Z
|
sub_uts_BO/systems.py
|
panos108/MBDoE-total
|
5691d2d1615667b94cbf4cf107df543fe8148650
|
[
"MIT"
] | 2
|
2020-12-03T18:05:59.000Z
|
2020-12-03T18:06:33.000Z
|
sub_uts_BO/systems.py
|
panos108/MBDoE-total
|
5691d2d1615667b94cbf4cf107df543fe8148650
|
[
"MIT"
] | null | null | null |
# v2 includes shaping the TR with the curvature of the problem by a broyden update on derivatives
# and a BFGS update on the Hessian, however the TR becomes very small in some parts, so the approach
# does not seem to be too effective.
import time
import random
import numpy as np
import numpy.random as rnd
from scipy.spatial.distance import cdist
import sobol_seq
from scipy.optimize import minimize
from scipy.optimize import broyden1
from scipy import linalg
import scipy
import matplotlib.pyplot as plt
import functools
from matplotlib.patches import Ellipse
from casadi import *
def Benoit_Model(u):
f = u[0] ** 2 + u[1] ** 2
return f
def con1_model(u):
g1 = 1. - u[0] + u[1] ** 2
return -g1
def Benoit_System(u):
f = u[0] ** 2 + u[1] ** 2 + u[0] * u[1] + np.random.normal(0., np.sqrt(1e-3))
return f
def con1_system(u):
g1 = 1. - u[0] + u[1] ** 2 + 2. * u[1] - 2. + np.random.normal(0., np.sqrt(1e-3))
return -g1
def con1_system_tight(u):
g1 = 1. - u[0] + u[1] ** 2 + 2. * u[1] + np.random.normal(0., np.sqrt(1e-3))
return -g1
def Benoit_System_noiseless(u):
f = u[0] ** 2 + u[1] ** 2 + u[0] * u[1] # + np.random.normal(0., np.sqrt(1e-3))
return f
def con1_system_noiseless(u):
g1 = 1. - u[0] + u[1] ** 2 + 2. * u[1] - 2. # + np.random.normal(0., np.sqrt(1e-3))
return -g1
def con1_system_tight_noiseless(u):
g1 = 1. - u[0] + u[1] ** 2 + 2. * u[1] # + np.random.normal(0., np.sqrt(1e-3))
return -g1
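# Quick check of the Benoit benchmark above (a sketch, not part of the original file):
# minimize the noiseless model subject to its constraint with SLSQP. `minimize` is
# already imported from scipy.optimize at the top of this module; the starting point
# u0 is an arbitrary choice for illustration.
def _benoit_demo():
    u0 = np.array([1.0, -1.0])
    # con1_model returns -g1, so a nonnegative value means the constraint g1 <= 0 holds
    cons = ({'type': 'ineq', 'fun': con1_model},)
    res = minimize(Benoit_Model, u0, method='SLSQP', constraints=cons)
    return res.x, res.fun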
class WO_system:
# Parameters
Fa = 1.8275
Mt = 2105.2
# kinetic parameters
phi1 = - 3.
psi1 = -17.
phi2 = - 4.
psi2 = -29.
# Reference temperature
Tref = 110. + 273.15 # [=] K.
def __init__(self):
self.xd, self.xa, self.u, self.ODEeq, self.Aeq, self.states, self.algebraics, self.inputs = self.DAE_system()
self.eval = self.integrator_system()
def DAE_system(self):
# Define vectors with names of states
states = ['x']
nd = len(states)
xd = SX.sym('xd', nd)
for i in range(nd):
globals()[states[i]] = xd[i]
# Define vectors with names of algebraic variables
algebraics = ['Xa', 'Xb', 'Xc', 'Xe', 'Xp', 'Xg']
na = len(algebraics)
xa = SX.sym('xa', na)
for i in range(na):
globals()[algebraics[i]] = xa[i]
inputs = ['Fb', 'Tr']
nu = len(inputs)
u = SX.sym("u", nu)
for i in range(nu):
globals()[inputs[i]] = u[i]
# Reparametrization
k1 = 1.6599e6 * np.exp(-6666.7 / (Tr + 273.15))
k2 = 7.2117e8 * np.exp(-8333.3 / (Tr + 273.15))
k3 = 2.6745e12 * np.exp(-11111. / (Tr + 273.15))
# reaction rate
Fr = Fa + Fb
r1 = k1 * Xa * Xb * Mt
r2 = k2 * Xb * Xc * Mt
r3 = k3 * Xc * Xp * Mt
# residual for x
x_res = np.zeros((6, 1))
x_res[0, 0] = (Fa - r1 - Fr * Xa) / Mt
x_res[1, 0] = (Fb - r1 - r2 - Fr * Xb) / Mt
x_res[2, 0] = (+ 2 * r1 - 2 * r2 - r3 - Fr * Xc) / Mt
x_res[3, 0] = (+ 2 * r2 - Fr * Xe) / Mt
x_res[4, 0] = (+ r2 - 0.5 * r3 - Fr * Xp) / Mt
x_res[5, 0] = (+ 1.5 * r3 - Fr * Xg) / Mt
        # Dummy differential state (the reactor balances below are purely algebraic)
ODEeq = [0 * x]
# Declare algebraic equations
Aeq = []
Aeq += [(Fa - r1 - Fr * Xa) / Mt]
Aeq += [(Fb - r1 - r2 - Fr * Xb) / Mt]
Aeq += [(+ 2 * r1 - 2 * r2 - r3 - Fr * Xc) / Mt]
Aeq += [(+ 2 * r2 - Fr * Xe) / Mt]
Aeq += [(+ r2 - 0.5 * r3 - Fr * Xp) / Mt]
Aeq += [(+ 1.5 * r3 - Fr * Xg) / Mt]
return xd, xa, u, ODEeq, Aeq, states, algebraics, inputs
def integrator_system(self):
"""
This function constructs the integrator to be suitable with casadi environment, for the equations of the model
and the objective function with variable time step.
inputs: NaN
outputs: F: Function([x, u, dt]--> [xf, obj])
"""
xd, xa, u, ODEeq, Aeq, states, algebraics, inputs = self.DAE_system()
VV = Function('vfcn', [xa, u], [vertcat(*Aeq)], ['w0', 'u'], ['w'])
solver = rootfinder('solver', 'newton', VV)
return solver
def WO_obj_sys_ca(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.0260265, 0.207296, 0.0923376, 0.0339309]), u)
Fb = u[0]
Tr = u[1]
Fa = 1.8275
Fr = Fa + Fb
obj = -(1043.38 * x[4] * Fr +
20.92 * x[3] * Fr -
79.23 * Fa -
118.34 * Fb) + 0.5 * np.random.normal(0., 1)
return obj
def WO_obj_sys_ca_noise_less(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.0260265, 0.207296, 0.0923376, 0.0339309]), u)
Fb = u[0]
Tr = u[1]
Fa = 1.8275
Fr = Fa + Fb
obj = -(1043.38 * x[4] * Fr +
20.92 * x[3] * Fr -
79.23 * Fa -
118.34 * Fb) # + 0.5*np.random.normal(0., 1)
return obj
def WO_con1_sys_ca(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.0260265, 0.207296, 0.0923376, 0.0339309]), u)
pcon1 = x[0] - 0.12 + 5e-4 * np.random.normal(0., 1)
return -pcon1.toarray()[0]
def WO_con2_sys_ca(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.0260265, 0.207296, 0.0923376, 0.0339309]), u)
pcon2 = x[5] - 0.08 + 5e-4 * np.random.normal(0., 1)
return -pcon2.toarray()[0]
def WO_con1_sys_ca_noise_less(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.0260265, 0.207296, 0.0923376, 0.0339309]), u)
pcon1 = x[0] - 0.12 # + 5e-4*np.random.normal(0., 1)
return -pcon1.toarray()[0]
def WO_con2_sys_ca_noise_less(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.0260265, 0.207296, 0.0923376, 0.0339309]), u)
pcon2 = x[5] - 0.08 # + 5e-4*np.random.normal(0., 1)
return -pcon2.toarray()[0]
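# Illustrative evaluation of the Williams-Otto "plant" wrapper above (a sketch, not part
# of the original file): the input vector is [Fb, Tr]; the operating point used here is
# an arbitrary mid-range choice, and only the noiseless methods are called so the
# result is repeatable.
def _wo_system_demo():
    plant = WO_system()
    u = np.array([4.5, 80.0])  # hypothetical operating point for [Fb, Tr]
    obj = plant.WO_obj_sys_ca_noise_less(u)
    g1 = plant.WO_con1_sys_ca_noise_less(u)
    g2 = plant.WO_con2_sys_ca_noise_less(u)
    return obj, g1, g2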
class WO_model:
# Parameters
Fa = 1.8275
Mt = 2105.2
# kinetic parameters
phi1 = - 3.
psi1 = -17.
phi2 = - 4.
psi2 = -29.
# Reference temperature
Tref = 110. + 273.15 # [=] K.
def __init__(self):
self.xd, self.xa, self.u, self.ODEeq, self.Aeq, self.states, self.algebraics, self.inputs = self.DAE_model()
self.eval = self.integrator_model()
def DAE_model(self):
# Define vectors with names of states
states = ['x']
nd = len(states)
xd = SX.sym('xd', nd)
for i in range(nd):
globals()[states[i]] = xd[i]
# Define vectors with names of algebraic variables
algebraics = ['Xa', 'Xb', 'Xe', 'Xp', 'Xg']
na = len(algebraics)
xa = SX.sym('xa', na)
for i in range(na):
globals()[algebraics[i]] = xa[i]
        # Define vectors with names of input variables
inputs = ['Fb', 'Tr']
nu = len(inputs)
u = SX.sym("u", nu)
for i in range(nu):
globals()[inputs[i]] = u[i]
k1 = np.exp(phi1) * np.exp((Tref / (Tr + 273.15) - 1) * psi1)
k2 = np.exp(phi2) * np.exp((Tref / (Tr + 273.15) - 1) * psi2)
# reaction rate
Fr = Fa + Fb
r1 = k1 * Xa * Xb * Xb * Mt
r2 = k2 * Xa * Xb * Xp * Mt
ODEeq = [0 * x]
# Declare algebraic equations
Aeq = []
Aeq += [Fa - r1 - r2 - Fr * Xa]
Aeq += [Fb - 2 * r1 - r2 - Fr * Xb]
Aeq += [+ 2 * r1 - Fr * Xe]
Aeq += [+ r1 - r2 - Fr * Xp]
Aeq += [+ 3 * r2 - Fr * Xg]
return xd, xa, u, ODEeq, Aeq, states, algebraics, inputs
def integrator_model(self):
"""
This function constructs the integrator to be suitable with casadi environment, for the equations of the model
and the objective function with variable time step.
inputs: NaN
outputs: F: Function([x, u, dt]--> [xf, obj])
"""
xd, xa, u, ODEeq, Aeq, states, algebraics, inputs = self.DAE_model()
VV = Function('vfcn', [xa, u], [vertcat(*Aeq)], ['w0', 'u'], ['w'])
solver = rootfinder('solver', 'newton', VV)
# model = functools.partial(solver, np.zeros(np.shape(xa)))
return solver
def WO_obj_ca(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.207296, 0.0923376, 0.0339309]), u)
Fb = u[0]
Tr = u[1]
Fa = 1.8275
Fr = Fa + Fb
obj = -(1043.38 * x[3] * Fr +
20.92 * x[2] * Fr -
79.23 * Fa -
118.34 * Fb)
return obj
def WO_con1_model_ca(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.207296, 0.0923376, 0.0339309]), u)
pcon1 = x[0] - 0.12 # + 5e-4*np.random.normal(1., 1)
return -pcon1.toarray()[0]
def WO_con2_model_ca(self, u):
x = self.eval(np.array([0.114805, 0.525604, 0.207296, 0.0923376, 0.0339309]), u)
pcon2 = x[4] - 0.08 # + 5e-4*np.random.normal(1., 1)
return -pcon2.toarray()[0]
def con_empty(u):
g1 = 0.
return -g1
def obj_empty(u):
f = 0.
return f
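# Plant-model mismatch at a single operating point (a sketch, not part of the original
# file): the structurally simpler WO_model is compared against the WO_system "plant"
# using their noiseless evaluations; the test point is an arbitrary illustration.
def _wo_mismatch_demo(u=(4.5, 80.0)):
    u = np.array(u)
    plant, model = WO_system(), WO_model()
    return {
        'obj_plant': plant.WO_obj_sys_ca_noise_less(u),
        'obj_model': model.WO_obj_ca(u),
        'con1_gap': plant.WO_con1_sys_ca_noise_less(u) - model.WO_con1_model_ca(u),
        'con2_gap': plant.WO_con2_sys_ca_noise_less(u) - model.WO_con2_model_ca(u),
    }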
class Bio_system:
def __init__(self):
self.nk, self.tf, self.x0, _, _ = self.specifications()
self.xd, self.xa, self.u, _, self.ODEeq, self.Aeq, self.u_min, self.u_max,\
self.states, self.algebraics, self.inputs, self.nd, self.na, self.nu, \
self.nmp,self. modparval= self.DAE_system()
self.eval = self.integrator_model()
self.Sigma_v = [400.,1e5,1e-2]*diag(np.ones(self.nd))*1e-7*0
def specifications(self):
''' Specify Problem parameters '''
tf = 240. # final time
nk = 12 # sampling points
x0 = np.array([1., 150., 0.])
Lsolver = 'mumps' # 'ma97' # Linear solver
c_code = False # c_code
return nk, tf, x0, Lsolver, c_code
def DAE_system(self):
# Define vectors with names of states
states = ['x', 'n', 'q']
nd = len(states)
xd = SX.sym('xd', nd)
for i in range(nd):
globals()[states[i]] = xd[i]
# Define vectors with names of algebraic variables
algebraics = []
na = len(algebraics)
xa = SX.sym('xa', na)
for i in range(na):
globals()[algebraics[i]] = xa[i]
        # Define vectors with names of input variables
inputs = ['L', 'Fn']
nu = len(inputs)
u = SX.sym("u", nu)
for i in range(nu):
globals()[inputs[i]] = u[i]
# Define model parameter names and values
modpar = ['u_m', 'k_s', 'k_i', 'K_N', 'u_d', 'Y_nx', 'k_m', 'k_sq',
'k_iq', 'k_d', 'K_Np']
modparval = [0.0923 * 0.62, 178.85, 447.12, 393.10, 0.001, 504.49,
2.544 * 0.62 * 1e-4, 23.51, 800.0, 0.281, 16.89]
nmp = len(modpar)
for i in range(nmp):
globals()[modpar[i]] = SX(modparval[i])
# Additive measurement noise
# Sigma_v = [400.,1e5,1e-2]*diag(np.ones(nd))*1e-6
# Additive disturbance noise
# Sigma_w = [400.,1e5,1e-2]*diag(np.ones(nd))*1e-6
# Initial additive disturbance noise
# Sigma_w0 = [1.,150.**2,0.]*diag(np.ones(nd))*1e-3
# Declare ODE equations (use notation as defined above)
dx = u_m * L / (L + k_s + L ** 2. / k_i) * x * n / (n + K_N) - u_d * x
dn = - Y_nx * u_m * L / (L + k_s + L ** 2. / k_i) * x * n / (n + K_N) + Fn
dq = k_m * L / (L + k_sq + L ** 2. / k_iq) * x - k_d * q / (n + K_Np)
ODEeq = [dx, dn, dq]
# Declare algebraic equations
Aeq = []
# Define control bounds
u_min = np.array([120., 0.]) # lower bound of inputs
u_max = np.array([400., 40.]) # upper bound of inputs
# Define objective to be minimized
t = SX.sym('t')
return xd, xa, u, 0, ODEeq, Aeq, u_min, u_max, states, algebraics, inputs, nd, na, nu, nmp, modparval
def integrator_model(self):
"""
This function constructs the integrator to be suitable with casadi environment, for the equations of the model
and the objective function with variable time step.
inputs: NaN
outputs: F: Function([x, u, dt]--> [xf, obj])
"""
xd, xa, u, uncertainty, ODEeq, Aeq, u_min, u_max, states, algebraics, inputs, nd, na, nu, nmp, modparval \
= self.DAE_system()
dae = {'x': vertcat(xd), 'z': vertcat(xa), 'p': vertcat(u),
'ode': vertcat(*ODEeq), 'alg': vertcat(*Aeq)}
opts = {'tf': self.tf / self.nk} # interval length
F = integrator('F', 'idas', dae, opts)
# model = functools.partial(solver, np.zeros(np.shape(xa)))
return F
def bio_obj_ca(self, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2))
u = u0 * (self.u_max - self.u_min) + self.u_min
for i in range(self.nk):
xd = self.eval(x0=vertcat(np.array(x)), p=vertcat(u[i]))#self.eval(np.array([0.114805, 0.525604, 0.207296, 0.0923376, 0.0339309]), u)
x = np.array(xd['xf'].T)[0]
return -x[-1] + np.random.multivariate_normal([0.]*self.nd,np.array(self.Sigma_v))[-1]
def bio_con1_ca(self, n, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2))
u = u0 * (self.u_max - self.u_min) + self.u_min
for i in range(n):
xd = self.eval(x0=vertcat(np.array(x)), p=vertcat(u[i]))#self.eval(np.array([0.114805, 0.525604, 0.207296, 0.0923376, 0.0339309]), u)
x = np.array(xd['xf'].T)[0]
x[1] += np.random.multivariate_normal([0.]*self.nd,np.array(self.Sigma_v))[1]
pcon1 = x[1]/800 - 1
return -pcon1#.toarray()[0]
def bio_con2_ca(self, n, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2) )
u = u0* (self.u_max - self.u_min) + self.u_min
for i in range(n):
xd = self.eval(x0=vertcat(np.array(x)), p=vertcat(u[i]))#self.eval(np.array([0.114805, 0.525604, 0.207296, 0.0923376, 0.0339309]), u)
x = np.array(xd['xf'].T)[0]
x += np.random.multivariate_normal([0.]*self.nd,np.array(self.Sigma_v))
pcon1 = x[2]/(0.011 * x[0])-1
return -pcon1#.toarray()[0]
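# Example rollout of the fed-batch bioprocess "plant" above (a sketch, not part of the
# original file): controls are normalised to [0, 1] per stage, exactly as bio_obj_ca
# expects, and a flat mid-range profile is used purely for illustration.
def _bio_system_demo():
    plant = Bio_system()
    u_norm = 0.5 * np.ones(plant.nk * 2)  # nk stages x 2 inputs (L, Fn), normalised
    objective = plant.bio_obj_ca(u_norm)  # negative amount of product at final time
    path_con = plant.bio_con1_ca(plant.nk, u_norm)  # nitrogen path constraint after the last stage
    return objective, path_con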
class Bio_model:
def __init__(self):
self.nk, self.tf, self.x0, _, _ = self.specifications()
self.xd, self.xa, self.u, _, self.ODEeq, self.Aeq, self.u_min, self.u_max,\
self.states, self.algebraics, self.inputs, self.nd, self.na, self.nu, \
self.nmp,self. modparval= self.DAE_system()
self.eval = self.integrator_model()
def specifications(self):
''' Specify Problem parameters '''
tf = 240. # final time
nk = 12 # sampling points
x0 = np.array([1., 150., 0.])
Lsolver = 'mumps' # 'ma97' # Linear solver
c_code = False # c_code
return nk, tf, x0, Lsolver, c_code
def DAE_system(self):
# Define vectors with names of states
states = ['x', 'n', 'q']
nd = len(states)
xd = SX.sym('xd', nd)
for i in range(nd):
globals()[states[i]] = xd[i]
# Define vectors with names of algebraic variables
algebraics = []
na = len(algebraics)
xa = SX.sym('xa', na)
for i in range(na):
globals()[algebraics[i]] = xa[i]
        # Define vectors with names of input variables
inputs = ['L', 'Fn']
nu = len(inputs)
u = SX.sym("u", nu)
for i in range(nu):
globals()[inputs[i]] = u[i]
# Define model parameter names and values
modpar = ['u_m', 'k_s', 'k_i', 'K_N', 'u_d', 'Y_nx', 'k_m', 'k_sq',
'k_iq', 'k_d', 'K_Np']
modparval = [0.0923 * 0.62, 178.85, 447.12, 393.10, 0.001, 504.49,
2.544 * 0.62 * 1e-4, 23.51, 800.0, 0.281, 16.89]
nmp = len(modpar)
for i in range(nmp):
globals()[modpar[i]] = SX(modparval[i])
# Additive measurement noise
# Sigma_v = [400.,1e5,1e-2]*diag(np.ones(nd))*1e-6
# Additive disturbance noise
# Sigma_w = [400.,1e5,1e-2]*diag(np.ones(nd))*1e-6
# Initial additive disturbance noise
# Sigma_w0 = [1.,150.**2,0.]*diag(np.ones(nd))*1e-3
# Declare ODE equations (use notation as defined above)
dx = u_m * L / (L + k_s) * x * n / (n + K_N) - u_d * x
dn = - Y_nx * u_m * L / (L + k_s) * x * n / (n + K_N) + Fn
dq = k_m * L / (L + k_sq) * x - k_d * q / (n + K_Np)
ODEeq = [dx, dn, dq]
# Declare algebraic equations
Aeq = []
# Define control bounds
u_min = np.array([120., 0.]) # lower bound of inputs
u_max = np.array([400., 40.]) # upper bound of inputs
# Define objective to be minimized
t = SX.sym('t')
return xd, xa, u, 0, ODEeq, Aeq, u_min, u_max, states, algebraics, inputs, nd, na, nu, nmp, modparval
def integrator_model(self):
"""
This function constructs the integrator to be suitable with casadi environment, for the equations of the model
and the objective function with variable time step.
inputs: NaN
outputs: F: Function([x, u, dt]--> [xf, obj])
"""
xd, xa, u, uncertainty, ODEeq, Aeq, u_min, u_max, states, algebraics, inputs, nd, na, nu, nmp, modparval \
= self.DAE_system()
ODEeq_ = vertcat(*ODEeq)
self.ODEeq = Function('f', [xd, u], [vertcat(*ODEeq)], ['x0', 'p'], ['xdot'])
dae = {'x': vertcat(xd), 'z': vertcat(xa), 'p': vertcat(u),
'ode': vertcat(*ODEeq), 'alg': vertcat(*Aeq)}
opts = {'tf': self.tf / self.nk} # interval length
F = integrator('F', 'idas', dae, opts)
# model = functools.partial(solver, np.zeros(np.shape(xa)))
return F
def bio_obj_ca(self, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2))
u = u0 * (self.u_max - self.u_min) + self.u_min  # scale each normalized control column to its physical range
for i in range(self.nk):
if np.any(x<0):
print(2)
elif np.any(u[i]<0):
print(2)
for j in range(self.nk):
if u[j,1]<0:
u[j,1]= 0.
xd = self.eval(x0=vertcat(np.array(x)), p=vertcat(u[i]))
x = np.array(xd['xf'].T)[0]
for j in range(self.nd):
if x[j]<0:
x[j]=0
return -x[-1]
def bio_con1_ca(self, n, u0):
x = self.x0
u1 = np.array(u0).reshape((self.nk,2))
u = u1 * (self.u_max - self.u_min) + self.u_min
for i in range(n):
if np.any(x<0):
print(2)
elif np.any(u[i]<0):
print(2)
for j in range(self.nk):
if u[j,1]<0:
u[j,1]= 0.
xd = self.eval(x0=vertcat(np.array(x)), p=vertcat(u[i]))
x = np.array(xd['xf'].T)[0]
for j in range(self.nd):
if x[j]<0:
x[j]=0
pcon1 = x[1]/800-1 # + 5e-4*np.random.normal(1., 1)
return -pcon1#.toarray()[0]
def bio_con2_ca(self, n, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2))
u = u0 * (self.u_max - self.u_min) + self.u_min
for i in range(n):
if np.any(x<0):
print(2)
elif np.any(u[i]<0):
print(2)
for j in range(self.nk):
if u[j,1]<0:
u[j,1]= 0.
xd = self.eval(x0=vertcat(np.array(x)), p=vertcat(u[i]))
x = np.array(xd['xf'].T)[0]
for j in range(self.nd):
if x[j]<0:
x[j]=0
pcon1 = x[2]/(0.011 * x[0])-1 # + 5e-4*np.random.normal(1., 1)
return -pcon1#.toarray()[0]
def bio_obj_ca_RK4(self, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2))
u = u0 * (self.u_max - self.u_min) + self.u_min
DT = self.tf/self.nk/4
for i in range(self.nk):
if np.any(x<0):
print(2)
elif np.any(u[i]<0):
print(2)
for j in range(self.nk):
if u[j,1]<0:
u[j,1]= 0.
f = self.ODEeq
for j in range(4):
k1 = f(x0=vertcat(np.array(x)), p=vertcat(u[i]))['xdot']
k2 = f(x0=vertcat(np.array(x + DT / 2 * k1)),p=vertcat(u[i]))['xdot']
k3 = f(x0=vertcat(np.array(x + DT / 2 * k2)), p=vertcat(u[i]))['xdot']
k4 = f(x0=vertcat(np.array(x + DT * k3)), p=vertcat(u[i]))['xdot']  # classical RK4 uses k3 in the final stage
x = x + DT / 6 * (k1 + 2 * k2 + 2 * k3 + k4)
# xd = self.eval(x0=vertcat(np.array(x1)), p=vertcat(u[i]))
# x1 = np.array(xd['xf'].T)[0]
for j in range(self.nd):
if x[j]<0:
x[j]=0
return -x[-1].toarray()[0][0]
def bio_con1_ca_RK4(self, n, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2))
u = u0 * (self.u_max - self.u_min) + self.u_min
DT = self.tf/self.nk/4
for i in range(n):
if np.any(x<0):
print(2)
elif np.any(u[i]<0):
print(2)
for j in range(self.nk):
if u[j,1]<0:
u[j,1]= 0.
f = self.ODEeq
for j in range(4):
k1 = f(x0=vertcat(np.array(x)), p=vertcat(u[i]))['xdot']
k2 = f(x0=vertcat(np.array(x + DT / 2 * k1)),p=vertcat(u[i]))['xdot']
k3 = f(x0=vertcat(np.array(x + DT / 2 * k2)), p=vertcat(u[i]))['xdot']
k4 = f(x0=vertcat(np.array(x + DT * k3)), p=vertcat(u[i]))['xdot']
x = x + DT / 6 * (k1 + 2 * k2 + 2 * k3 + k4)
for j in range(self.nd):
if x[j]<0:
x[j]=0
pcon1 = x[1]/800 -1 # + 5e-4*np.random.normal(1., 1)
return -pcon1.toarray()[0][0]
def bio_con2_ca_RK4(self, n, u0):
x = self.x0
u0 = np.array(u0).reshape((self.nk,2))
u = u0 * (self.u_max - self.u_min) + self.u_min
DT = self.tf/self.nk/4
for i in range(n):
if np.any(x<0):
print(2)
elif np.any(u[i]<0):
print(2)
for j in range(self.nk):
if u[j,1]<0:
u[j,1]= 0.
f = self.ODEeq
for j in range(4):
k1 = f(x0=vertcat(np.array(x)), p=vertcat(u[i]))['xdot']
k2 = f(x0=vertcat(np.array(x + DT / 2 * k1)),p=vertcat(u[i]))['xdot']
k3 = f(x0=vertcat(np.array(x + DT / 2 * k2)), p=vertcat(u[i]))['xdot']
k4 = f(x0=vertcat(np.array(x + DT * k3)), p=vertcat(u[i]))['xdot']
x = x + DT / 6 * (k1 + 2 * k2 + 2 * k3 + k4)
for j in range(self.nd):
if x[j]<0:
x[j]=0
pcon1 = x[2]/(0.011 * x[0])-1 # + 5e-4*np.random.normal(1., 1)
return -pcon1.toarray()[0][0]
def bio_model_ca(self):
M = 4 # RK4 steps per interval
X0 = SX.sym('X0', self.nd)
U = SX.sym('U', self.nu,1)
u = U * (self.u_max - self.u_min) + self.u_min
DT = self.tf/self.nk/M
f = self.ODEeq
X = X0
for j in range(M):
k1 = f(X, u)
k2 = f(X + DT / 2 * k1, u)
k3 = f(X + DT / 2 * k2, u)
k4 = f(X + DT * k3, u)
X = X + DT / 6 * (k1 + 2 * k2 + 2 * k3 + k4)
F = Function('F', [X0, U], [X], ['x0', 'u'], ['xf'])
return F
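# Hedged usage sketch (illustrative only): bio_model_ca returns a one-interval
# RK4 map over normalized controls that can be chained symbolically, e.g.
#   F_rk4 = model.bio_model_ca()
#   x1 = F_rk4(x0=model.x0, u=[0.5, 0.5])['xf']   # u is normalized to [0, 1]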
def bio_obj_ca_f(self, x):
return -x[-1]
def bio_con1_ca_f(self, x):
pcon1 = x[1]/800 -1 # + 5e-4*np.random.normal(1., 1)
return pcon1
def bio_con2_ca_f(self, x):
pcon1 = x[2]/(0.011 * x[0])-1 # + 5e-4*np.random.normal(1., 1)
return pcon1
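# Hedged usage sketch (illustrative only; the constant 0.5 control profile is
# arbitrary, and any optimizer wrapping these calls is assumed, not shown):
if __name__ == '__main__':
    model = Bio_model()
    u_norm = np.full(model.nk * model.nu, 0.5)   # normalized controls in [0, 1]
    obj = model.bio_obj_ca(u_norm)               # negative final product amount q(tf)
    g1 = model.bio_con1_ca(model.nk, u_norm)     # nitrate path constraint (sign convention as above)
    g2 = model.bio_con2_ca(model.nk, u_norm)     # product-to-biomass ratio constraint
    print(obj, g1, g2)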
| 31.87287
| 145
| 0.492783
| 3,774
| 24,319
| 3.109698
| 0.083201
| 0.036384
| 0.011759
| 0.021558
| 0.895876
| 0.881987
| 0.869717
| 0.853698
| 0.846029
| 0.838957
| 0
| 0.094484
| 0.338048
| 24,319
| 763
| 146
| 31.87287
| 0.634551
| 0.165755
| 0
| 0.742915
| 0
| 0
| 0.014954
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093117
| false
| 0
| 0.02834
| 0.002024
| 0.242915
| 0.024292
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fba16da51dd6508982d8d6781f823752492b8bf4
| 119
|
py
|
Python
|
python/tests/my_test.py
|
CodyScottJohnson/alumni-face-rec
|
9810e77e477a6900245faf84fa3aed2fc3fa29ca
|
[
"MIT"
] | null | null | null |
python/tests/my_test.py
|
CodyScottJohnson/alumni-face-rec
|
9810e77e477a6900245faf84fa3aed2fc3fa29ca
|
[
"MIT"
] | null | null | null |
python/tests/my_test.py
|
CodyScottJohnson/alumni-face-rec
|
9810e77e477a6900245faf84fa3aed2fc3fa29ca
|
[
"MIT"
] | null | null | null |
from faceRec.run import is_this_just_fantasy
def test_is_this_the_real_life():
assert not is_this_just_fantasy()
| 19.833333
| 44
| 0.823529
| 21
| 119
| 4.142857
| 0.714286
| 0.206897
| 0.229885
| 0.390805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12605
| 119
| 5
| 45
| 23.8
| 0.836538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
fba72bbbaff64cd4132adb7a6d40a975420aefff
| 4,886
|
py
|
Python
|
test.py
|
jkoors/github-tutorial-calculator
|
0d0801e8bf0120f6eddbd7bd8e3cc7a9432ce9d9
|
[
"MIT"
] | null | null | null |
test.py
|
jkoors/github-tutorial-calculator
|
0d0801e8bf0120f6eddbd7bd8e3cc7a9432ce9d9
|
[
"MIT"
] | null | null | null |
test.py
|
jkoors/github-tutorial-calculator
|
0d0801e8bf0120f6eddbd7bd8e3cc7a9432ce9d9
|
[
"MIT"
] | null | null | null |
import unittest
from calculator import Calculator
import math
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calculator = Calculator()
def test_add(self):
"""Tests the add function for every combination of 1, 0 and -1.
May be redundant but checks that the commutative property is respected.
"""
# Where x = 1
self.assertEqual(self.calculator.add(1, 1), 2)
self.assertEqual(self.calculator.add(1, 0), 1)
self.assertEqual(self.calculator.add(1, -1), 0)
# Where x = 0
self.assertEqual(self.calculator.add(0, 1), 1)
self.assertEqual(self.calculator.add(0, 0), 0)
self.assertEqual(self.calculator.add(0, -1), -1)
# Where x = -1
self.assertEqual(self.calculator.add(-1, 1), 0)
self.assertEqual(self.calculator.add(-1, 0), -1)
self.assertEqual(self.calculator.add(-1, -1), -2)
def test_subtract(self):
"""Tests the subtract function for every combination of 1, 0 and -1.
May be redundant but checks that the commutative property is respected.
"""
# Where x = 1
self.assertEqual(self.calculator.subtract(1, 1), 0)
self.assertEqual(self.calculator.subtract(1, 0), 1)
self.assertEqual(self.calculator.subtract(1, -1), 2)
# Where x = 0
self.assertEqual(self.calculator.subtract(0, 1), -1)
self.assertEqual(self.calculator.subtract(0, 0), 0)
self.assertEqual(self.calculator.subtract(0, -1), 1)
# Where x = -1
self.assertEqual(self.calculator.subtract(-1, 1), -2)
self.assertEqual(self.calculator.subtract(-1, 0), -1)
self.assertEqual(self.calculator.subtract(-1, -1), 0)
def test_multiply(self):
"""Tests the multiply function for every combination of 1, 0 and -1.
May be redundant but checks that the commutative property is respected.
"""
# Where x = 1
self.assertEqual(self.calculator.multiply(1, 1), 1)
self.assertEqual(self.calculator.multiply(1, 0), 0)
self.assertEqual(self.calculator.multiply(1, -1), -1)
# Where x = 0
self.assertEqual(self.calculator.multiply(0, 1), 0)
self.assertEqual(self.calculator.multiply(0, 0), 0)
self.assertEqual(self.calculator.multiply(0, -1), 0)
# Where x = -1
self.assertEqual(self.calculator.multiply(-1, 1), -1)
self.assertEqual(self.calculator.multiply(-1, 0), 0)
self.assertEqual(self.calculator.multiply(-1, -1), 1)
def test_divide(self):
"""Tests the divide function for every combination of 1, 0 and -1.
May be redundant but checks that the commutative property is respected.
Note: Since our divide function will throw ZeroDivisionErrors when
passing a value of 0 for y, you'll notice we use assertRaises to ensure
that these exceptions are thrown when expected.
"""
# Where x = 1
self.assertEqual(self.calculator.divide(1, 1), 1)
self.assertRaises(ZeroDivisionError, self.calculator.divide, 1, 0)
self.assertEqual(self.calculator.divide(1, -1), -1)
# Where x = 0
self.assertEqual(self.calculator.divide(0, 1), 0)
self.assertRaises(ZeroDivisionError, self.calculator.divide, 0, 0)
self.assertEqual(self.calculator.divide(0, -1), 0)
# Where x = -1
self.assertEqual(self.calculator.divide(-1, 1), -1)
self.assertRaises(ZeroDivisionError, self.calculator.divide, -1, 0)
self.assertEqual(self.calculator.divide(-1, -1), 1)
def test_tan(self):
"""Tests the subtract function for every combination of 1, 0 and -1.
May be redundant but checks if communitive property is respected.
"""
self.assertTrue(math.isclose(self.calculator.tan(0), 0))
self.assertTrue(math.isclose(self.calculator.tan(-3), 0.142546543074))
self.assertTrue(math.isclose(self.calculator.tan(3), -0.142546543074))
self.assertTrue(math.isclose(self.calculator.tan(math.pi/4), 1))
def test_square(self):
"""Tests the square function for every combination of 1, 0 and -1.
May be redundant but checks if communitive property is respected.
"""
# Where x = 1, 0, -1
self.assertEqual(self.calculator.square(1), 1)
self.assertEqual(self.calculator.square(0), 0)
self.assertEqual(self.calculator.square(-1), 1)
def test_log(self):
self.assertTrue(math.isclose(self.calculator.log(2,10), 0.30103, rel_tol=0.05))
self.assertTrue(math.isclose(self.calculator.log(10,10), 1))
self.assertTrue(math.isclose(self.calculator.log(100, 10), 2))
if __name__ == '__main__':
unittest.main()
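# Hedged sketch of the Calculator interface exercised above; the real
# ``calculator`` module is not shown here, so names and behaviour are inferred
# from the assertions (log(x, base) and tan in radians):
class _ReferenceCalculator:
    """Minimal stand-in matching the methods the tests call."""
    def add(self, x, y): return x + y
    def subtract(self, x, y): return x - y
    def multiply(self, x, y): return x * y
    def divide(self, x, y): return x / y              # raises ZeroDivisionError when y == 0
    def tan(self, x): return math.tan(x)
    def square(self, x): return x * x
    def log(self, x, base): return math.log(x, base)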
| 40.716667
| 88
| 0.624233
| 639
| 4,886
| 4.748044
| 0.12989
| 0.216875
| 0.225445
| 0.3441
| 0.839815
| 0.835201
| 0.801582
| 0.683916
| 0.683916
| 0.601516
| 0
| 0.05469
| 0.251535
| 4,886
| 119
| 89
| 41.058824
| 0.774952
| 0.23291
| 0
| 0
| 0
| 0
| 0.002312
| 0
| 0
| 0
| 0
| 0
| 0.754098
| 1
| 0.131148
| false
| 0
| 0.04918
| 0
| 0.196721
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fbcf42109868ccad8db343fbb187f803d8e4f837
| 1,980
|
py
|
Python
|
home/hairygael/GESTURES/releaseleftclothes.py
|
rv8flyboy/pyrobotlab
|
4e04fb751614a5cb6044ea15dcfcf885db8be65a
|
[
"Apache-2.0"
] | 63
|
2015-02-03T18:49:43.000Z
|
2022-03-29T03:52:24.000Z
|
home/hairygael/GESTURES/releaseleftclothes.py
|
hirwaHenryChristian/pyrobotlab
|
2debb381fc2db4be1e7ea6e5252a50ae0de6f4a9
|
[
"Apache-2.0"
] | 16
|
2016-01-26T19:13:29.000Z
|
2018-11-25T21:20:51.000Z
|
home/hairygael/GESTURES/releaseleftclothes.py
|
hirwaHenryChristian/pyrobotlab
|
2debb381fc2db4be1e7ea6e5252a50ae0de6f4a9
|
[
"Apache-2.0"
] | 151
|
2015-01-03T18:55:54.000Z
|
2022-03-04T07:04:23.000Z
|
def releaseleftclothes():
##arms get to middle
i01.setHandSpeed("left", 1.0, 0.80, 0.80, 0.80, 1.0, 0.80)
i01.setHandSpeed("right", 1.0, 0.70, 0.70, 1.0, 1.0, 0.80)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.90, 0.80)
i01.setTorsoSpeed(1.0,0.80,1.0)
i01.moveHead(0,80,82,0,65)
i01.moveArm("left",97,51,25,27)
i01.moveArm("right",81,52,22,18)
i01.moveHand("left",92,33,37,71,66,25)
i01.moveHand("right",180,180,180,180,180,180)
i01.moveTorso(90,90,90)
sleep(4)
##arms spread
i01.setHandSpeed("left", 1.0, 0.80, 0.80, 0.80, 1.0, 0.80)
i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.90, 0.80)
i01.setTorsoSpeed(1.0,0.80,1.0)
sleep(2)
i01.moveHead(90,90,82,78,65)
i01.moveArm("left",97,51,25,22)
i01.moveArm("right",90,135,22,36)
i01.moveHand("left",92,33,37,71,66,25)
i01.moveHand("right",180,180,180,180,180,139)
i01.moveTorso(64,80,90)
sleep(2)
##release clothes
i01.setHandSpeed("left", 1.0, 0.80, 0.80, 0.80, 1.0, 0.80)
i01.setHandSpeed("right", 1.0, 0.80, 0.80, 0.80, 0.80, 0.80)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.90, 0.80)
i01.setTorsoSpeed(1.0,0.80,1.0)
i01.moveHead(38,43,51,10,65)
i01.moveArm("left",97,51,25,22)
i01.moveArm("right",90,135,22,36)
i01.moveHand("left",92,33,37,71,66,25)
i01.moveHand("right",0,0,0,0,0,139)
i01.moveTorso(66,80,90)
sleep(4)
##Relax
i01.moveHead(80,86,82,78,65)
i01.moveArm("left",5,84,28,14)
i01.moveArm("right",5,82,28,16)
i01.moveHand("left",92,33,37,71,66,25)
i01.moveHand("right",81,66,82,60,105,113)
i01.moveTorso(95,90,90)
| 37.358491
| 66
| 0.582323
| 393
| 1,980
| 2.933842
| 0.160305
| 0.079792
| 0.062446
| 0.083261
| 0.747615
| 0.743278
| 0.722463
| 0.703382
| 0.693842
| 0.693842
| 0
| 0.311486
| 0.190909
| 1,980
| 53
| 67
| 37.358491
| 0.40824
| 0.024747
| 0
| 0.574468
| 0
| 0
| 0.067308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021277
| true
| 0
| 0
| 0
| 0.021277
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8379194135555d1802d95b6a18f36f1fdeaa4e09
| 113
|
py
|
Python
|
paltas/Utils/__init__.py
|
swagnercarena/paltas
|
62495381e406dfb508a1ace4aa69cbe9a4207e38
|
[
"MIT"
] | 5
|
2022-02-11T19:58:03.000Z
|
2022-03-07T19:45:23.000Z
|
paltas/Utils/__init__.py
|
swagnercarena/paltas
|
62495381e406dfb508a1ace4aa69cbe9a4207e38
|
[
"MIT"
] | 8
|
2022-02-01T00:42:34.000Z
|
2022-03-31T17:42:55.000Z
|
paltas/Utils/__init__.py
|
swagnercarena/paltas
|
62495381e406dfb508a1ace4aa69cbe9a4207e38
|
[
"MIT"
] | 1
|
2022-02-11T19:54:53.000Z
|
2022-02-11T19:54:53.000Z
|
from . import cosmology_utils
from . import power_law
from . import hubble_utils
from . import lenstronomy_utils
| 22.6
| 31
| 0.823009
| 16
| 113
| 5.5625
| 0.5
| 0.449438
| 0.337079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141593
| 113
| 4
| 32
| 28.25
| 0.917526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
83ab268e6c669af49240a4f887ae1093265bdda9
| 48,812
|
py
|
Python
|
tests/intra_stack_registration_test.py
|
martaranzini/NiftyMIC
|
6bd3c914dad8f2983e84ef009b944c429e1fafb3
|
[
"BSD-3-Clause"
] | 86
|
2017-11-23T01:37:42.000Z
|
2022-03-10T01:46:48.000Z
|
tests/intra_stack_registration_test.py
|
martaranzini/NiftyMIC
|
6bd3c914dad8f2983e84ef009b944c429e1fafb3
|
[
"BSD-3-Clause"
] | 20
|
2018-10-26T04:14:53.000Z
|
2022-03-31T07:44:58.000Z
|
tests/intra_stack_registration_test.py
|
martaranzini/NiftyMIC
|
6bd3c914dad8f2983e84ef009b944c429e1fafb3
|
[
"BSD-3-Clause"
] | 23
|
2018-01-26T12:56:37.000Z
|
2022-01-24T05:20:18.000Z
|
##
# \file intra_stack_registration_test.py
# \brief Class containing unit tests for module IntraStackRegistration
#
# \author Michael Ebner (michael.ebner.14@ucl.ac.uk)
# \date October 2016
# Import libraries
import SimpleITK as sitk
import itk
import numpy as np
import unittest
import sys
import os
from scipy.ndimage import imread
import pysitk.simple_itk_helper as sitkh
import pysitk.python_helper as ph
# Import modules
import niftymic.base.stack as st
import niftymic.registration.intra_stack_registration as inplanereg
from niftymic.definitions import DIR_TEST
def get_inplane_corrupted_stack(stack,
angle_z,
center_2D,
translation_2D,
scale=1,
intensity_scale=1,
intensity_bias=0,
debug=0,
random=False):
# Convert to 3D:
translation_3D = np.zeros(3)
translation_3D[0:-1] = translation_2D
center_3D = np.zeros(3)
center_3D[0:-1] = center_2D
# Transform to align physical coordinate system with stack-coordinate
# system
affine_centering_sitk = sitk.AffineTransform(3)
affine_centering_sitk.SetMatrix(stack.sitk.GetDirection())
affine_centering_sitk.SetTranslation(stack.sitk.GetOrigin())
# Corrupt first stack towards positive direction
if random:
angle_z_1 = -angle_z*np.random.rand(1)[0]
else:
angle_z_1 = -angle_z
in_plane_motion_sitk = sitk.Euler3DTransform()
in_plane_motion_sitk.SetRotation(0, 0, angle_z_1)
in_plane_motion_sitk.SetCenter(center_3D)
in_plane_motion_sitk.SetTranslation(translation_3D)
motion_sitk = sitkh.get_composite_sitk_affine_transform(
in_plane_motion_sitk, sitk.AffineTransform(
affine_centering_sitk.GetInverse()))
motion_sitk = sitkh.get_composite_sitk_affine_transform(
affine_centering_sitk, motion_sitk)
stack_corrupted_resampled_sitk = sitk.Resample(
stack.sitk, motion_sitk, sitk.sitkLinear)
stack_corrupted_resampled_sitk_mask = sitk.Resample(
stack.sitk_mask, motion_sitk, sitk.sitkLinear)
# Corrupt first stack towards negative direction
if random:
angle_z_2 = -angle_z*np.random.rand(1)[0]
else:
angle_z_2 = -angle_z
in_plane_motion_2_sitk = sitk.Euler3DTransform()
in_plane_motion_2_sitk.SetRotation(0, 0, angle_z_2)
in_plane_motion_2_sitk.SetCenter(center_3D)
in_plane_motion_2_sitk.SetTranslation(-translation_3D)
motion_2_sitk = sitkh.get_composite_sitk_affine_transform(
in_plane_motion_2_sitk, sitk.AffineTransform(
affine_centering_sitk.GetInverse()))
motion_2_sitk = sitkh.get_composite_sitk_affine_transform(
affine_centering_sitk, motion_2_sitk)
stack_corrupted_2_resampled_sitk = sitk.Resample(
stack.sitk, motion_2_sitk, sitk.sitkLinear)
stack_corrupted_2_resampled_sitk_mask = sitk.Resample(
stack.sitk_mask, motion_2_sitk, sitk.sitkLinear)
# Create stack based on those two corrupted stacks
nda = sitk.GetArrayFromImage(stack_corrupted_resampled_sitk)
nda_mask = sitk.GetArrayFromImage(stack_corrupted_resampled_sitk_mask)
nda_neg = sitk.GetArrayFromImage(stack_corrupted_2_resampled_sitk)
nda_neg_mask = sitk.GetArrayFromImage(
stack_corrupted_2_resampled_sitk_mask)
for i in range(0, stack.sitk.GetDepth(), 2):
nda[i, :, :] = nda_neg[i, :, :]
nda_mask[i, :, :] = nda_neg_mask[i, :, :]
stack_corrupted_sitk = sitk.GetImageFromArray(
(nda-intensity_bias)/intensity_scale)
stack_corrupted_sitk_mask = sitk.GetImageFromArray(nda_mask)
stack_corrupted_sitk.CopyInformation(stack.sitk)
stack_corrupted_sitk_mask.CopyInformation(stack.sitk_mask)
# Debug: Show corrupted stacks (before scaling)
if debug:
sitkh.show_sitk_image(
[stack.sitk,
stack_corrupted_resampled_sitk,
stack_corrupted_2_resampled_sitk,
stack_corrupted_sitk],
title=["original",
"corrupted_1",
"corrupted_2",
"corrupted_final_from_1_and_2"])
# Update in-plane scaling
spacing = np.array(stack.sitk.GetSpacing())
spacing[0:-1] /= scale
stack_corrupted_sitk.SetSpacing(spacing)
stack_corrupted_sitk_mask.SetSpacing(spacing)
# Create Stack object
stack_corrupted = st.Stack.from_sitk_image(
stack_corrupted_sitk, "stack_corrupted", stack_corrupted_sitk_mask)
# Debug: Show corrupted stacks (after scaling)
if debug:
stack_corrupted_resampled_sitk = sitk.Resample(
stack_corrupted.sitk, stack.sitk)
sitkh.show_sitk_image(
[stack.sitk,
stack_corrupted_resampled_sitk],
title=["original", "corrupted"])
return stack_corrupted, motion_sitk, motion_2_sitk
class IntraStackRegistrationTest(unittest.TestCase):
# Specify input data
dir_test_data = DIR_TEST
accuracy = 6
def setUp(self):
pass
##
# Test whether the function
# _get_initial_transforms_and_parameters_geometry_moments
# works.
# \date 2016-11-09 23:59:25+0000
#
# \param self The object
#
def test_initial_transform_computation_1(self):
# Create stack of slice with only a dot in the middle
shape_xy = 15
shape_z = 15
# Original stack
nda_3D = np.zeros((shape_z, shape_xy, shape_xy))
nda_3D[:, 0, 0] = 1
stack_sitk = sitk.GetImageFromArray(nda_3D)
stack = st.Stack.from_sitk_image(stack_sitk, "stack")
# Create 'motion corrupted stack', i.e. point moves diagonally with
# step one
nda_3D_corruped = np.zeros_like(nda_3D)
for i in range(0, shape_z):
nda_3D_corruped[i, i, i] = 1
stack_corrupted_sitk = sitk.GetImageFromArray(nda_3D_corruped)
stack_corrupted = st.Stack.from_sitk_image(
stack_corrupted_sitk, "stack_corrupted")
# stack_corrupted.show_slices()
# sitkh.show_stacks([stack, stack_corrupted])
# Ground truth-parameter: zero angle but translation = (1, 1) from one
# slice to the next
parameters = np.ones((shape_z, 3))
parameters[:, 0] = 0
for i in range(0, shape_z):
parameters[i, :] *= i
# 1) Get initial transform in case no reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted)
inplane_registration.set_transform_initializer_type("moments")
# inplane_registration.set_transform_initializer_type("identity")
inplane_registration._run_registration_pipeline_initialization()
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
# 2) Get initial transform in case reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
inplane_registration.set_transform_initializer_type("moments")
# inplane_registration.set_image_transform_reference_fit_term("gradient_magnitude")
# inplane_registration.set_transform_initializer_type("identity")
inplane_registration._run_registration_pipeline_initialization()
inplane_registration._apply_motion_correction()
# stack_corrected = inplane_registration.get_corrected_stack()
# sitkh.show_stacks([stack, stack_corrupted, stack_corrected.get_resampled_stack_from_slices(interpolator="Linear")])
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
# print(nda_diff)
# print(parameters)
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
##
# Test whether the function
# _get_initial_transforms_and_parameters_geometry_moments
# works.
# \date 2016-11-09 23:59:25+0000
#
# \param self The object
#
def test_initial_transform_computation_2(self):
# Create stack of slice with a pyramid in the middle
shape_xy = 250
shape_z = 15
intensity_mask = 10
length = 50
nda_2D = ph.read_image(os.path.join(
DIR_TEST, "2D_Pyramid_Midpoint_" + str(length) + ".png"))
# Original stack
nda_3D = np.zeros((shape_z, shape_xy, shape_xy))
i0 = (shape_xy - length) // 2  # integer division so i0 can be used as a slice index
for i in range(0, shape_z):
nda_3D[i, i0:-i0, i0:-i0] = nda_2D
stack_sitk = sitk.GetImageFromArray(nda_3D)
stack = st.Stack.from_sitk_image(stack_sitk, "stack")
# Create 'motion corrupted stack', i.e. in-plane translation, and
# associated ground-truth parameters
parameters = np.zeros((shape_z, 3))
parameters[:, 0] = 0
nda_3D_corrupted = np.zeros_like(nda_3D)
nda_3D_corrupted[0, :, :] = nda_3D[0, :, :]
for i in range(1, shape_z):
# Get random translation
[tx, ty] = np.random.randint(0, 50, 2)
# Get image based on corruption
inew = i0 + tx
jnew = i0 + ty
nda_3D_corrupted[i, inew:, jnew:] = \
nda_3D[i, i0:2*i0+length-tx, i0:2*i0+length-ty]
# Get ground-truth parameters
parameters[i, 1] = ty
parameters[i, 2] = tx
stack_corrupted_sitk = sitk.GetImageFromArray(nda_3D_corrupted)
stack_corrupted = st.Stack.from_sitk_image(
stack_corrupted_sitk, "stack_corrupted")
# stack_corrupted.show_slices()
# sitkh.show_stacks([stack, stack_corrupted])
# 1) Get initial transform in case no reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted)
inplane_registration.set_transform_initializer_type("moments")
# inplane_registration.set_transform_initializer_type("identity")
# inplane_registration.set_transform_initializer_type("geometry")
inplane_registration._run_registration_pipeline_initialization()
# Debug:
# inplane_registration._apply_motion_correction()
# stack_corrected = inplane_registration.get_corrected_stack()
# sitkh.show_stacks(
# [stack,
# stack_corrupted,
# stack_corrected.get_resampled_stack_from_slices(
# interpolator="Linear", filename="stack_corrected")])
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
# 2) Get initial transform in case reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
inplane_registration.set_transform_initializer_type("moments")
# inplane_registration.set_transform_initializer_type("identity")
inplane_registration._run_registration_pipeline_initialization()
# Debug:
# inplane_registration._apply_motion_correction()
# stack_corrected = inplane_registration.get_corrected_stack()
# sitkh.show_stacks(
# [stack,
# stack_corrupted,
# stack_corrected.get_resampled_stack_from_slices(
# interpolator="Linear", filename="stack_corrected")])
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
# print(nda_diff)
# print(parameters)
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
##
# Test whether the function
# _get_initial_transforms_and_parameters_geometry_moments
# works.
# \date 2016-11-09 23:59:25+0000
#
# \param self The object
#
def test_initial_transform_computation_3(self):
# Create stack of slice with a pyramid in the middle
shape_xy = 250
shape_z = 15
intensity_mask = 10
length = 50
nda_2D = ph.read_image(os.path.join(
DIR_TEST, "2D_Pyramid_Midpoint_" + str(length) + ".png"))
# Original stack
nda_3D = np.zeros((shape_z, shape_xy, shape_xy))
i0 = (shape_xy - length) // 2
for i in range(0, shape_z):
nda_3D[i, i0:-i0, i0:-i0] = nda_2D
nda_3D_mask = np.array(nda_3D).astype(np.uint8)
nda_3D_mask[np.where(nda_3D_mask <= intensity_mask)] = 0
nda_3D_mask[np.where(nda_3D_mask > intensity_mask)] = 1
# Add additional weight s.t. initialization without mask fails
for i in range(0, shape_z):
nda_3D[i, -i0:, -i0:] = 10
stack_sitk = sitk.GetImageFromArray(nda_3D)
stack_sitk_mask = sitk.GetImageFromArray(nda_3D_mask)
stack = st.Stack.from_sitk_image(stack_sitk, "stack", stack_sitk_mask)
# Create 'motion corrupted stack', i.e. in-plane translation, and
# associated ground-truth parameters
parameters = np.zeros((shape_z, 3))
parameters[:, 0] = 0
nda_3D_corrupted = np.zeros_like(nda_3D)
nda_3D_corrupted[0, :, :] = nda_3D[0, :, :]
nda_3D_corrupted_mask = np.zeros_like(nda_3D_mask)
nda_3D_corrupted_mask[0, :, :] = nda_3D_mask[0, :, :]
for i in range(1, shape_z):
# Get random translation
[tx, ty] = np.random.randint(0, 50, 2)
# Get image based on corruption
inew = i0 + tx
jnew = i0 + ty
nda_3D_corrupted[i, inew:, jnew:] = \
nda_3D[i, i0:2*i0+length-tx, i0:2*i0+length-ty]
nda_3D_corrupted_mask[i, inew:, jnew:] = \
nda_3D_mask[i, i0:2*i0+length-tx, i0:2*i0+length-ty]
# Get ground-truth parameters
parameters[i, 1] = ty
parameters[i, 2] = tx
# nda_3D_corrupted = np.zeros_like(nda_3D)
# nda_3D_corrupted[0, i0:-i0, i0:-i0] = nda_2D
# for i in range(1, shape_z):
# # Get random translation
# [tx, ty] = np.random.randint(0, 50, 2)
# # Get image based on corruption
# inew = i0 + tx
# jnew = i0 + ty
# nda_3D_corrupted[i, inew:inew+length, jnew:jnew+length] = nda_2D
# # Get ground-truth parameters
# parameters[i, 1] = ty
# parameters[i, 2] = tx
stack_corrupted_sitk = sitk.GetImageFromArray(nda_3D_corrupted)
stack_corrupted_sitk_mask = sitk.GetImageFromArray(
nda_3D_corrupted_mask)
stack_corrupted = st.Stack.from_sitk_image(
stack_corrupted_sitk, "stack_corrupted", stack_corrupted_sitk_mask)
# stack_corrupted.show(1)
# stack_corrupted.show_slices()
# sitkh.show_stacks([stack, stack_corrupted],
# segmentation=stack)
# 1) Get initial transform in case no reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted,
use_stack_mask=True,
)
inplane_registration.set_transform_initializer_type("moments")
# inplane_registration.set_transform_initializer_type("identity")
# inplane_registration.set_transform_initializer_type("geometry")
inplane_registration._run_registration_pipeline_initialization()
# Debug:
# inplane_registration._apply_motion_correction()
# stack_corrected = inplane_registration.get_corrected_stack()
# sitkh.show_stacks(
# [stack,
# stack_corrupted,
# stack_corrected.get_resampled_stack_from_slices(
# interpolator="Linear", filename="stack_corrected")])
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
# 2) Get initial transform in case reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
inplane_registration.set_transform_initializer_type("moments")
# inplane_registration.set_transform_initializer_type("identity")
inplane_registration.use_reference_mask(True)
inplane_registration.use_stack_mask_reference_fit_term(True)
inplane_registration._run_registration_pipeline_initialization()
# Debug:
# inplane_registration._apply_motion_correction()
# stack_corrected = inplane_registration.get_corrected_stack()
# sitkh.show_stacks(
# [stack,
# stack_corrupted,
# stack_corrected.get_resampled_stack_from_slices(
# interpolator="Linear", filename="stack_corrected")])
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
# print(nda_diff)
# print(parameters)
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
##
# Test that initial intensity coefficients are computed
# correctly
# \date 2016-11-10 04:28:06+0000
#
# \param self The object
#
def test_initial_intensity_coefficient_computation(self):
# Create stack
shape_z = 15
nda_2D = imread(self.dir_test_data + "2D_Lena_256.png", flatten=True)
nda_3D = np.tile(nda_2D, (shape_z, 1, 1)).astype('double')
stack_sitk = sitk.GetImageFromArray(nda_3D)
stack = st.Stack.from_sitk_image(stack_sitk, "Lena")
# 1) Create linearly corrupted intensity stack
nda_3D_corruped = np.zeros_like(nda_3D)
for i in range(0, shape_z):
nda_3D_corruped[i, :, :] = nda_3D[i, :, :]/(i+1.)
stack_corrupted_sitk = sitk.GetImageFromArray(nda_3D_corruped)
stack_corrupted = st.Stack.from_sitk_image(
stack_corrupted_sitk, "stack_corrupted")
# stack_corrupted.show_slices()
# sitkh.show_stacks([stack, stack_corrupted])
# Ground truth-parameter: zero angle but translation = (1, 1) from one
# slice to the next
parameters = np.zeros((shape_z, 4))
parameters[:, 0] = 0
for i in range(0, shape_z):
parameters[i, 3:] = 1*(i+1.) # intensity
# Get initial transform in case no reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
# inplane_registration.set_transform_initializer_type("moments")
inplane_registration.set_intensity_correction_type_slice_neighbour_fit(
"linear")
inplane_registration.set_intensity_correction_initializer_type(
"linear")
inplane_registration._run_registration_pipeline_initialization()
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
# 2) Create affinely corrupted intensity stack
# HINT: In case of individual slice correction is active!!
nda_3D_corruped = np.zeros_like(nda_3D)
for i in range(0, shape_z):
nda_3D_corruped[i, :, :] = (nda_3D[i, :, :]-10*i)/(i+1.)
stack_corrupted_sitk = sitk.GetImageFromArray(nda_3D_corruped)
stack_corrupted = st.Stack.from_sitk_image(
stack_corrupted_sitk, "stack_corrupted")
# stack_corrupted.show_slices()
# sitkh.show_stacks([stack, stack_corrupted])
# Ground truth-parameter: zero angle but translation = (1, 1) from one
# slice to the next
parameters = np.zeros((shape_z, 5))
parameters[:, 0] = 0
for i in range(0, shape_z):
parameters[i, 3:] = (i+1, 10*i) # intensity
# Get initial transform in case no reference is given
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
# inplane_registration.set_transform_initializer_type("moments")
inplane_registration.set_intensity_correction_type_slice_neighbour_fit(
"affine")
inplane_registration.set_intensity_correction_initializer_type(
"affine")
inplane_registration._run_registration_pipeline_initialization()
parameters_est = inplane_registration.get_parameters()
nda_diff = parameters - parameters_est
self.assertEqual(np.round(
np.linalg.norm(nda_diff), decimals=self.accuracy), 0)
##
# Verify that in-plane rigid registration works
# \date 2016-11-02 21:56:19+0000
#
# Verify that in-plane rigid registration works, i.e. test
# 1) registration parameters are close to ground truth (up to zero dp)
# 2) affine transformations for each slice correctly describes the
# registration
#
# \param self The object
#
def test_inplane_rigid_alignment_to_neighbour(self):
filename_stack = "fetal_brain_0"
# filename_recon = "FetalBrain_reconstruction_3stacks_myAlg"
# stack_sitk = sitk.ReadImage(self.dir_test_data + filename_stack + ".nii.gz")
# recon_sitk = sitk.ReadImage(self.dir_test_data + filename_recon + ".nii.gz")
# recon_resampled_sitk = sitk.Resample(recon_sitk, stack_sitk)
# stack = st.Stack.from_sitk_image(recon_resampled_sitk, "original")
stack = st.Stack.from_filename(
os.path.join(self.dir_test_data, filename_stack + ".nii.gz"),
os.path.join(self.dir_test_data, filename_stack + "_mask.nii.gz")
)
nda = sitk.GetArrayFromImage(stack.sitk)
nda_mask = sitk.GetArrayFromImage(stack.sitk_mask)
i = 5
nda_slice = np.array(nda[i, :, :])
nda_mask_slice = np.array(nda_mask[i, :, :])
for i in range(0, nda.shape[0]):
nda[i, :, :] = nda_slice
nda_mask[i, :, :] = nda_mask_slice
stack_sitk = sitk.GetImageFromArray(nda)
stack_sitk_mask = sitk.GetImageFromArray(nda_mask)
stack_sitk.CopyInformation(stack.sitk)
stack_sitk_mask.CopyInformation(stack.sitk_mask)
stack = st.Stack.from_sitk_image(
stack_sitk, stack.get_filename(), stack_sitk_mask)
# Create in-plane motion corruption
angle_z = 0.1
center_2D = (0, 0)
translation_2D = np.array([1, -2])
# Get corrupted stack and corresponding motions
stack_corrupted, motion_sitk, motion_2_sitk = get_inplane_corrupted_stack(
stack, angle_z, center_2D, translation_2D, random=True)
# stack.show(1)
# stack_corrupted.show(1)
# Perform in-plane rigid registration
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
# inplane_registration = inplanereg.IntraStackRegistration(stack_corrupted)
inplane_registration.set_transform_initializer_type("moments")
inplane_registration.set_optimizer_iter_max(20)
inplane_registration.set_alpha_neighbour(1)
inplane_registration.set_alpha_reference(2)
# inplane_registration.use_parameter_normalization(True)
inplane_registration.use_stack_mask(1)
inplane_registration.use_reference_mask(0)
# inplane_registration.set_optimizer_loss("linear") # linear, soft_l1,
# huber
inplane_registration.set_optimizer_method("trf") # trf, lm, dogbox
# inplane_registration._run_registration_pipeline_initialization()
# inplane_registration._apply_motion_correction()
inplane_registration.use_verbose(True)
inplane_registration.run()
inplane_registration.print_statistics()
stack_registered = inplane_registration.get_corrected_stack()
parameters = inplane_registration.get_parameters()
sitkh.show_stacks([stack, stack_corrupted, stack_registered.get_resampled_stack_from_slices(
interpolator="Linear")])
# self.assertEqual(np.round(
# np.linalg.norm(nda_diff)
# , decimals = self.accuracy), 0)
# 2) Test slice transforms
slice_transforms_sitk = inplane_registration.get_slice_transforms_sitk()
stack_tmp = st.Stack.from_stack(stack_corrupted)
stack_tmp.update_motion_correction_of_slices(slice_transforms_sitk)
stack_diff_sitk = stack_tmp.get_resampled_stack_from_slices(
resampling_grid=stack.sitk).sitk - stack_registered.get_resampled_stack_from_slices(resampling_grid=stack.sitk).sitk
stack_diff_nda = sitk.GetArrayFromImage(stack_diff_sitk)
self.assertEqual(np.round(
np.linalg.norm(stack_diff_nda), decimals=8), 0)
def test_inplane_rigid_alignment_to_reference(self):
filename_stack = "fetal_brain_0"
# filename_recon = "FetalBrain_reconstruction_3stacks_myAlg"
# stack_sitk = sitk.ReadImage(self.dir_test_data + filename_stack + ".nii.gz")
# recon_sitk = sitk.ReadImage(self.dir_test_data + filename_recon + ".nii.gz")
# recon_resampled_sitk = sitk.Resample(recon_sitk, stack_sitk)
# stack = st.Stack.from_sitk_image(recon_resampled_sitk, "original")
stack = st.Stack.from_filename(
os.path.join(self.dir_test_data, filename_stack + ".nii.gz"),
os.path.join(self.dir_test_data, filename_stack + "_mask.nii.gz")
)
# Create in-plane motion corruption
angle_z = 0.1
center_2D = (0, 0)
translation_2D = np.array([1, -2])
# Get corrupted stack and corresponding motions
stack_corrupted, motion_sitk, motion_2_sitk = get_inplane_corrupted_stack(
stack, angle_z, center_2D, translation_2D)
# stack.show(1)
# stack_corrupted.show(1)
# Perform in-plane rigid registration
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
# inplane_registration = inplanereg.IntraStackRegistration(stack_corrupted)
inplane_registration.set_transform_initializer_type("moments")
inplane_registration.set_optimizer_iter_max(10)
inplane_registration.set_alpha_neighbour(0)
inplane_registration.set_alpha_parameter(0)
inplane_registration.use_stack_mask(1)
inplane_registration.use_reference_mask(0)
inplane_registration.set_optimizer_loss("linear")
# inplane_registration.set_optimizer_method("trf")
# inplane_registration._run_registration_pipeline_initialization()
# inplane_registration._apply_motion_correction()
# inplane_registration.use_verbose(True)
inplane_registration.run()
inplane_registration.print_statistics()
stack_registered = inplane_registration.get_corrected_stack()
parameters = inplane_registration.get_parameters()
sitkh.show_stacks([stack, stack_corrupted, stack_registered.get_resampled_stack_from_slices(
interpolator="Linear", resampling_grid=stack.sitk)])
print(parameters)
# self.assertEqual(np.round(
# np.linalg.norm(nda_diff)
# , decimals = self.accuracy), 0)
# 2) Test slice transforms
slice_transforms_sitk = inplane_registration.get_slice_transforms_sitk()
stack_tmp = st.Stack.from_stack(stack_corrupted)
stack_tmp.update_motion_correction_of_slices(slice_transforms_sitk)
stack_diff_sitk = stack_tmp.get_resampled_stack_from_slices(
resampling_grid=stack.sitk).sitk - stack_registered.get_resampled_stack_from_slices(resampling_grid=stack.sitk).sitk
stack_diff_nda = sitk.GetArrayFromImage(stack_diff_sitk)
self.assertEqual(np.round(
np.linalg.norm(stack_diff_nda), decimals=8), 0)
def test_inplane_rigid_alignment_to_reference_with_intensity_correction_linear(self):
filename_stack = "fetal_brain_0"
filename_recon = "FetalBrain_reconstruction_3stacks_myAlg"
stack_sitk = sitk.ReadImage(
self.dir_test_data + filename_stack + ".nii.gz")
recon_sitk = sitk.ReadImage(
self.dir_test_data + filename_recon + ".nii.gz")
recon_resampled_sitk = sitk.Resample(recon_sitk, stack_sitk)
stack = st.Stack.from_sitk_image(recon_resampled_sitk, "original")
# Create in-plane motion corruption
angle_z = 0.05
center_2D = (0, 0)
translation_2D = np.array([1, -2])
intensity_scale = 10
intensity_bias = 0
# Get corrupted stack and corresponding motions
stack_corrupted, motion_sitk, motion_2_sitk = get_inplane_corrupted_stack(
stack, angle_z, center_2D, translation_2D, intensity_scale=intensity_scale, intensity_bias=intensity_bias)
# Perform in-plane rigid registration
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
# inplane_registration = inplanereg.IntraStackRegistration(stack_corrupted)
inplane_registration.set_transform_initializer_type("moments")
inplane_registration.set_transform_type("rigid")
inplane_registration.set_intensity_correction_initializer_type(
"linear")
inplane_registration.set_intensity_correction_type_slice_neighbour_fit(
"linear")
inplane_registration.set_intensity_correction_type_reference_fit(
"linear")
inplane_registration.set_optimizer_loss(
"linear") # linear, soft_l1, huber
inplane_registration.use_parameter_normalization(True)
inplane_registration.use_verbose(True)
inplane_registration.set_alpha_reference(1)
inplane_registration.set_alpha_neighbour(0)
inplane_registration.set_alpha_parameter(0)
inplane_registration.set_optimizer_iter_max(30)
inplane_registration.use_verbose(True)
inplane_registration.run()
inplane_registration.print_statistics()
stack_registered = inplane_registration.get_corrected_stack()
parameters = inplane_registration.get_parameters()
sitkh.show_stacks([stack, stack_corrupted, stack_registered.get_resampled_stack_from_slices(
resampling_grid=None, interpolator="Linear")])
print("Final parameters:")
print(parameters)
self.assertEqual(np.round(
np.linalg.norm(parameters[:, -1] - intensity_scale), decimals=0), 0)
# 2) Test slice transforms
slice_transforms_sitk = inplane_registration.get_slice_transforms_sitk()
stack_tmp = st.Stack.from_stack(stack_corrupted)
stack_tmp.update_motion_correction_of_slices(slice_transforms_sitk)
stack_diff_sitk = stack_tmp.get_resampled_stack_from_slices(
resampling_grid=stack.sitk).sitk - stack_registered.get_resampled_stack_from_slices(resampling_grid=stack.sitk).sitk
stack_diff_nda = sitk.GetArrayFromImage(stack_diff_sitk)
self.assertEqual(np.round(
np.linalg.norm(stack_diff_nda), decimals=8), 0)
##
# \bug There is some issue with slice based and uniform intensity correction.
# Unit test needs to be fixed at some point
# \date 2017-07-12 12:40:01+0100
#
# \param self The object
#
def test_inplane_rigid_alignment_to_reference_with_intensity_correction_affine(self):
filename_stack = "fetal_brain_0"
filename_recon = "FetalBrain_reconstruction_3stacks_myAlg"
stack_sitk = sitk.ReadImage(
self.dir_test_data + filename_stack + ".nii.gz")
recon_sitk = sitk.ReadImage(
self.dir_test_data + filename_recon + ".nii.gz")
recon_resampled_sitk = sitk.Resample(recon_sitk, stack_sitk)
stack = st.Stack.from_sitk_image(recon_resampled_sitk, "original")
# Create in-plane motion corruption
angle_z = 0.01
center_2D = (0, 0)
translation_2D = np.array([1, 0])
intensity_scale = 5
intensity_bias = 5
# Get corrupted stack and corresponding motions
stack_corrupted, motion_sitk, motion_2_sitk = get_inplane_corrupted_stack(
stack, angle_z, center_2D, translation_2D, intensity_scale=intensity_scale, intensity_bias=intensity_bias)
# Perform in-plane rigid registration
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
# inplane_registration = inplanereg.IntraStackRegistration(stack_corrupted)
inplane_registration.set_transform_type("rigid")
inplane_registration.set_transform_initializer_type("identity")
inplane_registration.set_optimizer_loss("linear")
inplane_registration.set_intensity_correction_initializer_type(
"affine")
inplane_registration.set_intensity_correction_type_slice_neighbour_fit(
"affine")
inplane_registration.use_parameter_normalization(True)
inplane_registration.use_verbose(True)
inplane_registration.use_stack_mask(True)
inplane_registration.set_prior_intensity_coefficients(
(intensity_scale-0.4, intensity_bias+0.7))
inplane_registration.set_alpha_reference(1)
inplane_registration.set_alpha_neighbour(1)
inplane_registration.set_alpha_parameter(1e3)
inplane_registration.set_optimizer_iter_max(15)
inplane_registration.use_verbose(True)
inplane_registration.run()
inplane_registration.print_statistics()
stack_registered = inplane_registration.get_corrected_stack()
parameters = inplane_registration.get_parameters()
sitkh.show_stacks([stack, stack_corrupted, stack_registered.get_resampled_stack_from_slices(
resampling_grid=None, interpolator="Linear")])
self.assertEqual(np.round(
np.linalg.norm(parameters[:, -2:] - np.array([intensity_scale, intensity_bias])), decimals=0), 0)
# 2) Test slice transforms
slice_transforms_sitk = inplane_registration.get_slice_transforms_sitk()
stack_tmp = st.Stack.from_stack(stack_corrupted)
stack_tmp.update_motion_correction_of_slices(slice_transforms_sitk)
stack_diff_sitk = stack_tmp.get_resampled_stack_from_slices(
resampling_grid=stack.sitk).sitk - stack_registered.get_resampled_stack_from_slices(resampling_grid=stack.sitk).sitk
stack_diff_nda = sitk.GetArrayFromImage(stack_diff_sitk)
self.assertEqual(np.round(
np.linalg.norm(stack_diff_nda), decimals=8), 0)
def test_inplane_similarity_alignment_to_reference(self):
filename_stack = "fetal_brain_0"
# filename_stack = "3D_SheppLoganPhantom_64"
stack = st.Stack.from_filename(
os.path.join(self.dir_test_data, filename_stack + ".nii.gz"),
os.path.join(self.dir_test_data, filename_stack + "_mask.nii.gz")
)
# stack.show(1)
nda = sitk.GetArrayFromImage(stack.sitk)
nda_mask = sitk.GetArrayFromImage(stack.sitk_mask)
i = 5
nda_slice = np.array(nda[i, :, :])
nda_mask_slice = np.array(nda_mask[i, :, :])
for i in range(0, nda.shape[0]):
nda[i, :, :] = nda_slice
nda_mask[i, :, :] = nda_mask_slice
stack_sitk = sitk.GetImageFromArray(nda)
stack_sitk_mask = sitk.GetImageFromArray(nda_mask)
stack_sitk.CopyInformation(stack.sitk)
stack_sitk_mask.CopyInformation(stack.sitk_mask)
stack = st.Stack.from_sitk_image(
stack_sitk, stack.get_filename(), stack_sitk_mask)
# Create in-plane motion corruption
scale = 1.2
angle_z = 0.05
center_2D = (0, 0)
# translation_2D = np.array([0,0])
translation_2D = np.array([1, -1])
intensity_scale = 10
intensity_bias = 50
# Get corrupted stack and corresponding motions
stack_corrupted, motion_sitk, motion_2_sitk = get_inplane_corrupted_stack(
stack, angle_z, center_2D, translation_2D, scale=scale, intensity_scale=intensity_scale, intensity_bias=intensity_bias, debug=0)
# stack_corrupted.show(1)
# stack.show(1)
# Perform in-plane rigid registrations
inplane_registration = inplanereg.IntraStackRegistration(
stack=stack_corrupted, reference=stack)
# inplane_registration = inplanereg.IntraStackRegistration(stack_corrupted)
inplane_registration.set_transform_initializer_type("geometry")
# inplane_registration.set_transform_initializer_type("identity")
inplane_registration.set_intensity_correction_initializer_type(
"affine")
inplane_registration.set_transform_type("similarity")
inplane_registration.set_interpolator("Linear")
inplane_registration.set_optimizer_loss("linear")
# inplane_registration.use_reference_mask(True)
inplane_registration.use_stack_mask(True)
inplane_registration.use_parameter_normalization(True)
inplane_registration.set_prior_scale(1/scale)
inplane_registration.set_prior_intensity_coefficients(
(intensity_scale, intensity_bias))
inplane_registration.set_intensity_correction_type_slice_neighbour_fit(
"affine")
inplane_registration.set_intensity_correction_type_reference_fit(
"affine")
inplane_registration.use_verbose(True)
inplane_registration.set_alpha_reference(1)
inplane_registration.set_alpha_neighbour(0)
inplane_registration.set_alpha_parameter(1e10)
inplane_registration.set_optimizer_iter_max(20)
inplane_registration.use_verbose(True)
inplane_registration.run()
inplane_registration.print_statistics()
# inplane_registration._run_registration_pipeline_initialization()
# inplane_registration._apply_motion_correction()
stack_registered = inplane_registration.get_corrected_stack()
parameters = inplane_registration.get_parameters()
sitkh.show_sitk_image([stack.sitk, stack_corrupted.get_resampled_stack_from_slices(interpolator="Linear", resampling_grid=stack.sitk).sitk,
stack_registered.get_resampled_stack_from_slices(interpolator="Linear", resampling_grid=stack.sitk).sitk], label=["original", "corrupted", "recovered"])
# self.assertEqual(np.round(
# np.linalg.norm(nda_diff)
# , decimals = self.accuracy), 0)
# 2) Test slice transforms
slice_transforms_sitk = inplane_registration.get_slice_transforms_sitk()
stack_tmp = st.Stack.from_stack(stack_corrupted)
stack_tmp.update_motion_correction_of_slices(slice_transforms_sitk)
stack_diff_sitk = stack_tmp.get_resampled_stack_from_slices(
resampling_grid=stack.sitk).sitk - stack_registered.get_resampled_stack_from_slices(resampling_grid=stack.sitk).sitk
stack_diff_nda = sitk.GetArrayFromImage(stack_diff_sitk)
self.assertEqual(np.round(
np.linalg.norm(stack_diff_nda), decimals=8), 0)
def test_inplane_rigid_alignment_to_reference_multimodal(self):
filename_stack = "fetal_brain_0"
filename_recon = "FetalBrain_reconstruction_3stacks_myAlg"
stack_tmp = st.Stack.from_filename(
os.path.join(self.dir_test_data, filename_stack + ".nii.gz"),
os.path.join(self.dir_test_data, filename_stack + "_mask.nii.gz")
)
recon = st.Stack.from_filename(
os.path.join(self.dir_test_data, filename_recon)
)
recon_sitk = recon.get_resampled_stack_from_slices(
resampling_grid=stack_tmp.sitk, interpolator="Linear").sitk
stack = st.Stack.from_sitk_image(
recon_sitk, "original", stack_tmp.sitk_mask)
# recon_resampled_sitk = sitk.Resample(recon_sitk, stack_sitk)
# stack = st.Stack.from_sitk_image(recon_resampled_sitk, "original")
# Create in-plane motion corruption
scale = 1.05
angle_z = 0.05
center_2D = (0, 0)
translation_2D = np.array([1, -2])
intensity_scale = 1
intensity_bias = 0
# Get corrupted stack and corresponding motions
stack_corrupted, motion_sitk, motion_2_sitk = get_inplane_corrupted_stack(
stack, angle_z, center_2D, translation_2D, intensity_scale=intensity_scale, scale=scale, intensity_bias=intensity_bias)
# stack_corrupted.show(1)
# stack.show(1)
# Perform in-plane rigid registration
inplane_registration = inplanereg.IntraStackRegistration(
stack_corrupted, stack)
# inplane_registration = inplanereg.IntraStackRegistration(stack_corrupted)
# inplane_registration.set_image_transform_reference_fit_term("gradient_magnitude")
inplane_registration.set_image_transform_reference_fit_term(
"partial_derivative")
inplane_registration.set_transform_initializer_type("moments")
# inplane_registration.set_transform_type("similarity")
inplane_registration.set_intensity_correction_initializer_type(None)
inplane_registration.set_intensity_correction_type_slice_neighbour_fit(
None)
inplane_registration.set_intensity_correction_type_reference_fit(None)
inplane_registration.use_parameter_normalization(True)
inplane_registration.use_verbose(True)
inplane_registration.set_optimizer_loss(
"linear") # linear, soft_l1, huber
inplane_registration.set_alpha_reference(100)
inplane_registration.set_alpha_neighbour(0)
inplane_registration.set_alpha_parameter(1)
# inplane_registration.use_stack_mask(True)
# inplane_registration.use_reference_mask(True)
inplane_registration.set_optimizer_iter_max(10)
inplane_registration.run()
inplane_registration.print_statistics()
stack_registered = inplane_registration.get_corrected_stack()
parameters = inplane_registration.get_parameters()
sitkh.show_stacks([stack, stack_corrupted, stack_registered.get_resampled_stack_from_slices(
resampling_grid=None, interpolator="Linear")])
# print("Final parameters:")
# print(parameters)
# self.assertEqual(np.round(
# np.linalg.norm(parameters[:,-1] - intensity_scale)
# , decimals = 0), 0)
# 2) Test slice transforms
slice_transforms_sitk = inplane_registration.get_slice_transforms_sitk()
stack_tmp = st.Stack.from_stack(stack_corrupted)
stack_tmp.update_motion_correction_of_slices(slice_transforms_sitk)
stack_diff_sitk = stack_tmp.get_resampled_stack_from_slices(
resampling_grid=stack.sitk).sitk - stack_registered.get_resampled_stack_from_slices(resampling_grid=stack.sitk).sitk
stack_diff_nda = sitk.GetArrayFromImage(stack_diff_sitk)
self.assertEqual(np.round(
np.linalg.norm(stack_diff_nda), decimals=8), 0)
def test_inplane_uniform_scale_similarity_alignment_to_reference(self):
filename_stack = "fetal_brain_0"
# filename_stack = "3D_SheppLoganPhantom_64"
stack = st.Stack.from_filename(
os.path.join(self.dir_test_data, filename_stack + ".nii.gz"),
os.path.join(self.dir_test_data, filename_stack + "_mask.nii.gz")
)
# stack.show(1)
nda = sitk.GetArrayFromImage(stack.sitk)
nda_mask = sitk.GetArrayFromImage(stack.sitk_mask)
i = 5
nda_slice = np.array(nda[i, :, :])
nda_mask_slice = np.array(nda_mask[i, :, :])
for i in range(0, nda.shape[0]): # 23 slices
nda[i, :, :] = nda_slice
nda_mask[i, :, :] = nda_mask_slice
stack_sitk = sitk.GetImageFromArray(nda)
stack_sitk_mask = sitk.GetImageFromArray(nda_mask)
stack_sitk.CopyInformation(stack.sitk)
stack_sitk_mask.CopyInformation(stack.sitk_mask)
stack = st.Stack.from_sitk_image(
stack_sitk, stack.get_filename(), stack_sitk_mask)
# Create in-plane motion corruption
# scale = 1.2
scale = 1
angle_z = 0.05
center_2D = (0, 0)
# translation_2D = np.array([0,0])
translation_2D = np.array([1, -1])
intensity_scale = 1
intensity_bias = 0
# Get corrupted stack and corresponding motions
stack_corrupted, motion_sitk, motion_2_sitk = get_inplane_corrupted_stack(
stack, angle_z, center_2D, translation_2D, scale=scale, intensity_scale=intensity_scale, intensity_bias=intensity_bias, debug=0)
# stack_corrupted.show(1)
# stack.show(1)
# Perform in-plane rigid registrations
inplane_registration = inplanereg.IntraStackRegistration(
stack=stack_corrupted,
reference=stack,
use_stack_mask=True,
use_reference_mask=True,
interpolator="Linear",
use_verbose=True,
)
# inplane_registration = inplanereg.IntraStackRegistration(stack_corrupted)
inplane_registration.set_transform_initializer_type("geometry")
# inplane_registration.set_transform_initializer_type("identity")
inplane_registration.set_intensity_correction_initializer_type(
"affine")
# inplane_registration.set_transform_type("similarity")
inplane_registration.set_transform_type("rigid")
# inplane_registration.set_optimizer("least_squares")
# inplane_registration.set_optimizer("BFGS")
# inplane_registration.set_optimizer("L-BFGS-B")
inplane_registration.set_optimizer("TNC")
# inplane_registration.set_optimizer("Powell")
# inplane_registration.set_optimizer("CG")
# inplane_registration.set_optimizer("Newton-CG")
inplane_registration.set_optimizer_loss("linear")
# inplane_registration.set_optimizer_loss("soft_l1")
# inplane_registration.set_optimizer_loss("arctan")
# inplane_registration.use_parameter_normalization(True)
inplane_registration.set_prior_scale(1/scale)
inplane_registration.set_prior_intensity_coefficients(
(intensity_scale, intensity_bias))
# inplane_registration.set_intensity_correction_type_slice_neighbour_fit(
# "affine")
# inplane_registration.set_intensity_correction_type_reference_fit(
# "affine")
inplane_registration.set_alpha_reference(1)
inplane_registration.set_alpha_neighbour(0)
inplane_registration.set_alpha_parameter(0)
inplane_registration.set_optimizer_iter_max(30)
inplane_registration.run()
inplane_registration.print_statistics()
# inplane_registration._run_registration_pipeline_initialization()
# inplane_registration._apply_motion_correction()
stack_registered = inplane_registration.get_corrected_stack()
parameters = inplane_registration.get_parameters()
sitkh.show_sitk_image([stack.sitk, stack_corrupted.get_resampled_stack_from_slices(interpolator="Linear", resampling_grid=stack.sitk).sitk,
stack_registered.get_resampled_stack_from_slices(interpolator="Linear", resampling_grid=stack.sitk).sitk], label=["original", "corrupted", "recovered"])
# self.assertEqual(np.round(
# np.linalg.norm(nda_diff)
# , decimals = self.accuracy), 0)
# 2) Test slice transforms
slice_transforms_sitk = inplane_registration.get_slice_transforms_sitk()
stack_tmp = st.Stack.from_stack(stack_corrupted)
stack_tmp.update_motion_correction_of_slices(slice_transforms_sitk)
stack_diff_sitk = stack_tmp.get_resampled_stack_from_slices(
resampling_grid=stack.sitk).sitk - stack_registered.get_resampled_stack_from_slices(resampling_grid=stack.sitk).sitk
stack_diff_nda = sitk.GetArrayFromImage(stack_diff_sitk)
self.assertEqual(np.round(
np.linalg.norm(stack_diff_nda), decimals=8), 0)
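# Hedged addition: the original file defines the test case only; a standard
# unittest entry point is assumed when running this module directly.
if __name__ == '__main__':
    unittest.main()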
| 41.542128
| 183
| 0.682004
| 5,653
| 48,812
| 5.523439
| 0.064391
| 0.133263
| 0.071868
| 0.030778
| 0.906578
| 0.889348
| 0.87058
| 0.849763
| 0.835479
| 0.817224
| 0
| 0.01757
| 0.233918
| 48,812
| 1,174
| 184
| 41.577513
| 0.817431
| 0.232095
| 0
| 0.729894
| 0
| 0
| 0.027065
| 0.003901
| 0
| 0
| 0
| 0
| 0.025797
| 1
| 0.019727
| false
| 0.001517
| 0.018209
| 0
| 0.044006
| 0.015175
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83c6db590ffcba51cf08b25c1673a46017f27dca | 115,543 | py | Python | com/vmware/esx/settings/clusters/software_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | [ "MIT" ] | null | null | null | com/vmware/esx/settings/clusters/software_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | [ "MIT" ] | null | null | null | com/vmware/esx/settings/clusters/software_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | [ "MIT" ] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.esx.settings.clusters.software.
#---------------------------------------------------------------------------
"""
The ``com.vmware.esx.settings.clusters.software_client`` module provides
classes to manage desired state software for ESX cluster.
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from com.vmware.cis_client import Tasks
from vmware.vapi.stdlib.client.task import Task
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class AddOn(VapiInterface):
"""
The ``AddOn`` class provides methods to manage desired OEM add-on
specification for a given cluster.
"""
RESOURCE_TYPE = "com.vmware.esx.settings.add_on"
"""
Resource type for add-on resource
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.add_on'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _AddOnStub)
self._VAPI_OPERATION_IDS = {}
def get(self,
cluster,
):
"""
Returns the desired OEM add-on specification for a given cluster.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`com.vmware.esx.settings_client.AddOnInfo`
:return: Desired OEM add-on specification.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
if desired OEM add-on specification is not found.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
if the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('get',
{
'cluster': cluster,
})
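# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal, hedged example of reading the desired OEM add-on for a cluster.
# It assumes 'stub_config' is a vmware.vapi.bindings.stub.StubConfiguration
# created elsewhere (e.g. via the SDK's connection helpers) and 'domain-c8' is
# a placeholder cluster identifier.
#
#     add_on_svc = AddOn(stub_config)
#     add_on_info = add_on_svc.get('domain-c8')  # -> settings_client.AddOnInfo
#     print(add_on_info)
# -------------------------------------------------------------------------------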
class BaseImage(VapiInterface):
"""
The ``BaseImage`` class provides methods to manage desired ESX base image.
"""
RESOURCE_TYPE = "com.vmware.esx.settings.base_image"
"""
Resource type for base-image resource
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.base_image'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _BaseImageStub)
self._VAPI_OPERATION_IDS = {}
def get(self,
cluster,
):
"""
Returns the desired base-image specification set for a given cluster.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`com.vmware.esx.settings_client.BaseImageInfo`
:return: Base-image specification.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
if desired specification is not found.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
if the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('get',
{
'cluster': cluster,
})
class Commits(VapiInterface):
"""
The ``Commits`` class provides methods to manage committed changes to
desired software document.
"""
RESOURCE_TYPE = "com.vmware.esx.settings.commit"
"""
Resource type for commit resource
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.commits'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _CommitsStub)
self._VAPI_OPERATION_IDS = {}
class Info(VapiStruct):
"""
The ``Commits.Info`` class defines the information about software draft.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
author=None,
commit_time=None,
description=None,
apply_status=None,
):
"""
:type author: :class:`str`
:param author: Author of the commit.
:type commit_time: :class:`datetime.datetime`
:param commit_time: Creation time of the commit.
:type description: :class:`str`
:param description: Description accompanying this commit.
:type apply_status: :class:`Commits.Info.ApplyStatusType`
:param apply_status: Apply status of the commit.
"""
self.author = author
self.commit_time = commit_time
self.description = description
self.apply_status = apply_status
VapiStruct.__init__(self)
class ApplyStatusType(Enum):
"""
The ``Commits.Info.ApplyStatusType`` class defines possible values
regarding the application of this commit.
.. note::
This class represents an enumerated type in the interface language
definition. The class contains class attributes which represent the
values in the current version of the enumerated type. Newer versions of
the enumerated type may contain new values. To use new values of the
enumerated type in communication with a server that supports the newer
version of the API, you instantiate this class. See :ref:`enumerated
type description page <enumeration_description>`.
"""
APPLIED = None
"""
Commit has been applied to the cluster.
"""
NOT_APPLIED = None
"""
Commit hasn't been applied to the cluster.
"""
def __init__(self, string):
"""
:type string: :class:`str`
:param string: String value for the :class:`ApplyStatusType` instance.
"""
Enum.__init__(string)
ApplyStatusType._set_values([
ApplyStatusType('APPLIED'),
ApplyStatusType('NOT_APPLIED'),
])
ApplyStatusType._set_binding_type(type.EnumType(
'com.vmware.esx.settings.clusters.software.commits.info.apply_status_type',
ApplyStatusType))
Info._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.commits.info', {
'author': type.StringType(),
'commit_time': type.DateTimeType(),
'description': type.StringType(),
'apply_status': type.ReferenceType(__name__, 'Commits.Info.ApplyStatusType'),
},
Info,
False,
None))
def get(self,
cluster,
commit,
):
"""
Returns the information about a specific commit.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type commit: :class:`str`
:param commit: Identifier of the specific commit.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.commit``.
:rtype: :class:`Commits.Info`
:return: Information about the commit.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
if desired specification commit is not found.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
"""
return self._invoke('get',
{
'cluster': cluster,
'commit': commit,
})
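# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of looking up a single commit of the desired software
# document. 'stub_config', 'domain-c8' and 'commit-1' are placeholders for a
# StubConfiguration, a cluster identifier and a commit identifier obtained
# elsewhere.
#
#     commits_svc = Commits(stub_config)
#     commit_info = commits_svc.get('domain-c8', 'commit-1')  # -> Commits.Info
#     print(commit_info.author, commit_info.commit_time, commit_info.apply_status)
# -------------------------------------------------------------------------------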
class Compliance(VapiInterface):
"""
The ``Compliance`` class provides methods to get the last software
compliance result for an ESX cluster.
"""
RESOURCE_TYPE = "ClusterComputeResource"
"""
Resource type for cluster resource
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.compliance'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _ComplianceStub)
self._VAPI_OPERATION_IDS = {}
def get(self,
cluster,
):
"""
Returns the compliance state for the cluster
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`com.vmware.esx.settings_client.ClusterCompliance`
:return: Cluster compliance result.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
if the compliance information is unavailable.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
if the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('get',
{
'cluster': cluster,
})
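# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of fetching the last computed software compliance result
# for a cluster. 'stub_config' and 'domain-c8' are placeholders as above.
#
#     compliance_svc = Compliance(stub_config)
#     cluster_compliance = compliance_svc.get('domain-c8')
#     # cluster_compliance is a com.vmware.esx.settings_client.ClusterCompliance
# -------------------------------------------------------------------------------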
class Components(VapiInterface):
"""
The ``Components`` class provides methods to get desired component
specification for an ESX cluster.
"""
RESOURCE_TYPE = "com.vmware.esx.settings.component"
"""
Resource type for component resource
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.components'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _ComponentsStub)
self._VAPI_OPERATION_IDS = {}
def get(self,
cluster,
component,
):
"""
Returns the component version for the given component in the desired
software specification.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type component: :class:`str`
:param component: Identifier of the component.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.component``.
:rtype: :class:`com.vmware.esx.settings_client.ComponentInfo` or ``None``
:return: Details about the component version.
If None then version is supposed to be chosen based on the
constraints in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
If invalid component name is provided.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
no component associated with ``component`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('get',
{
'cluster': cluster,
'component': component,
})
def list(self,
cluster,
):
"""
Returns a list of components in the desired software specification.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`dict` of :class:`str` and :class:`com.vmware.esx.settings_client.ComponentInfo`
:return: Map of ComponentInfo keyed by the component identifier. If no
version is specified in desired software specification, then
ComponentInfo will not be present for that component.
The key in the return value :class:`dict` will be an identifier for
the resource type: ``com.vmware.esx.settings.component``.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is some unknown internal error. The accompanying error
message will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('list',
{
'cluster': cluster,
})
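# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of enumerating the components in the desired software
# specification. 'stub_config' and 'domain-c8' are placeholders as above.
#
#     components_svc = Components(stub_config)
#     for component_id, component_info in components_svc.list('domain-c8').items():
#         # component_info may be unset when no explicit version is pinned
#         # (see the docstring above)
#         print(component_id, component_info)
# -------------------------------------------------------------------------------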
class Drafts(VapiInterface):
"""
The ``Drafts`` class provides methods to manage working copy of software
documents.
"""
RESOURCE_TYPE = "com.vmware.esx.settings.draft"
"""
Resource type for draft resource
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.drafts'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _DraftsStub)
self._VAPI_OPERATION_IDS = {}
self._VAPI_OPERATION_IDS.update({'commit_task': 'commit$task'})
self._VAPI_OPERATION_IDS.update({'validate_task': 'validate$task'})
self._VAPI_OPERATION_IDS.update({'scan_task': 'scan$task'})
class StatusType(Enum):
"""
The ``Drafts.StatusType`` class defines possible values of status of a
software draft.
.. note::
This class represents an enumerated type in the interface language
definition. The class contains class attributes which represent the
values in the current version of the enumerated type. Newer versions of
the enumerated type may contain new values. To use new values of the
enumerated type in communication with a server that supports the newer
version of the API, you instantiate this class. See :ref:`enumerated
type description page <enumeration_description>`.
"""
VALID = None
"""
Software draft is valid.
"""
INVALID = None
"""
Software draft is invalid.
"""
def __init__(self, string):
"""
:type string: :class:`str`
:param string: String value for the :class:`StatusType` instance.
"""
Enum.__init__(string)
StatusType._set_values([
StatusType('VALID'),
StatusType('INVALID'),
])
StatusType._set_binding_type(type.EnumType(
'com.vmware.esx.settings.clusters.software.drafts.status_type',
StatusType))
class SourceType(Enum):
"""
The ``Drafts.SourceType`` class defines possible values of sources to
import software specification.
.. note::
This class represents an enumerated type in the interface language
definition. The class contains class attributes which represent the
values in the current version of the enumerated type. Newer versions of
the enumerated type may contain new values. To use new values of the
enumerated type in communication with a server that supports the newer
version of the API, you instantiate this class. See :ref:`enumerated
type description page <enumeration_description>`.
"""
PULL = None
"""
Content is pulled from the URL location. The URL scheme of the value in
{\\\\@link #pullLocation} can be http, https or file.
"""
PUSH = None
"""
Content was previously uploaded using the file upload endpoint present on
the vCenter appliance. This endpoint is available at the
https://VCENTERFQDN:9087/vum-fileupload URL.
"""
JSON_STRING = None
"""
The string representing the content of the software specification.
"""
LATEST_RECOMMENDATION = None
"""
Content is from recommended image specification based on latest base image
version. Recommendations can be generated using {\\\\@link:
com.vmware.esx.settings.clusters.software.Recommendations#generate}.
"""
CURRENT_SERIES_RECOMMENDATION = None
"""
Content is from recommended image specification based on latest base image
patch or update of the current series. For example, a cluster's current
desired base image is 7.0. Recommendation engine will look into any
recommendable image specification with 7.0 series base images available at
depot manager and try to recommend the highest version within 7.0 series if
possible. Let's say in this example, depot manager has 7.0 patch a and 7.0
update 1 base images. Recommendation engine would first validate all
possible images based on 7.0 update 1. If it finds a valid one, it will
store the recommended content with that series. This enum value will point
to that stored recommended image content. Recommendations can be generated
using {\\\\@link:
com.vmware.esx.settings.clusters.software.Recommendations#generate}.
"""
def __init__(self, string):
"""
:type string: :class:`str`
:param string: String value for the :class:`SourceType` instance.
"""
Enum.__init__(string)
SourceType._set_values([
SourceType('PULL'),
SourceType('PUSH'),
SourceType('JSON_STRING'),
SourceType('LATEST_RECOMMENDATION'),
SourceType('CURRENT_SERIES_RECOMMENDATION'),
])
SourceType._set_binding_type(type.EnumType(
'com.vmware.esx.settings.clusters.software.drafts.source_type',
SourceType))
class ValidateResult(VapiStruct):
"""
The ``Drafts.ValidateResult`` class contains attributes to describe result
of validation of desired software specification.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
notifications=None,
):
"""
:type notifications: :class:`com.vmware.esx.settings_client.Notifications`
:param notifications: Notifications associated with the validation.
"""
self.notifications = notifications
VapiStruct.__init__(self)
ValidateResult._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.drafts.validate_result', {
'notifications': type.ReferenceType('com.vmware.esx.settings_client', 'Notifications'),
},
ValidateResult,
False,
None))
class Metadata(VapiStruct):
"""
The ``Drafts.Metadata`` class defines the metadata information about
software draft.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
owner=None,
status=None,
creation_time=None,
):
"""
:type owner: :class:`str`
:param owner: Owner of the software draft.
:type status: :class:`Drafts.StatusType`
:param status: Status of the software draft.
:type creation_time: :class:`datetime.datetime`
:param creation_time: Creation time of the software draft.
"""
self.owner = owner
self.status = status
self.creation_time = creation_time
VapiStruct.__init__(self)
Metadata._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.drafts.metadata', {
'owner': type.StringType(),
'status': type.ReferenceType(__name__, 'Drafts.StatusType'),
'creation_time': type.DateTimeType(),
},
Metadata,
False,
None))
class Info(VapiStruct):
"""
The ``Drafts.Info`` class defines the information about software draft.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
metadata=None,
software=None,
):
"""
:type metadata: :class:`Drafts.Metadata`
:param metadata: Metadata about the software draft.
:type software: :class:`com.vmware.esx.settings_client.SoftwareInfo`
:param software: Software specification associated with the draft.
"""
self.metadata = metadata
self.software = software
VapiStruct.__init__(self)
Info._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.drafts.info', {
'metadata': type.ReferenceType(__name__, 'Drafts.Metadata'),
'software': type.ReferenceType('com.vmware.esx.settings_client', 'SoftwareInfo'),
},
Info,
False,
None))
class Summary(VapiStruct):
"""
The ``Drafts.Summary`` class defines the summary information about software
draft.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
metadata=None,
):
"""
:type metadata: :class:`Drafts.Metadata`
:param metadata: Metadata about the software draft.
"""
self.metadata = metadata
VapiStruct.__init__(self)
Summary._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.drafts.summary', {
'metadata': type.ReferenceType(__name__, 'Drafts.Metadata'),
},
Summary,
False,
None))
class FilterSpec(VapiStruct):
"""
The ``Drafts.FilterSpec`` class contains attributes used to filter the
results when listing software drafts. See :func:`Drafts.list`.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
owners=None,
):
"""
:type owners: :class:`set` of :class:`str` or ``None``
:param owners: Owners of the drafts.
If None or empty, drafts from all owners will be returned.
"""
self.owners = owners
VapiStruct.__init__(self)
FilterSpec._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.drafts.filter_spec', {
'owners': type.OptionalType(type.SetType(type.StringType())),
},
FilterSpec,
False,
None))
class CommitSpec(VapiStruct):
"""
The ``Drafts.CommitSpec`` class contains attributes that are used to create
a new commit.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
message=None,
):
"""
:type message: :class:`str` or ``None``
:param message: Message to include with the commit.
If None, message is set to empty string.
"""
self.message = message
VapiStruct.__init__(self)
CommitSpec._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.drafts.commit_spec', {
'message': type.OptionalType(type.StringType()),
},
CommitSpec,
False,
None))
class ImportSpec(VapiStruct):
"""
The ``Drafts.ImportSpec`` class defines the information used to import the
desired software specification.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
_validator_list = [
UnionValidator(
'source_type',
{
'PULL' : [('location', True)],
'PUSH' : [('file_id', True)],
'JSON_STRING' : [('software_spec', True)],
'LATEST_RECOMMENDATION' : [],
'CURRENT_SERIES_RECOMMENDATION' : [],
}
),
]
def __init__(self,
source_type=None,
location=None,
file_id=None,
software_spec=None,
):
"""
:type source_type: :class:`Drafts.SourceType`
:param source_type: Type of the source to import the desired software specification
:type location: :class:`str`
:param location: Location of the software specification file to be imported.
This attribute is optional and it is only relevant when the value
of ``sourceType`` is :attr:`Drafts.SourceType.PULL`.
:type file_id: :class:`str`
:param file_id: File identifier returned by the file upload endpoint after file is
uploaded.
This attribute is optional and it is only relevant when the value
of ``sourceType`` is :attr:`Drafts.SourceType.PUSH`.
:type software_spec: :class:`str`
:param software_spec: The JSON string representing the desired software specification.
This attribute is optional and it is only relevant when the value
of ``sourceType`` is :attr:`Drafts.SourceType.JSON_STRING`.
"""
self.source_type = source_type
self.location = location
self.file_id = file_id
self.software_spec = software_spec
VapiStruct.__init__(self)
ImportSpec._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.drafts.import_spec', {
'source_type': type.ReferenceType(__name__, 'Drafts.SourceType'),
'location': type.OptionalType(type.URIType()),
'file_id': type.OptionalType(type.StringType()),
'software_spec': type.OptionalType(type.StringType()),
},
ImportSpec,
False,
None))
def commit_task(self,
cluster,
draft,
spec,
):
"""
Commits the specified draft as the desired state document. The result
of this operation can be queried by calling the cis/tasks/{task-id}
where the task-id is the response of this operation.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type draft: :class:`str`
:param draft: Identifier of the draft.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.draft``.
:type spec: :class:`Drafts.CommitSpec`
:param spec: The spec to be used to create the commit.
:rtype: :class: `vmware.vapi.stdlib.client.task.Task`
:return: Task instance
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` or no draft
associated with ``draft`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.NotAllowedInCurrentState`
If there is another operation in progress.
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
If validation of the software document fails. The value of the data
attribute of :class:`com.vmware.vapi.std.errors_client.Error` will
be a class that contains all the attributes defined in
:class:`Drafts.ValidateResult`.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
"""
task_id = self._invoke('commit$task',
{
'cluster': cluster,
'draft': draft,
'spec': spec,
})
task_svc = Tasks(self._config)
task_instance = Task(task_id, task_svc, type.IdType(resource_types='com.vmware.esx.settings.commit'))
return task_instance
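# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of committing an existing draft as the new desired state.
# 'stub_config', 'domain-c8' and 'draft-1' are placeholders. The returned Task
# wraps a task identifier whose progress and result can be queried via the cis
# Tasks service (cis/tasks/{task-id}), as described in the docstring.
#
#     drafts_svc = Drafts(stub_config)
#     commit_spec = Drafts.CommitSpec(message='Pin new base image')
#     task = drafts_svc.commit_task('domain-c8', 'draft-1', commit_spec)
# -------------------------------------------------------------------------------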
def create(self,
cluster,
):
"""
Creates a new software draft from the desired document. It will be
deleted when the draft is committed successfully. If a desired
document is missing, then this method will create an empty draft.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`str`
:return: Identifier of the working copy of the document.
The return value will be an identifier for the resource type:
``com.vmware.esx.settings.draft``.
:raise: :class:`com.vmware.vapi.std.errors_client.AlreadyExists`
If there is already a draft created by this user.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Write``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Write``.
"""
return self._invoke('create',
{
'cluster': cluster,
})
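# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of creating a working copy (draft) of the desired document
# for a cluster. Placeholders as above.
#
#     drafts_svc = Drafts(stub_config)
#     draft_id = drafts_svc.create('domain-c8')  # raises AlreadyExists if the
#                                                # caller already owns a draft
# -------------------------------------------------------------------------------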
def delete(self,
cluster,
draft,
):
"""
Deletes the software draft.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type draft: :class:`str`
:param draft: Identifier of the working copy of the document.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.draft``.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` or no draft
associated with ``draft`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Write``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Write``.
"""
return self._invoke('delete',
{
'cluster': cluster,
'draft': draft,
})
def get(self,
cluster,
draft,
):
"""
Returns the information about given software draft.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type draft: :class:`str`
:param draft: Identifier of the software draft.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.draft``.
:rtype: :class:`Drafts.Info`
:return: Information about the Software Draft.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` or no draft
associated with ``draft`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('get',
{
'cluster': cluster,
'draft': draft,
})
def list(self,
cluster,
filter=None,
):
"""
Returns information about the software drafts for the specified cluster
that match the :class:`Drafts.FilterSpec`.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type filter: :class:`Drafts.FilterSpec` or ``None``
:param filter: Filter to be applied while returning drafts.
If None, all drafts will be returned.
:rtype: :class:`dict` of :class:`str` and :class:`Drafts.Summary`
:return: Map of software drafts keyed by their identifiers.
The key in the return value :class:`dict` will be an identifier for
the resource type: ``com.vmware.esx.settings.draft``.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('list',
{
'cluster': cluster,
'filter': filter,
})
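# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of listing drafts, optionally restricted to specific owners
# via Drafts.FilterSpec. The owner name below is a placeholder.
#
#     drafts_svc = Drafts(stub_config)
#     summaries = drafts_svc.list(
#         'domain-c8',
#         filter=Drafts.FilterSpec(owners={'administrator@vsphere.local'}))
#     for draft_id, summary in summaries.items():
#         print(draft_id, summary.metadata.status)
# -------------------------------------------------------------------------------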
def validate_task(self,
cluster,
draft,
):
"""
Validates the software draft. The result of this operation can be
queried by calling the cis/tasks/{task-id} where the task-id is the
response of this operation.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type draft: :class:`str`
:param draft: Identifier of the software draft.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.draft``.
:rtype: :class: `vmware.vapi.stdlib.client.task.Task`
:return: Task instance
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` or no draft
associated with ``draft`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
"""
task_id = self._invoke('validate$task',
{
'cluster': cluster,
'draft': draft,
})
task_svc = Tasks(self._config)
task_instance = Task(task_id, task_svc, type.ReferenceType(__name__, 'Drafts.ValidateResult'))
return task_instance
def scan_task(self,
cluster,
draft,
):
"""
Scans all the hosts in the cluster against the software draft. The
result of this operation can be queried by calling the
cis/tasks/{task-id} where the task-id is the response of this
operation.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type draft: :class:`str`
:param draft: Identifier of the working copy of the document.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.draft``.
:rtype: :class: `vmware.vapi.stdlib.client.task.Task`
:return: Task instance
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` or no draft
associated with ``draft`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
"""
task_id = self._invoke('scan$task',
{
'cluster': cluster,
'draft': draft,
})
task_svc = Tasks(self._config)
task_instance = Task(task_id, task_svc, type.ReferenceType('com.vmware.esx.settings_client', 'ClusterCompliance'))
return task_instance
def import_software_spec(self,
cluster,
spec,
):
"""
Imports the desired software specification as a new draft. If a desired
document is missing, then this method will create an empty draft, except
when the source type is either
:attr:`Drafts.SourceType.LATEST_RECOMMENDATION` or
:attr:`Drafts.SourceType.CURRENT_SERIES_RECOMMENDATION`, in which case a
:class:`com.vmware.vapi.std.errors_client.NotFound` error is reported.
In addition, the existing draft will be overwritten when the source
type is of either ``LATEST_RECOMMENDATION`` or
``CURRENT_SERIES_RECOMMENDATION``.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type spec: :class:`Drafts.ImportSpec`
:param spec: Specification to import desired software specification.
:rtype: :class:`str`
:return: Identifier of the software draft.
The return value will be an identifier for the resource type:
``com.vmware.esx.settings.draft``.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
if the source type of the import specification is either
``LATEST_RECOMMENDATION`` or ``CURRENT_SERIES_RECOMMENDATION`` and
a recommendation does not exist for the cluster, either because it was
never generated or because it was deleted due to changes in cluster state,
such as a new desired image spec being committed.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Write``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Write``.
"""
return self._invoke('import_software_spec',
{
'cluster': cluster,
'spec': spec,
})
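# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of importing a desired software specification from a JSON
# string as a new draft. 'software_json' is a placeholder for a JSON document
# describing the desired image; 'stub_config' and 'domain-c8' as above.
#
#     drafts_svc = Drafts(stub_config)
#     import_spec = Drafts.ImportSpec(
#         source_type=Drafts.SourceType.JSON_STRING,
#         software_spec=software_json)
#     draft_id = drafts_svc.import_software_spec('domain-c8', import_spec)
# -------------------------------------------------------------------------------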
class EffectiveComponents(VapiInterface):
"""
The ``EffectiveComponents`` class provides methods to get effective list of
components.
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.effective_components'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _EffectiveComponentsStub)
self._VAPI_OPERATION_IDS = {}
def list(self,
cluster,
):
"""
Returns the effective components for the cluster.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`dict` of :class:`str` and :class:`com.vmware.esx.settings_client.EffectiveComponentInfo`
:return: Map of effective components keyed by their identifier.
The key in the return value :class:`dict` will be an identifier for
the resource type: ``com.vmware.esx.settings.component``.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
"""
return self._invoke('list',
{
'cluster': cluster,
})
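# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of listing the effective component list for a cluster.
# Placeholders as above.
#
#     effective_svc = EffectiveComponents(stub_config)
#     for component_id, effective_info in effective_svc.list('domain-c8').items():
#         print(component_id, effective_info)
# -------------------------------------------------------------------------------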
class Recommendations(VapiInterface):
"""
The ``Recommendations`` class provides methods to manage the generation and
retrieval of recommended image specs.
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.recommendations'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _RecommendationsStub)
self._VAPI_OPERATION_IDS = {}
self._VAPI_OPERATION_IDS.update({'generate_task': 'generate$task'})
class ExplanationDetails(VapiStruct):
"""
The ``Recommendations.ExplanationDetails`` class contains attributes to
describe the result of validation of desired software specification.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
display_name=None,
display_version=None,
explanation=None,
):
"""
:type display_name: :class:`str`
:param display_name: Display name of an excluded image entity (base image, add-on etc.).
:type display_version: :class:`str`
:param display_version: Display version of an excluded image entity (base image, add-on
etc.).
:type explanation: :class:`list` of :class:`com.vmware.vapi.std_client.LocalizableMessage`
:param explanation: List of explanations on why the image entity is excluded.
"""
self.display_name = display_name
self.display_version = display_version
self.explanation = explanation
VapiStruct.__init__(self)
ExplanationDetails._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.recommendations.explanation_details', {
'display_name': type.StringType(),
'display_version': type.StringType(),
'explanation': type.ListType(type.ReferenceType('com.vmware.vapi.std_client', 'LocalizableMessage')),
},
ExplanationDetails,
False,
None))
class Info(VapiStruct):
"""
The ``Recommendations.Info`` class defines the information about the most
recent recommendation generation result.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
latest_recommendation=None,
current_series_recommendation=None,
base_image_explanation_details=None,
check_time=None,
):
"""
:type latest_recommendation: :class:`com.vmware.esx.settings_client.SoftwareInfo` or ``None``
:param latest_recommendation: Recommended image specification based on latest base image version.
None if no recommended image based on latest base image version is
available.
:type current_series_recommendation: :class:`com.vmware.esx.settings_client.SoftwareInfo` or ``None``
:param current_series_recommendation: Recommended image specification based on latest base image patch or
update of the current series.
None if no recommended image based on latest base image patch or
update of the current series is available.
:type base_image_explanation_details: :class:`list` of :class:`Recommendations.ExplanationDetails`
:param base_image_explanation_details: Details about why some base images are excluded in recommendation.
:type check_time: :class:`datetime.datetime` or ``None``
:param check_time: The most recent timestamp when check for recommended image is
launched.
None if no recommendation check has ever been launched.
"""
self.latest_recommendation = latest_recommendation
self.current_series_recommendation = current_series_recommendation
self.base_image_explanation_details = base_image_explanation_details
self.check_time = check_time
VapiStruct.__init__(self)
Info._set_binding_type(type.StructType(
'com.vmware.esx.settings.clusters.software.recommendations.info', {
'latest_recommendation': type.OptionalType(type.ReferenceType('com.vmware.esx.settings_client', 'SoftwareInfo')),
'current_series_recommendation': type.OptionalType(type.ReferenceType('com.vmware.esx.settings_client', 'SoftwareInfo')),
'base_image_explanation_details': type.ListType(type.ReferenceType(__name__, 'Recommendations.ExplanationDetails')),
'check_time': type.OptionalType(type.DateTimeType()),
},
Info,
False,
None))
def generate_task(self,
cluster,
):
"""
Generates recommended software image spec(s) based on current desired
software spec. The result of this operation can be queried by calling
the cis/tasks/{task-id} where the task-id is the response of this
operation.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster``.
:raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange`
If a new desired image is committed in parallel via a different
client while recommendation is being generated.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
"""
task_id = self._invoke('generate$task',
{
'cluster': cluster,
})
task_svc = Tasks(self._config)
task_instance = Task(task_id, task_svc, type.VoidType())
return task_instance
def get(self,
cluster,
):
"""
Returns information about the most recent recommendation generation
result.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`Recommendations.Info`
:return: Information about the most recent recommendation generation result.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
if no recommendation exists for the cluster, either because it was
never generated or because it was deleted due to changes in cluster
state, such as a new desired image spec being committed.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('get',
{
'cluster': cluster,
})
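# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of triggering recommendation generation and reading the most
# recent result. The returned Task wraps a task identifier that can be polled via
# the cis Tasks service; get() should be called once the generate task completes.
#
#     recommendations_svc = Recommendations(stub_config)
#     recommendations_svc.generate_task('domain-c8')
#     # ... after the generate task completes ...
#     rec_info = recommendations_svc.get('domain-c8')  # -> Recommendations.Info
#     print(rec_info.check_time, rec_info.latest_recommendation)
# -------------------------------------------------------------------------------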
class Solutions(VapiInterface):
"""
The ``Solutions`` class provides methods to manage desired software
solution specifications for an ESX cluster.
"""
RESOURCE_TYPE = "com.vmware.esx.settings.solution"
"""
Resource type for solution resource
"""
_VAPI_SERVICE_ID = 'com.vmware.esx.settings.clusters.software.solutions'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SolutionsStub)
self._VAPI_OPERATION_IDS = {}
self._VAPI_OPERATION_IDS.update({'set_task': 'set$task'})
self._VAPI_OPERATION_IDS.update({'delete_task': 'delete$task'})
def get(self,
cluster,
solution,
):
"""
Returns components registered for the given solution in the desired
software specification.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type solution: :class:`str`
:param solution: Identifier of the solution.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.solution``.
:rtype: :class:`com.vmware.esx.settings_client.SolutionInfo`
:return: Specification of components registered by the solution.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
If invalid component name is provided.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system or
no solution associated with ``solution`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('get',
{
'cluster': cluster,
'solution': solution,
})
def list(self,
cluster,
):
"""
Returns all solutions in the desired software specification.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:rtype: :class:`dict` of :class:`str` and :class:`com.vmware.esx.settings_client.SolutionInfo`
:return: Map of solutions where key is solution identifier and value is a
list of components registered by that solution.
The key in the return value :class:`dict` will be an identifier for
the resource type: ``com.vmware.esx.settings.solution``.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
If the caller is not authenticated.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
* The resource ``ClusterComputeResource`` referenced by the
parameter ``cluster`` requires
``VcIntegrity.lifecycleSoftwareSpecification.Read``.
"""
return self._invoke('list',
{
'cluster': cluster,
})
def set_task(self,
cluster,
solution,
spec,
):
"""
Sets the components registered for the given solution in the desired
software specification. The task will set only one solution
specification at a time. Solution constraints would be validated with
the current desired software specification before it is committed as
new desired spec. The result of this operation can be queried by
calling the cis/tasks/{task-id} where the task-id is the response of
this operation.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type solution: :class:`str`
:param solution: Identifier of the solution.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.solution``.
:type spec: :class:`com.vmware.esx.settings_client.SolutionSpec`
:param spec: Registered solution specification.
:rtype: :class: `vmware.vapi.stdlib.client.task.Task`
:return: Task instance
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
if validation of the software document fails. The value of the data
attribute of :class:`com.vmware.vapi.std.errors_client.Error` will
be a class that contains all the attributes defined in null.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` or no solution
associated with ``solution`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
if the caller is not authenticated.
"""
task_id = self._invoke('set$task',
{
'cluster': cluster,
'solution': solution,
'spec': spec,
})
task_svc = Tasks(self._config)
task_instance = Task(task_id, task_svc, type.IdType(resource_types='com.vmware.esx.settings.commit'))
return task_instance
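# --- Usage sketch (illustrative only, not part of the generated stub) ---------
# A minimal example of registering a solution's component requirements in the
# desired specification. It assumes 'solution_spec' is a
# com.vmware.esx.settings_client.SolutionSpec built elsewhere; 'solution-1' is a
# placeholder solution identifier. The returned Task can be polled via the cis
# Tasks service, as described in the docstring.
#
#     solutions_svc = Solutions(stub_config)
#     task = solutions_svc.set_task('domain-c8', 'solution-1', solution_spec)
# -------------------------------------------------------------------------------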
def delete_task(self,
cluster,
solution,
):
"""
Deletes the given solution from the desired software specification. The
deletion will be validated along with the entire software specification
before it is committed as new desired spec. The result of this
operation can be queried by calling the cis/tasks/{task-id} where the
task-id is the response of this operation.
:type cluster: :class:`str`
:param cluster: Identifier of the cluster.
The parameter must be an identifier for the resource type:
``ClusterComputeResource``.
:type solution: :class:`str`
:param solution: Identifier of the solution.
The parameter must be an identifier for the resource type:
``com.vmware.esx.settings.solution``.
:rtype: :class: `vmware.vapi.stdlib.client.task.Task`
:return: Task instance
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
If there is unknown internal error. The accompanying error message
will give more details about the failure.
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
if validation of the software document fails. The value of the data
attribute of :class:`com.vmware.vapi.std.errors_client.Error` will
be a class that contains all the attributes defined in null.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
If there is no cluster associated with ``cluster`` or no solution
associated with ``solution`` in the system.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
If the service is not available.
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
if the caller is not authenticated.
"""
task_id = self._invoke('delete$task',
{
'cluster': cluster,
'solution': solution,
})
task_svc = Tasks(self._config)
task_instance = Task(task_id, task_svc, type.IdType(resource_types='com.vmware.esx.settings.commit'))
return task_instance
class _AddOnStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/add-on',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.esx.settings_client', 'AddOnInfo'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.add_on',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _BaseImageStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/base-image',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.esx.settings_client', 'BaseImageInfo'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.base_image',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _CommitsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'commit': type.IdType(resource_types='com.vmware.esx.settings.commit'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/commits/{commit}',
path_variables={
'cluster': 'cluster',
'commit': 'commit',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType(__name__, 'Commits.Info'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.commits',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _ComplianceStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/compliance',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.esx.settings_client', 'ClusterCompliance'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.compliance',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _ComponentsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'component': type.IdType(resource_types='com.vmware.esx.settings.component'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.invalid_argument':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/components/{component}',
path_variables={
'cluster': 'cluster',
'component': 'component',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
list_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/components',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.OptionalType(type.ReferenceType('com.vmware.esx.settings_client', 'ComponentInfo')),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.MapType(type.IdType(), type.ReferenceType('com.vmware.esx.settings_client', 'ComponentInfo')),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'list': list_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.components',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _DraftsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for commit operation
commit_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'draft': type.IdType(resource_types='com.vmware.esx.settings.draft'),
'spec': type.ReferenceType(__name__, 'Drafts.CommitSpec'),
})
commit_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.not_allowed_in_current_state':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotAllowedInCurrentState'),
'com.vmware.vapi.std.errors.invalid_argument':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
commit_input_value_validator_list = [
]
commit_output_validator_list = [
]
commit_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/esx/settings/clusters/{cluster}/software/drafts/{draft}',
request_body_parameter='spec',
path_variables={
'cluster': 'cluster',
'draft': 'draft',
},
query_parameters={
},
dispatch_parameters={
'action': 'commit',
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for create operation
create_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
create_error_dict = {
'com.vmware.vapi.std.errors.already_exists':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'AlreadyExists'),
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
create_input_value_validator_list = [
]
create_output_validator_list = [
]
create_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/esx/settings/clusters/{cluster}/software/drafts',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'draft': type.IdType(resource_types='com.vmware.esx.settings.draft'),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/esx/settings/clusters/{cluster}/software/drafts/{draft}',
path_variables={
'cluster': 'cluster',
'draft': 'draft',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'draft': type.IdType(resource_types='com.vmware.esx.settings.draft'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/drafts/{draft}',
path_variables={
'cluster': 'cluster',
'draft': 'draft',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'filter': type.OptionalType(type.ReferenceType(__name__, 'Drafts.FilterSpec')),
})
list_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/drafts',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for validate operation
validate_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'draft': type.IdType(resource_types='com.vmware.esx.settings.draft'),
})
validate_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
validate_input_value_validator_list = [
]
validate_output_validator_list = [
]
validate_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/esx/settings/clusters/{cluster}/software/drafts/{draft}',
path_variables={
'cluster': 'cluster',
'draft': 'draft',
},
query_parameters={
},
dispatch_parameters={
'action': 'validate',
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for scan operation
scan_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'draft': type.IdType(resource_types='com.vmware.esx.settings.draft'),
})
scan_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
scan_input_value_validator_list = [
]
scan_output_validator_list = [
]
scan_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/esx/settings/clusters/{cluster}/software/drafts/{draft}',
path_variables={
'cluster': 'cluster',
'draft': 'draft',
},
query_parameters={
},
dispatch_parameters={
'action': 'scan',
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for import_software_spec operation
import_software_spec_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'spec': type.ReferenceType(__name__, 'Drafts.ImportSpec'),
})
import_software_spec_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
}
import_software_spec_input_value_validator_list = [
]
import_software_spec_output_validator_list = [
]
import_software_spec_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/esx/settings/clusters/{cluster}/software/drafts',
request_body_parameter='spec',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
'action': 'import-software-spec',
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'commit$task': {
'input_type': commit_input_type,
'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
'errors': commit_error_dict,
'input_value_validator_list': commit_input_value_validator_list,
'output_validator_list': [],
'task_type': TaskType.TASK_ONLY,
},
'create': {
'input_type': create_input_type,
'output_type': type.IdType(resource_types='com.vmware.esx.settings.draft'),
'errors': create_error_dict,
'input_value_validator_list': create_input_value_validator_list,
'output_validator_list': create_output_validator_list,
'task_type': TaskType.NONE,
},
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType(__name__, 'Drafts.Info'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.MapType(type.IdType(), type.ReferenceType(__name__, 'Drafts.Summary')),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'validate$task': {
'input_type': validate_input_type,
'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
'errors': validate_error_dict,
'input_value_validator_list': validate_input_value_validator_list,
'output_validator_list': [],
'task_type': TaskType.TASK_ONLY,
},
'scan$task': {
'input_type': scan_input_type,
'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
'errors': scan_error_dict,
'input_value_validator_list': scan_input_value_validator_list,
'output_validator_list': [],
'task_type': TaskType.TASK_ONLY,
},
'import_software_spec': {
'input_type': import_software_spec_input_type,
'output_type': type.IdType(resource_types='com.vmware.esx.settings.draft'),
'errors': import_software_spec_error_dict,
'input_value_validator_list': import_software_spec_input_value_validator_list,
'output_validator_list': import_software_spec_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'commit': commit_rest_metadata,
'create': create_rest_metadata,
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'list': list_rest_metadata,
'validate': validate_rest_metadata,
'scan': scan_rest_metadata,
'import_software_spec': import_software_spec_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.drafts',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _EffectiveComponentsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
list_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/effective-components',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'list': {
'input_type': list_input_type,
'output_type': type.MapType(type.IdType(), type.ReferenceType('com.vmware.esx.settings_client', 'EffectiveComponentInfo')),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'list': list_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.effective_components',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _RecommendationsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for generate operation
generate_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
generate_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.concurrent_change':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
}
generate_input_value_validator_list = [
]
generate_output_validator_list = [
]
generate_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/esx/settings/clusters/{cluster}/software/recommendations',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
'action': 'generate',
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/recommendations',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'generate$task': {
'input_type': generate_input_type,
'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
'errors': generate_error_dict,
'input_value_validator_list': generate_input_value_validator_list,
'output_validator_list': [],
'task_type': TaskType.TASK_ONLY,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType(__name__, 'Recommendations.Info'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'generate': generate_rest_metadata,
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.recommendations',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _SolutionsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'solution': type.IdType(resource_types='com.vmware.esx.settings.solution'),
})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.invalid_argument':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/solutions/{solution}',
path_variables={
'cluster': 'cluster',
'solution': 'solution',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
})
list_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/esx/settings/clusters/{cluster}/software/solutions',
path_variables={
'cluster': 'cluster',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for set operation
set_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'solution': type.IdType(resource_types='com.vmware.esx.settings.solution'),
'spec': type.ReferenceType('com.vmware.esx.settings_client', 'SolutionSpec'),
})
set_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.invalid_argument':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
set_input_value_validator_list = [
]
set_output_validator_list = [
]
set_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/esx/settings/clusters/{cluster}/software/solutions/{solution}',
request_body_parameter='spec',
path_variables={
'cluster': 'cluster',
'solution': 'solution',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'cluster': type.IdType(resource_types='ClusterComputeResource'),
'solution': type.IdType(resource_types='com.vmware.esx.settings.solution'),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
'com.vmware.vapi.std.errors.invalid_argument':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/esx/settings/clusters/{cluster}/software/solutions/{solution}',
path_variables={
'cluster': 'cluster',
'solution': 'solution',
},
query_parameters={
},
dispatch_parameters={
},
header_parameters={
},
dispatch_header_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.esx.settings_client', 'SolutionInfo'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.MapType(type.IdType(), type.ReferenceType('com.vmware.esx.settings_client', 'SolutionInfo')),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'set$task': {
'input_type': set_input_type,
'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
'errors': set_error_dict,
'input_value_validator_list': set_input_value_validator_list,
'output_validator_list': [],
'task_type': TaskType.TASK_ONLY,
},
'delete$task': {
'input_type': delete_input_type,
'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': [],
'task_type': TaskType.TASK_ONLY,
},
}
rest_metadata = {
'get': get_rest_metadata,
'list': list_rest_metadata,
'set': set_rest_metadata,
'delete': delete_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.esx.settings.clusters.software.solutions',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class StubFactory(StubFactoryBase):
_attrs = {
'AddOn': AddOn,
'BaseImage': BaseImage,
'Commits': Commits,
'Compliance': Compliance,
'Components': Components,
'Drafts': Drafts,
'EffectiveComponents': EffectiveComponents,
'Recommendations': Recommendations,
'Solutions': Solutions,
'drafts': 'com.vmware.esx.settings.clusters.software.drafts_client.StubFactory',
'reports': 'com.vmware.esx.settings.clusters.software.reports_client.StubFactory',
}
| 41.848243
| 139
| 0.593589
| 11,549
| 115,543
| 5.769417
| 0.042255
| 0.053218
| 0.0558
| 0.068677
| 0.815851
| 0.794015
| 0.77476
| 0.75615
| 0.741922
| 0.728505
| 0
| 0.000288
| 0.309651
| 115,543
| 2,760
| 140
| 41.863406
| 0.835053
| 0.356292
| 0
| 0.578281
| 1
| 0
| 0.26321
| 0.192169
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034644
| false
| 0
| 0.021985
| 0
| 0.103264
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7935bf2eef3aa2a0bda544c6ef99f49c31f5f573
| 585
|
py
|
Python
|
train_covid20cases_timm-regnetx_002_grid_dropout.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_covid20cases_timm-regnetx_002_grid_dropout.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_covid20cases_timm-regnetx_002_grid_dropout.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/train_covid20cases_unetplusplus_timm-regnetx_002_fold0_grid_dropout.yml",
"python main.py --configs configs/train_covid20cases_unetplusplus_timm-regnetx_002_fold1_grid_dropout.yml",
"python main.py --configs configs/train_covid20cases_unetplusplus_timm-regnetx_002_fold2_grid_dropout.yml",
"python main.py --configs configs/train_covid20cases_unetplusplus_timm-regnetx_002_fold3_grid_dropout.yml",
"python main.py --configs configs/train_covid20cases_unetplusplus_timm-regnetx_002_fold4_grid_dropout.yml",
]
for l in ls:
os.system(l)
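# --- Editor's sketch (not in the original launcher) ---
# os.system ignores non-zero exit codes, so a failed fold is silently skipped. A
# minimal alternative, kept commented out here, runs the same config list via
# subprocess and stops the sweep on the first failure:
#
#   import shlex, subprocess
#   for cmd in ls:
#       subprocess.run(shlex.split(cmd), check=True)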
| 53.181818
| 111
| 0.85812
| 85
| 585
| 5.494118
| 0.294118
| 0.107066
| 0.12848
| 0.203426
| 0.858672
| 0.858672
| 0.858672
| 0.858672
| 0.858672
| 0.858672
| 0
| 0.054152
| 0.052991
| 585
| 11
| 112
| 53.181818
| 0.788809
| 0
| 0
| 0
| 0
| 0
| 0.887372
| 0.674061
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
793c3c3c16c0611464010c393da94a0681df9705
| 9,220
|
py
|
Python
|
pypykatz/lsadecryptor/packages/credman/templates.py
|
netredo/pypykatz
|
0cdf1a7439e95da91c94ed1ceff4147a09dbdf26
|
[
"MIT"
] | 1
|
2020-01-11T20:41:01.000Z
|
2020-01-11T20:41:01.000Z
|
pypykatz/lsadecryptor/packages/credman/templates.py
|
samuelriesz/pypykatz
|
e5ee5cadb99c543a07940082cf65fe60c0927920
|
[
"MIT"
] | null | null | null |
pypykatz/lsadecryptor/packages/credman/templates.py
|
samuelriesz/pypykatz
|
e5ee5cadb99c543a07940082cf65fe60c0927920
|
[
"MIT"
] | 1
|
2019-09-19T09:26:16.000Z
|
2019-09-19T09:26:16.000Z
|
#!/usr/bin/env python3
#
# Author:
# Tamas Jos (@skelsec)
#
import io
from minidump.win_datatypes import *
from pypykatz.commons.common import *
from pypykatz.commons.win_datatypes import *
from pypykatz.lsadecryptor.package_commons import *
class CredmanTemplate(PackageTemplate):
def __init__(self):
super().__init__('Credman')
self.signature = None
self.first_entry_offset = None
self.list_entry = None
@staticmethod
def get_template(sysinfo):
template = CredmanTemplate()
if sysinfo.architecture == KatzSystemArchitecture.X64:
if sysinfo.buildnumber < WindowsMinBuild.WIN_VISTA.value:
template.list_entry = KIWI_CREDMAN_LIST_ENTRY_5
elif WindowsMinBuild.WIN_VISTA.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_7.value:
template.list_entry = KIWI_CREDMAN_LIST_ENTRY_60
else:
template.list_entry = KIWI_CREDMAN_LIST_ENTRY
else:
if sysinfo.buildnumber < WindowsMinBuild.WIN_VISTA.value:
template.list_entry = KIWI_CREDMAN_LIST_ENTRY_5_X86
elif WindowsMinBuild.WIN_VISTA.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_7.value:
template.list_entry = KIWI_CREDMAN_LIST_ENTRY_60_X86
else:
template.list_entry = KIWI_CREDMAN_LIST_ENTRY_X86
template.log_template('list_entry', template.list_entry)
return template
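# --- Hedged usage sketch (editor addition, not pypykatz source) ---
# get_template() only selects which KIWI_CREDMAN_LIST_ENTRY layout matches the dump's
# architecture and build number; the decryptor later instantiates template.list_entry
# against a positioned reader. Illustration only, with `sysinfo` taken from the
# minidump's system information:
#
#   template = CredmanTemplate.get_template(sysinfo)
#   entry_cls = template.list_entry   # e.g. KIWI_CREDMAN_LIST_ENTRY on x64 Win7+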
class PKIWI_CREDMAN_LIST_ENTRY_5_X86(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_LIST_ENTRY_5_X86)
class KIWI_CREDMAN_LIST_ENTRY_5_X86:
def __init__(self, reader):
#IMPORTANT NOTICE, THE STRUCTURE STARTS BEFORE THE FLINK/BLINK POINTER, SO WE NEED TO READ BACKWARDS
#
reader.move(reader.tell() - 32)
reader.align() #not sure if it's needed here
#
self.cbEncPassword = ULONG(reader).value
reader.align()
self.encPassword = PWSTR
self.unk0 = ULONG(reader).value
self.unk1 = ULONG(reader).value
self.unk2 = PVOID(reader)
self.unk3 = PVOID(reader)
self.UserName = PWSTR(reader)
self.cbUserName = ULONG(reader).value
reader.align()
self.Flink = PKIWI_CREDMAN_LIST_ENTRY_5
self.Blink = PKIWI_CREDMAN_LIST_ENTRY_5
self.server1 = LSA_UNICODE_STRING
self.unk6 = PVOID(reader)
self.unk7 = PVOID(reader)
self.user = LSA_UNICODE_STRING(reader)
self.unk8 = ULONG(reader).value
reader.align()
self.server2 = LSA_UNICODE_STRING
class PKIWI_CREDMAN_LIST_ENTRY_60_X86(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_LIST_ENTRY_60_X86)
class KIWI_CREDMAN_LIST_ENTRY_60_X86:
def __init__(self, reader):
#IMPORTANT NOTICE, THE STRUCTURE STARTS BEFORE THE FLINK/BLINK POINTER, SO WE NEED TO READ BACKWARDS
#
reader.move(reader.tell() - 32)
reader.align() #not sure if it's needed here
#
#input('KIWI_CREDMAN_LIST_ENTRY_60 \n%s' % hexdump(reader.peek(0x200), start = reader.tell()))
#
self.cbEncPassword = ULONG(reader).value
reader.align()
self.encPassword = PWSTR(reader)
self.unk0 = ULONG(reader).value
self.unk1 = ULONG(reader).value
self.unk2 = PVOID(reader)
self.unk3 = PVOID(reader)
self.UserName = PWSTR(reader)
self.cbUserName = ULONG(reader).value
reader.align()
self.Flink = PKIWI_CREDMAN_LIST_ENTRY_60
self.Blink = PKIWI_CREDMAN_LIST_ENTRY_60
self.type = LSA_UNICODE_STRING(reader)
self.unk5 = PVOID(reader)
self.server1 = LSA_UNICODE_STRING(reader)
self.unk6 = PVOID(reader)
self.unk7 = PVOID(reader)
self.unk8 = PVOID(reader)
self.unk9 = PVOID(reader)
self.unk10 = PVOID(reader)
self.user = LSA_UNICODE_STRING(reader)
self.unk11 = ULONG(reader).value
reader.align()
self.server2 = LSA_UNICODE_STRING(reader)
class PKIWI_CREDMAN_LIST_ENTRY_X86(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_LIST_ENTRY_X86)
class KIWI_CREDMAN_LIST_ENTRY_X86:
def __init__(self, reader):
#IMPORTANT NOTICE, THE STRUCTURE STARTS BEFORE THE FLINK/BLINK POINTER, SO WE NEED TO READ BACKWARDS
#
reader.move(reader.tell() - 32)
reader.align() #not sure if it's needed here
#
self.cbEncPassword = ULONG(reader).value
reader.align()
self.encPassword = PWSTR(reader)
self.unk0 = ULONG(reader).value
self.unk1 = ULONG(reader).value
self.unk2 = PVOID(reader)
self.unk3 = PVOID(reader)
self.UserName = PWSTR(reader)
self.cbUserName = ULONG(reader).value
reader.align()
self.Flink = PKIWI_CREDMAN_LIST_ENTRY(reader)
self.Blink = PKIWI_CREDMAN_LIST_ENTRY(reader)
self.unk4 = LIST_ENTRY(reader)
self.type = LSA_UNICODE_STRING(reader)
self.unk5 = PVOID(reader)
self.server1 = LSA_UNICODE_STRING(reader)
self.unk6 = PVOID(reader)
self.unk7 = PVOID(reader)
self.unk8 = PVOID(reader)
self.unk9 = PVOID(reader)
self.unk10 = PVOID(reader)
self.user = LSA_UNICODE_STRING(reader)
self.unk11 = ULONG(reader).value
reader.align()
self.server2 = LSA_UNICODE_STRING(reader)
class PKIWI_CREDMAN_LIST_ENTRY_5(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_LIST_ENTRY_5)
class KIWI_CREDMAN_LIST_ENTRY_5:
def __init__(self, reader):
#IMPORTANT NOTICE, THE STRUCTURE STARTS BEFORE THE FLINK/BLINK POINTER, SO WE NEED TO READ BACKWARDS
#
reader.move(reader.tell() - 56)
reader.align() #not sure if it's needed here
#
self.cbEncPassword = ULONG(reader).value
reader.align()
self.encPassword = PWSTR
self.unk0 = ULONG(reader).value
self.unk1 = ULONG(reader).value
self.unk2 = PVOID(reader)
self.unk3 = PVOID(reader)
self.UserName = PWSTR(reader)
self.cbUserName = ULONG(reader).value
reader.align()
self.Flink = PKIWI_CREDMAN_LIST_ENTRY_5
self.Blink = PKIWI_CREDMAN_LIST_ENTRY_5
self.server1 = LSA_UNICODE_STRING
self.unk6 = PVOID(reader)
self.unk7 = PVOID(reader)
self.user = LSA_UNICODE_STRING(reader)
self.unk8 = ULONG(reader).value
reader.align()
self.server2 = LSA_UNICODE_STRING
class PKIWI_CREDMAN_LIST_ENTRY_60(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_LIST_ENTRY_60)
class KIWI_CREDMAN_LIST_ENTRY_60:
def __init__(self, reader):
#IMPORTANT NOTICE, THE STRUCTURE STARTS BEFORE THE FLINK/BLINK POINTER, SO WE NEED TO READ BACKWARDS
#
reader.move(reader.tell() - 56)
reader.align() #not sure if it's needed here
#
#input('KIWI_CREDMAN_LIST_ENTRY_60 \n%s' % hexdump(reader.peek(0x200), start = reader.tell()))
#
self.cbEncPassword = ULONG(reader).value
reader.align()
self.encPassword = PWSTR(reader)
self.unk0 = ULONG(reader).value
self.unk1 = ULONG(reader).value
self.unk2 = PVOID(reader)
self.unk3 = PVOID(reader)
self.UserName = PWSTR(reader)
self.cbUserName = ULONG(reader).value
reader.align()
self.Flink = PKIWI_CREDMAN_LIST_ENTRY_60
self.Blink = PKIWI_CREDMAN_LIST_ENTRY_60
self.type = LSA_UNICODE_STRING(reader)
self.unk5 = PVOID(reader)
self.server1 = LSA_UNICODE_STRING(reader)
self.unk6 = PVOID(reader)
self.unk7 = PVOID(reader)
self.unk8 = PVOID(reader)
self.unk9 = PVOID(reader)
self.unk10 = PVOID(reader)
self.user = LSA_UNICODE_STRING(reader)
self.unk11 = ULONG(reader).value
reader.align()
self.server2 = LSA_UNICODE_STRING(reader)
class PKIWI_CREDMAN_LIST_ENTRY(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_LIST_ENTRY)
class KIWI_CREDMAN_LIST_ENTRY:
def __init__(self, reader):
#IMPORTANT NOTICE, THE STRUCTURE STARTS BEFORE THE FLINK/BLINK POINTER, SO WE NEED TO READ BACKWARDS
#
#input('KIWI_CREDMAN_LIST_ENTRY \n%s' % hexdump(reader.peek(0x50), start = reader.tell()))
reader.move(reader.tell() - 56)
reader.align() #not sure if it's needed here
#input('KIWI_CREDMAN_LIST_ENTRY \n%s' % hexdump(reader.peek(0x200), start = reader.tell()))
#
self.cbEncPassword = ULONG(reader).value
reader.align()
self.encPassword = PWSTR(reader)
self.unk0 = ULONG(reader).value
self.unk1 = ULONG(reader).value
self.unk2 = PVOID(reader)
self.unk3 = PVOID(reader)
self.UserName = PWSTR(reader)
self.cbUserName = ULONG(reader).value
reader.align()
self.Flink = PKIWI_CREDMAN_LIST_ENTRY(reader)
self.Blink = PKIWI_CREDMAN_LIST_ENTRY(reader)
self.unk4 = LIST_ENTRY(reader)
self.type = LSA_UNICODE_STRING(reader)
self.unk5 = PVOID(reader)
self.server1 = LSA_UNICODE_STRING(reader)
self.unk6 = PVOID(reader)
self.unk7 = PVOID(reader)
self.unk8 = PVOID(reader)
self.unk9 = PVOID(reader)
self.unk10 = PVOID(reader)
self.user = LSA_UNICODE_STRING(reader)
self.unk11 = ULONG(reader).value
reader.align()
self.server2 = LSA_UNICODE_STRING(reader)
class PKIWI_CREDMAN_LIST_STARTER(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_LIST_STARTER)
class KIWI_CREDMAN_LIST_STARTER:
def __init__(self, reader):
self.unk0 = ULONG(reader)
reader.align()
self.start = PKIWI_CREDMAN_LIST_ENTRY(reader)
#...
class PKIWI_CREDMAN_SET_LIST_ENTRY(POINTER):
def __init__(self, reader):
super().__init__(reader, KIWI_CREDMAN_SET_LIST_ENTRY)
class KIWI_CREDMAN_SET_LIST_ENTRY:
def __init__(self, reader):
self.Flink = PKIWI_CREDMAN_SET_LIST_ENTRY(reader)
self.Blink = PKIWI_CREDMAN_SET_LIST_ENTRY(reader)
self.unk0 = ULONG(reader).value
reader.align()
self.list1 = PKIWI_CREDMAN_LIST_STARTER(reader)
self.list2 = PKIWI_CREDMAN_LIST_STARTER(reader)
| 32.350877
| 102
| 0.749783
| 1,297
| 9,220
| 5.046261
| 0.09175
| 0.114591
| 0.100229
| 0.067227
| 0.921161
| 0.882964
| 0.845684
| 0.83453
| 0.822002
| 0.822002
| 0
| 0.022753
| 0.141974
| 9,220
| 285
| 103
| 32.350877
| 0.804576
| 0.128091
| 0
| 0.76652
| 0
| 0
| 0.002124
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.079295
| false
| 0.052863
| 0.022026
| 0
| 0.180617
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
f70765eebcf4a1048f40d5dbf0d0748b1d2c2301
| 1,654
|
py
|
Python
|
flavio/physics/quarkonium/test_Vllgamma.py
|
micha-a-schmidt/flavio
|
fb89a11cdf45e536f2d72de8a4a2657130c4e09f
|
[
"MIT"
] | null | null | null |
flavio/physics/quarkonium/test_Vllgamma.py
|
micha-a-schmidt/flavio
|
fb89a11cdf45e536f2d72de8a4a2657130c4e09f
|
[
"MIT"
] | null | null | null |
flavio/physics/quarkonium/test_Vllgamma.py
|
micha-a-schmidt/flavio
|
fb89a11cdf45e536f2d72de8a4a2657130c4e09f
|
[
"MIT"
] | 1
|
2017-11-09T01:40:01.000Z
|
2017-11-09T01:40:01.000Z
|
import unittest
import flavio
from wilson import Wilson
from .Vllgamma import *
### implement test
class TestVllgamma(unittest.TestCase):
def test_np(self):
wc,br=Wilson({'CVRR_muecc' : 1e-2},scale=2.,eft='WET',basis='flavio'),8.3949e-6
self.assertAlmostEqual(flavio.np_prediction('BR(J/psi->muegamma)',wc), br,delta=0.01*br)
self.assertAlmostEqual(flavio.np_prediction('R(J/psi->muegamma)',wc),flavio.np_prediction('BR(J/psi->muegamma)',wc)/flavio.np_prediction('BR(J/psi->ee)',wc),delta=0.001*br)
wc,br=Wilson({'CSRR_muecc' : 1e-2},scale=2.,eft='WET',basis='flavio'),6.2935e-6
self.assertAlmostEqual(flavio.np_prediction('BR(J/psi->muegamma)',wc), br,delta=0.01*br)
self.assertAlmostEqual(flavio.np_prediction('R(J/psi->muegamma)',wc),flavio.np_prediction('BR(J/psi->muegamma)',wc)/flavio.np_prediction('BR(J/psi->ee)',wc),delta=0.001*br)
wc,br=Wilson({'CVRR_tauecc' : 1e-2},scale=2.,eft='WET',basis='flavio'),1.2887e-6
self.assertAlmostEqual(flavio.np_prediction('BR(J/psi->tauegamma)',wc), br,delta=0.01*br)
self.assertAlmostEqual(flavio.np_prediction('R(J/psi->tauegamma)',wc),flavio.np_prediction('BR(J/psi->tauegamma)',wc)/flavio.np_prediction('BR(J/psi->ee)',wc),delta=0.001*br)
wc,br=Wilson({'CSRR_tauecc' : 1e-2},scale=2.,eft='WET',basis='flavio'),9.1097e-7
self.assertAlmostEqual(flavio.np_prediction('BR(J/psi->tauegamma)',wc), br,delta=0.01*br)
self.assertAlmostEqual(flavio.np_prediction('R(J/psi->tauegamma)',wc),flavio.np_prediction('BR(J/psi->tauegamma)',wc)/flavio.np_prediction('BR(J/psi->ee)',wc),delta=0.001*br)
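# Editor's note (not part of the original test): each pair of blocks above switches on a
# single Wilson coefficient (vector or scalar, mu-e or tau-e) at scale 2 GeV in the
# WET/flavio basis, checks BR(J/psi->l l' gamma) against a reference value, and then
# checks that R(J/psi->l l' gamma) equals that BR divided by BR(J/psi->ee) at the same
# new-physics point, within the stated tolerances.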
| 66.16
| 182
| 0.688029
| 265
| 1,654
| 4.215094
| 0.177358
| 0.114593
| 0.257833
| 0.214861
| 0.849597
| 0.849597
| 0.849597
| 0.849597
| 0.849597
| 0.7359
| 0
| 0.042953
| 0.099154
| 1,654
| 25
| 183
| 66.16
| 0.706711
| 0.008464
| 0
| 0.444444
| 0
| 0
| 0.219914
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.055556
| false
| 0
| 0.222222
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f770589df0a31fcecdb564485195a7980f0d12f9
| 2,251
|
py
|
Python
|
mtsoo_noisy/tasks.py
|
thanhbok26b/mtsoo-noisy
|
9b36d75e5be3d0e0fd05f95137c37550d89f40b5
|
[
"MIT"
] | null | null | null |
mtsoo_noisy/tasks.py
|
thanhbok26b/mtsoo-noisy
|
9b36d75e5be3d0e0fd05f95137c37550d89f40b5
|
[
"MIT"
] | null | null | null |
mtsoo_noisy/tasks.py
|
thanhbok26b/mtsoo-noisy
|
9b36d75e5be3d0e0fd05f95137c37550d89f40b5
|
[
"MIT"
] | null | null | null |
from .functions import *
from scipy.io import loadmat
import os
DIRNAME = os.path.dirname(__file__)
class CI_HS:
def __init__(self):
mat = loadmat(os.path.join(DIRNAME, 'data/CI_H.mat'))
self.M1 = mat['Rotation_Task1']
self.M2 = mat['Rotation_Task2']
self.functions = [self.f1, self.f2]
self.dim = 50
def f1(self, x):
return moderate_noise(griewank(self.M1 @ (x * 200 - 100)))
def f2(self, x):
return moderate_noise(rastrigin(self.M2 @ (x * 100 - 50)))
class CI_MS:
def __init__(self):
mat = loadmat(os.path.join(DIRNAME, 'data/CI_M.mat'))
self.M1 = mat['Rotation_Task1']
self.M2 = mat['Rotation_Task2']
self.functions = [self.f1, self.f2]
self.dim = 50
def f1(self, x):
return moderate_noise(ackley(self.M1 @ (x * 100 - 50)))
def f2(self, x):
return moderate_noise(rastrigin(self.M2 @ (x * 100 - 50)))
class CI_LS:
def __init__(self):
mat = loadmat(os.path.join(DIRNAME, 'data/CI_L.mat'))
self.M1 = mat['Rotation_Task1']
self.O1 = mat['GO_Task1'][0]
self.functions = [self.f1, self.f2]
self.dim = 50
def f1(self, x):
return moderate_noise(ackley(self.M1 @ (x * 100 - 50 - self.O1)))
def f2(self, x):
return moderate_noise(schwefel(x * 1000 - 500))
class NI_HS:
def __init__(self):
mat = loadmat(os.path.join(DIRNAME, 'data/NI_H.mat'))
self.O1 = np.ones([50])
self.M2 = mat['Rotation_Task2']
self.functions = [self.f1, self.f2]
self.dim = 50
def f1(self, x):
return moderate_noise(rosenbrock(x * 100 - 50 - self.O1))
def f2(self, x):
return moderate_noise(rastrigin(self.M2 @ (x * 100 - 50)))
class NI_MS:
def __init__(self):
mat = loadmat(os.path.join(DIRNAME, 'data/NI_M.mat'))
self.M1 = mat['Rotation_Task1']
self.O1 = mat['GO_Task1'][0]
self.M2 = mat['Rotation_Task2']
self.functions = [self.f1, self.f2]
self.dim = 50
def f1(self, x):
return moderate_noise(griewank(self.M1 @ (x * 200 - 100 - self.O1)))
def f2(self, x):
return moderate_noise(weierstrass(self.M2 @ (x - 0.5)))
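# --- Hedged usage sketch (editor addition) ---
# Every task pair exposes two noisy 50-dimensional objectives over the unit hypercube;
# the .mat rotation/shift data must sit in data/ next to this module. Illustration:
#
#   import numpy as np
#   pair = CI_HS()
#   x = np.random.rand(pair.dim)          # decision vector in [0, 1]^50
#   f1, f2 = (f(x) for f in pair.functions)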
| 27.45122
| 76
| 0.581519
| 329
| 2,251
| 3.81459
| 0.164134
| 0.047809
| 0.087649
| 0.151394
| 0.873307
| 0.873307
| 0.873307
| 0.873307
| 0.871713
| 0.843825
| 0
| 0.070866
| 0.266548
| 2,251
| 81
| 77
| 27.790123
| 0.689279
| 0
| 0
| 0.633333
| 0
| 0
| 0.08574
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.05
| 0.166667
| 0.55
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
f7708f66bcc19dd5f992f1e2ad16676b1e49469f
| 2,451
|
py
|
Python
|
src/backend/database_migrations/versions/20210224_105823_make_read_reads.py
|
chanzuckerberg/czgenepi
|
87bd2b1739acdfe2c7c25663fafb01dc24c5e2fd
|
[
"MIT"
] | 5
|
2021-02-04T20:18:46.000Z
|
2021-09-09T13:42:42.000Z
|
src/backend/database_migrations/versions/20210224_105823_make_read_reads.py
|
chanzuckerberg/aspen
|
9853778a7ef68b0446751657af5a835f98dde3dc
|
[
"MIT"
] | 422
|
2021-01-30T04:16:00.000Z
|
2022-01-31T23:18:44.000Z
|
src/backend/database_migrations/versions/20210224_105823_make_read_reads.py
|
chanzuckerberg/covidr
|
afe05d703d30ec18ac83944bfb551c313cb216c4
|
[
"MIT"
] | 1
|
2021-05-20T14:54:39.000Z
|
2021-05-20T14:54:39.000Z
|
"""make read -> reads
Create Date: 2021-02-24 10:58:25.108079
"""
import enumtables # noqa: F401
from alembic import op
# revision identifiers, used by Alembic.
revision = "20210224_105823"
down_revision = "20210222_220412"
branch_labels = None
depends_on = None
def upgrade():
op.rename_table(
"sequencing_read_collections",
"sequencing_reads_collections",
schema="aspen",
)
op.rename_table(
"host_filtered_sequencing_read_collections",
"host_filtered_sequencing_reads_collections",
schema="aspen",
)
op.drop_constraint(
"uq_host_filtered_sequencing_read_collections_s3_bucket",
"host_filtered_sequencing_reads_collections",
schema="aspen",
type_="unique",
)
op.create_unique_constraint(
op.f("uq_host_filtered_sequencing_reads_collections_s3_bucket"),
"host_filtered_sequencing_reads_collections",
["s3_bucket", "s3_key"],
schema="aspen",
)
op.drop_constraint(
"uq_sequencing_read_collections_s3_bucket",
"sequencing_reads_collections",
schema="aspen",
type_="unique",
)
op.create_unique_constraint(
op.f("uq_sequencing_reads_collections_s3_bucket"),
"sequencing_reads_collections",
["s3_bucket", "s3_key"],
schema="aspen",
)
def downgrade():
op.drop_constraint(
op.f("uq_sequencing_reads_collections_s3_bucket"),
"sequencing_reads_collections",
schema="aspen",
type_="unique",
)
op.create_unique_constraint(
"uq_sequencing_read_collections_s3_bucket",
"sequencing_reads_collections",
["s3_bucket", "s3_key"],
schema="aspen",
)
op.drop_constraint(
op.f("uq_host_filtered_sequencing_reads_collections_s3_bucket"),
"host_filtered_sequencing_reads_collections",
schema="aspen",
type_="unique",
)
op.create_unique_constraint(
"uq_host_filtered_sequencing_read_collections_s3_bucket",
"host_filtered_sequencing_reads_collections",
["s3_bucket", "s3_key"],
schema="aspen",
)
op.rename_table(
"sequencing_reads_collections",
"sequencing_read_collections",
schema="aspen",
)
op.rename_table(
"host_filtered_sequencing_reads_collections",
"host_filtered_sequencing_read_collections",
schema="aspen",
)
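# Editor's usage note (not part of the original migration): with the backend's
# alembic.ini on hand, this revision is applied and reverted through the standard
# Alembic CLI, e.g.
#
#   alembic upgrade 20210224_105823
#   alembic downgrade 20210222_220412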
| 27.852273
| 72
| 0.666667
| 258
| 2,451
| 5.833333
| 0.205426
| 0.159468
| 0.276412
| 0.143522
| 0.813289
| 0.744186
| 0.726246
| 0.693688
| 0.693688
| 0.61794
| 0
| 0.035582
| 0.231742
| 2,451
| 87
| 73
| 28.172414
| 0.763675
| 0.04488
| 0
| 0.736842
| 0
| 0
| 0.475986
| 0.401372
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026316
| false
| 0
| 0.026316
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f780b6219a2f4ceadd772fd3f376444b7c9afebe
| 1,036
|
py
|
Python
|
Tools/GUI/BlueOS_support_functions.py
|
speedbug78/BlueOS
|
69f711f6eb6ae3dc10939b48ee2c9bf98788aea3
|
[
"MIT"
] | null | null | null |
Tools/GUI/BlueOS_support_functions.py
|
speedbug78/BlueOS
|
69f711f6eb6ae3dc10939b48ee2c9bf98788aea3
|
[
"MIT"
] | null | null | null |
Tools/GUI/BlueOS_support_functions.py
|
speedbug78/BlueOS
|
69f711f6eb6ae3dc10939b48ee2c9bf98788aea3
|
[
"MIT"
] | null | null | null |
def draw_mem():
c_width = int( w.Canvas2.cget( "width" ))
c_height = int( w.Canvas2.cget( "height" ))
print( c_width )
box_start = c_width * 0.05
box_end = c_width * 0.95
mem_title = w.Canvas2.create_text(( c_width / 2 ), 10, fill = "black", font = "Times 10", text = "Flash" )
mem1 = w.Canvas2.create_rectangle( box_start, ( c_height * 0.1 ), box_end, ( c_height * 0.2 ), fill="blue" )
def resize():
c_width = int( w.Canvas2.cget( "width" ))
c_height = int( w.Canvas2.cget( "height" ))
w.Canvas2.coords( mem1, ( c_width * 0.05 ), ( c_height * 0.1 ), ( c_width * 0.95 ), ( c_height * 0.2 ))
c_width = int( w.Canvas2.cget( "width" ))
c_height = int( w.Canvas2.cget( "height" ))
print( c_width )
box_start = c_width * 0.05
box_end = c_width * 0.95
mem_title = w.Canvas2.create_text(( c_width / 2 ), 10, fill = "black", font = "Times 10", text = "Flash" )
mem1 = w.Canvas2.create_rectangle( box_start, ( c_height * 0.1 ), box_end, ( c_height * 0.2 ), fill="blue" )
| 39.846154
| 112
| 0.598456
| 169
| 1,036
| 3.449704
| 0.195266
| 0.133791
| 0.113208
| 0.154374
| 0.874786
| 0.874786
| 0.874786
| 0.874786
| 0.874786
| 0.874786
| 0
| 0.067332
| 0.225869
| 1,036
| 25
| 113
| 41.44
| 0.659601
| 0
| 0
| 0.842105
| 0
| 0
| 0.074324
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0
| 0
| 0.105263
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3d904b54e1c9acebb5c5a3742f9329f5ed83c7f
| 195
|
py
|
Python
|
Emergency_Notifier/formInterface/views.py
|
sachinmaurya17/Emergency_Notifier
|
1138a778c4671b94406d616233434f4f06cdf35b
|
[
"Apache-2.0"
] | null | null | null |
Emergency_Notifier/formInterface/views.py
|
sachinmaurya17/Emergency_Notifier
|
1138a778c4671b94406d616233434f4f06cdf35b
|
[
"Apache-2.0"
] | null | null | null |
Emergency_Notifier/formInterface/views.py
|
sachinmaurya17/Emergency_Notifier
|
1138a778c4671b94406d616233434f4f06cdf35b
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
def Login(request):
return render(request,'Html/Login.html')
def Signup(request):
return render(request,'Html/Signup.html')
| 24.375
| 45
| 0.748718
| 27
| 195
| 5.407407
| 0.555556
| 0.178082
| 0.260274
| 0.356164
| 0.410959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 195
| 8
| 45
| 24.375
| 0.863905
| 0.117949
| 0
| 0
| 0
| 0
| 0.181287
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
54192666f3cc16f084e8ee712e0bb0a7e0c0a04c
| 9,695
|
py
|
Python
|
tests/sets_tests.py
|
gmr/tredis
|
2e91c6a58a35460be0525c51ac6a98fde3b506ad
|
[
"BSD-3-Clause"
] | 22
|
2015-11-16T18:24:23.000Z
|
2019-01-22T06:41:51.000Z
|
tests/sets_tests.py
|
gmr/tredis
|
2e91c6a58a35460be0525c51ac6a98fde3b506ad
|
[
"BSD-3-Clause"
] | 8
|
2016-01-26T21:55:15.000Z
|
2020-11-17T18:00:13.000Z
|
tests/sets_tests.py
|
gmr/tredis
|
2e91c6a58a35460be0525c51ac6a98fde3b506ad
|
[
"BSD-3-Clause"
] | 9
|
2015-11-28T19:32:14.000Z
|
2020-10-19T06:47:26.000Z
|
import mock
from tornado import testing
from tredis import exceptions
from . import base
class SetTests(base.AsyncTestCase):
@testing.gen_test
def test_sadd_single(self):
key, value = self.uuid4(2)
result = yield self.client.sadd(key, value)
self.assertEqual(result, 1)
@testing.gen_test
def test_sadd_multiple(self):
key, value1, value2, value3 = self.uuid4(4)
result = yield self.client.sadd(key, value1, value2, value3)
self.assertTrue(result)
@testing.gen_test
def test_sadd_multiple_dupe(self):
key, value1, value2, value3 = self.uuid4(4)
result = yield self.client.sadd(key, value1, value2, value3, value3)
self.assertEqual(result, 3)
@testing.gen_test
def test_sadd_with_error(self):
key, value = self.uuid4(2)
self._execute_result = exceptions.RedisError('Test Exception')
with mock.patch.object(self.client, '_execute', self._execute):
with self.assertRaises(exceptions.RedisError):
yield self.client.sadd(key, value)
@testing.gen_test
def test_sdiff(self):
key1, key2, value1, value2, value3 = self.uuid4(5)
result = yield self.client.sadd(key1, value1, value2)
self.assertTrue(result)
result = yield self.client.sadd(key2, value1, value3)
self.assertTrue(result)
result = yield self.client.sdiff(key1, key2)
self.assertListEqual(result, [value2])
@testing.gen_test
def test_sdiffstore(self):
key1, key2, key3, value1, value2, value3 = self.uuid4(6)
result = yield self.client.sadd(key1, value1, value2)
self.assertTrue(result)
result = yield self.client.sadd(key2, value1, value3)
self.assertTrue(result)
result = yield self.client.sdiffstore(key3, key1, key2)
self.assertEqual(result, 1)
result = yield self.client.sismember(key3, value2)
self.assertTrue(result)
@testing.gen_test
def test_sinter(self):
key1, key2, value1, value2, value3 = self.uuid4(5)
result = yield self.client.sadd(key1, value1, value2)
self.assertTrue(result)
result = yield self.client.sadd(key2, value2, value3)
self.assertTrue(result)
result = yield self.client.sinter(key1, key2)
self.assertListEqual(result, [value2])
@testing.gen_test
def test_sinterstore(self):
key1, key2, key3, value1, value2, value3 = self.uuid4(6)
result = yield self.client.sadd(key1, value1, value2)
self.assertTrue(result)
result = yield self.client.sadd(key2, value2, value3)
self.assertTrue(result)
result = yield self.client.sinterstore(key3, key1, key2)
self.assertEqual(result, 1)
result = yield self.client.sismember(key3, value2)
self.assertTrue(result)
@testing.gen_test
def test_sadd_sismember_true(self):
key, value = self.uuid4(2)
result = yield self.client.sadd(key, value)
self.assertTrue(result)
result = yield self.client.sismember(key, value)
self.assertTrue(result)
@testing.gen_test
def test_sadd_sismember_false(self):
key, value1, value2 = self.uuid4(3)
result = yield self.client.sadd(key, value1)
self.assertTrue(result)
result = yield self.client.sismember(key, value2)
self.assertFalse(result)
@testing.gen_test
def test_scard(self):
key, value1, value2, value3 = self.uuid4(4)
result = yield self.client.sadd(key, value1, value2, value3)
self.assertTrue(result)
result = yield self.client.scard(key)
self.assertEqual(result, 3)
@testing.gen_test
def test_smembers(self):
key, value1, value2, value3 = self.uuid4(4)
result = yield self.client.sadd(key, value1, value2, value3)
self.assertTrue(result)
result = yield self.client.smembers(key)
self.assertListEqual(sorted(result), sorted([value1, value2, value3]))
@testing.gen_test
def test_smove(self):
key1, key2, value1 = self.uuid4(3)
result = yield self.client.sadd(key1, value1)
self.assertTrue(result)
result = yield self.client.smove(key1, key2, value1)
self.assertTrue(result)
result = yield self.client.sismember(key1, value1)
self.assertFalse(result)
result = yield self.client.sismember(key2, value1)
self.assertTrue(result)
@testing.gen_test
def test_spop(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
member = yield self.client.spop(key)
self.assertIn(member, values)
members = yield self.client.smembers(key)
self.assertNotIn(member, members)
@testing.gen_test
def test_srandmember(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
member = yield self.client.srandmember(key)
self.assertIn(member, values)
members = yield self.client.smembers(key)
self.assertIn(member, members)
@testing.gen_test
def test_srandmember_multi(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
members = yield self.client.srandmember(key, 2)
for member in members:
self.assertIn(member, values)
self.assertEqual(len(members), 2)
@testing.gen_test
def test_srem(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
result = yield self.client.srem(key, value2, value3)
self.assertTrue(result)
members = yield self.client.smembers(key)
self.assertNotIn(value2, members)
self.assertNotIn(value3, members)
@testing.gen_test
def test_srem_dupe(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
result = yield self.client.srem(key, value2, value3, value3)
self.assertEqual(result, 2)
members = yield self.client.smembers(key)
self.assertNotIn(value2, members)
self.assertNotIn(value3, members)
@testing.gen_test
def test_srem_with_error(self):
key, value = self.uuid4(2)
self._execute_result = exceptions.RedisError('Test Exception')
with mock.patch.object(self.client, '_execute', self._execute):
with self.assertRaises(exceptions.RedisError):
yield self.client.srem(key, value)
@testing.gen_test
def test_sscan(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
cursor, result = yield self.client.sscan(key, 0)
self.assertListEqual(sorted(result), sorted(values))
self.assertEqual(cursor, 0)
@testing.gen_test
def test_sscan_with_pattern(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
cursor, result = yield self.client.sscan(key, 0, '*')
self.assertListEqual(sorted(result), sorted(values))
self.assertEqual(cursor, 0)
@testing.gen_test
def test_sscan_with_pattern_and_count(self):
key, value1, value2, value3 = self.uuid4(4)
values = [value1, value2, value3]
result = yield self.client.sadd(key, *values)
self.assertTrue(result)
cursor, result = yield self.client.sscan(key, 0, '*', 10)
self.assertListEqual(sorted(result), sorted(values))
self.assertEqual(cursor, 0)
@testing.gen_test
def test_sscan_with_error(self):
key = self.uuid4()
self._execute_result = exceptions.RedisError('Test Exception')
with mock.patch.object(self.client, '_execute', self._execute):
with self.assertRaises(exceptions.RedisError):
yield self.client.sscan(key, 0)
@testing.gen_test
def test_sunion(self):
key1, key2, key3, value1, value2, value3 = self.uuid4(6)
result = yield self.client.sadd(key1, value1, value2)
self.assertTrue(result)
result = yield self.client.sadd(key2, value2, value3)
self.assertTrue(result)
result = yield self.client.sunion(key1, key2)
self.assertListEqual(sorted(result), sorted([value1, value2, value3]))
@testing.gen_test
def test_sunionstore(self):
key1, key2, key3, value1, value2, value3 = self.uuid4(6)
result = yield self.client.sadd(key1, value1, value2)
self.assertTrue(result)
result = yield self.client.sadd(key2, value2, value3)
self.assertTrue(result)
result = yield self.client.sunionstore(key3, key1, key2)
self.assertEqual(result, 3)
result = yield self.client.sismember(key3, value1)
self.assertTrue(result)
result = yield self.client.sismember(key3, value2)
self.assertTrue(result)
result = yield self.client.sismember(key3, value3)
self.assertTrue(result)
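Every test above follows the same Tornado coroutine pattern: decorate with @testing.gen_test, generate unique keys and values with self.uuid4(), yield the client call, then assert on the result. A hypothetical extra test in that style (not part of the original file, shown indented as it would sit inside the SetTests class, and using only the sadd/scard calls exercised above) would look like:

    @testing.gen_test
    def test_sadd_then_scard(self):
        # add two members, then confirm the set cardinality is 2
        key, value1, value2 = self.uuid4(3)
        result = yield self.client.sadd(key, value1, value2)
        self.assertTrue(result)
        result = yield self.client.scard(key)
        self.assertEqual(result, 2)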
| 38.019608
| 78
| 0.646313
| 1,169
| 9,695
| 5.288281
| 0.070145
| 0.103526
| 0.14801
| 0.173245
| 0.91362
| 0.905694
| 0.862504
| 0.838564
| 0.796668
| 0.742478
| 0
| 0.036658
| 0.243115
| 9,695
| 254
| 79
| 38.169291
| 0.805805
| 0
| 0
| 0.705357
| 0
| 0
| 0.007014
| 0
| 0
| 0
| 0
| 0
| 0.299107
| 1
| 0.111607
| false
| 0
| 0.017857
| 0
| 0.133929
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
541d54e82b852143ab41a7aa0b9d84a11b574426
| 5,933
|
py
|
Python
|
server/camphoric/migrations/0001_initial.py
|
evinism/camphoric
|
fb576f813a6dee366f59fdc9e2cac83fde61921a
|
[
"MIT"
] | 2
|
2020-09-25T01:20:14.000Z
|
2021-08-18T18:49:47.000Z
|
server/camphoric/migrations/0001_initial.py
|
evinism/camphoric
|
fb576f813a6dee366f59fdc9e2cac83fde61921a
|
[
"MIT"
] | 57
|
2020-05-30T03:22:56.000Z
|
2022-03-07T01:52:11.000Z
|
server/camphoric/migrations/0001_initial.py
|
evinism/camphoric
|
fb576f813a6dee366f59fdc9e2cac83fde61921a
|
[
"MIT"
] | 1
|
2020-01-24T04:30:07.000Z
|
2020-01-24T04:30:07.000Z
|
# Generated by Django 2.2.5 on 2019-09-21 19:32
from decimal import Decimal
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Deposit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('deleted_at', models.DateTimeField(null=True)),
('deposited_on', models.DateTimeField(null=True)),
('amount', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=7)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('deleted_at', models.DateTimeField(null=True)),
('name', models.CharField(max_length=255)),
('registration_start', models.DateTimeField(null=True)),
('registration_end', models.DateTimeField(null=True)),
('start', models.DateTimeField(null=True)),
('end', models.DateTimeField(null=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('deleted_at', models.DateTimeField(null=True)),
('name', models.CharField(max_length=255)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Registration',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('deleted_at', models.DateTimeField(null=True)),
('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Event')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Payment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('deleted_at', models.DateTimeField(null=True)),
('paid_on', models.DateTimeField(null=True)),
('amount', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=7)),
('deposit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Deposit')),
('registration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Registration')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Lodging',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('deleted_at', models.DateTimeField(null=True)),
('name', models.CharField(max_length=255)),
('capacity', models.IntegerField(default=0)),
('notes', models.TextField()),
('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Event')),
('parent', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='camphoric.Lodging')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='event',
name='organization',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Organization'),
),
migrations.AddField(
model_name='deposit',
name='event',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Event'),
),
migrations.CreateModel(
name='Camper',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('deleted_at', models.DateTimeField(null=True)),
('lodging', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Lodging')),
('registration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camphoric.Registration')),
],
options={
'abstract': False,
},
),
]
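This file was auto-generated by makemigrations and is applied with "python manage.py migrate". From the CreateModel operations one can infer the rough shape of the underlying models. A sketch of the Deposit model implied by the migration follows; the shared created_at/updated_at/deleted_at columns very likely live on an abstract base model in the real project, so defining them inline here is an assumption.

from decimal import Decimal
from django.db import models

class Deposit(models.Model):
    # timestamp columns mirrored from the CreateModel('Deposit') operation above
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    deleted_at = models.DateTimeField(null=True)
    deposited_on = models.DateTimeField(null=True)
    amount = models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=7)
    # corresponds to the later AddField operation that links deposits to events
    event = models.ForeignKey('Event', on_delete=models.CASCADE)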
| 45.290076
| 126
| 0.559751
| 555
| 5,933
| 5.830631
| 0.145946
| 0.158529
| 0.136279
| 0.108158
| 0.827874
| 0.789555
| 0.726205
| 0.726205
| 0.726205
| 0.726205
| 0
| 0.008424
| 0.29968
| 5,933
| 130
| 127
| 45.638462
| 0.770397
| 0.007585
| 0
| 0.674797
| 1
| 0
| 0.119096
| 0.011213
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02439
| 0
| 0.056911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5816fbc08eff81999e3348134a473bd3ac12c8a2
| 1,214
|
py
|
Python
|
spines/timeseries/ts_toolsets.py
|
BirchKwok/spines
|
3b26ead3b56780e846686847c293a7d890fefc8f
|
[
"Apache-2.0"
] | 1
|
2021-06-17T08:56:29.000Z
|
2021-06-17T08:56:29.000Z
|
spines/timeseries/ts_toolsets.py
|
BirchKwok/spines
|
3b26ead3b56780e846686847c293a7d890fefc8f
|
[
"Apache-2.0"
] | null | null | null |
spines/timeseries/ts_toolsets.py
|
BirchKwok/spines
|
3b26ead3b56780e846686847c293a7d890fefc8f
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
import numpy as np


def _split_sequences(x_seq: pd.Series, y_seq: pd.Series, window_size, pred_days):
    assert isinstance(x_seq, pd.Series) is True and isinstance(y_seq, pd.Series) is True
    x_seq = x_seq.values
    y_seq = y_seq.values
    X, y = [], []
    for i in range(len(x_seq)):
        end_index = i + window_size
        out_end_index = end_index + pred_days
        if out_end_index > len(x_seq):
            break
        seq_x, seq_y = x_seq[i:end_index], y_seq[end_index:out_end_index]
        X.append(seq_x)
        y.append(seq_y)
    return np.array(X), np.array(y)


def _split_arrays(x_seq: pd.Series, y_seq: pd.Series, window_size, pred_days):
    assert isinstance(x_seq, pd.Series) is True and isinstance(y_seq, pd.Series) is True
    x_seq = x_seq.values
    y_seq = y_seq.values
    X, y = [], []
    for i in range(len(x_seq)):
        end_index = i + window_size
        out_end_index = end_index + pred_days
        if out_end_index > len(x_seq):
            break
        seq_x, seq_y = list(x_seq[i:end_index]), list(y_seq[end_index:out_end_index])
        X.append(seq_x)
        y.append(seq_y)
    return np.array(X), np.squeeze(np.array(y))
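Both helpers turn a pair of aligned series into supervised-learning windows: each X row holds window_size consecutive inputs and each y row holds the pred_days values that follow. A quick usage sketch (the series contents are made up for illustration) shows the shapes they return:

import pandas as pd

prices = pd.Series(range(10), dtype=float)
targets = pd.Series(range(10), dtype=float)
X, y = _split_sequences(prices, targets, window_size=3, pred_days=1)
# X.shape == (7, 3): seven sliding windows of three consecutive observations
# y.shape == (7, 1): the single next value that follows each window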
| 24.77551
| 88
| 0.635091
| 214
| 1,214
| 3.303738
| 0.182243
| 0.090523
| 0.12447
| 0.067893
| 0.871287
| 0.834512
| 0.834512
| 0.834512
| 0.834512
| 0.834512
| 0
| 0
| 0.253707
| 1,214
| 48
| 89
| 25.291667
| 0.780353
| 0
| 0
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
582acbe6359c34d5e113e18f4c86b0bb1db8952c
| 3,009
|
py
|
Python
|
allure-pytest-bdd/test/py_file_builder_test.py
|
Duisus/allure-python
|
09402db43da00bb3edb59767d5cc3826457c3f1a
|
[
"Apache-2.0"
] | 1
|
2021-01-08T12:52:32.000Z
|
2021-01-08T12:52:32.000Z
|
allure-pytest-bdd/test/py_file_builder_test.py
|
Duisus/allure-python
|
09402db43da00bb3edb59767d5cc3826457c3f1a
|
[
"Apache-2.0"
] | null | null | null |
allure-pytest-bdd/test/py_file_builder_test.py
|
Duisus/allure-python
|
09402db43da00bb3edb59767d5cc3826457c3f1a
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from .py_file_builder import PyFileBuilder
def test_common_func():
imports = ["pytest", "pytest_bdd", "allure"]
funcs = [
"""@given("given_step")
def given_func():
allure.attach("blah", ...)
raise Exception("message")""",
"""@when("when_step")
def when_func():
allure.attach("blah", ...)
raise Exception("message")""",
"""@then("then_step")
def then_func():
allure.attach("blah", ...)
raise Exception("message")"""
]
expected_answer = """import pytest
import pytest_bdd
import allure
@given("given_step")
def given_func():
allure.attach("blah", ...)
raise Exception("message")
@when("when_step")
def when_func():
allure.attach("blah", ...)
raise Exception("message")
@then("then_step")
def then_func():
allure.attach("blah", ...)
raise Exception("message")"""
file_builder = PyFileBuilder("test")
file_builder.add_imports(*imports)
for func in funcs:
file_builder.add_func(func)
assert file_builder.get_content() == expected_answer
def test_without_imports_func():
funcs = [
"""@given("given_step")
def given_func():
allure.attach("blah", ...)
raise Exception("message")""",
"""@when("when_step")
def when_func():
allure.attach("blah", ...)
raise Exception("message")""",
"""@then("then_step")
def then_func():
allure.attach("blah", ...)
raise Exception("message")"""
]
expected_answer = """@given("given_step")
def given_func():
allure.attach("blah", ...)
raise Exception("message")
@when("when_step")
def when_func():
allure.attach("blah", ...)
raise Exception("message")
@then("then_step")
def then_func():
allure.attach("blah", ...)
raise Exception("message")"""
file_builder = PyFileBuilder("test")
file_builder.add_imports()
for func in funcs:
file_builder.add_func(func)
assert file_builder.get_content() == expected_answer
def test_empty_func_str():
funcs = [
"",
"""@when("when_step")
def when_func():
allure.attach("blah", ...)
raise Exception("message")""",
"""@then("then_step")
def then_func():
allure.attach("blah", ...)
raise Exception("message")"""
]
expected_answer = """
@when("when_step")
def when_func():
allure.attach("blah", ...)
raise Exception("message")
@then("then_step")
def then_func():
allure.attach("blah", ...)
raise Exception("message")"""
file_builder = PyFileBuilder("test")
file_builder.add_imports()
for func in funcs:
file_builder.add_func(func)
assert file_builder.get_content() == expected_answer
def test_have_no_added_funcs():
imports = ["pytest", "pytest_bdd", "allure"]
funcs = []
file_builder = PyFileBuilder("test")
file_builder.add_imports(*imports)
for func in funcs:
file_builder.add_func(func)
with pytest.raises(Exception):
file_builder.get_content()
| 20.331081
| 56
| 0.620472
| 346
| 3,009
| 5.156069
| 0.115607
| 0.104821
| 0.143498
| 0.179372
| 0.887332
| 0.887332
| 0.853139
| 0.853139
| 0.853139
| 0.853139
| 0
| 0
| 0.202725
| 3,009
| 147
| 57
| 20.469388
| 0.743643
| 0
| 0
| 0.692308
| 0
| 0
| 0.416705
| 0.0771
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.051282
| false
| 0
| 0.153846
| 0
| 0.205128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5869a897269e7b22b6f16ec69c7f66fecf9b9831
| 4,807
|
py
|
Python
|
rest_framework_bulk/generics.py
|
xordoquy/django-rest-framework-bulk
|
484df717a790591a7bc58d5fed34f958ae82929a
|
[
"MIT"
] | 1
|
2019-08-20T02:08:33.000Z
|
2019-08-20T02:08:33.000Z
|
rest_framework_bulk/generics.py
|
xordoquy/django-rest-framework-bulk
|
484df717a790591a7bc58d5fed34f958ae82929a
|
[
"MIT"
] | null | null | null |
rest_framework_bulk/generics.py
|
xordoquy/django-rest-framework-bulk
|
484df717a790591a7bc58d5fed34f958ae82929a
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals, print_function

from rest_framework import mixins
from rest_framework.generics import GenericAPIView

from . import mixins as bulk_mixins


__all__ = ["BulkCreateAPIView", "BulkUpdateAPIView", "BulkDestroyAPIView", "ListBulkCreateAPIView",
           "ListCreateBulkUpdateAPIView", "ListCreateBulkUpdateDestroyAPIView", "ListBulkCreateUpdateAPIView",
           "ListBulkCreateUpdateDestroyAPIView"]


##########################################################
### Concrete view classes that provide method handlers ###
### by composing the mixin classes with the base view. ###
##########################################################


class BulkCreateAPIView(bulk_mixins.BulkCreateModelMixin,
                        GenericAPIView):
    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)


class BulkUpdateAPIView(bulk_mixins.BulkUpdateModelMixin,
                        GenericAPIView):
    def put(self, request, *args, **kwargs):
        return self.bulk_update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.partial_bulk_update(request, *args, **kwargs)


class BulkDestroyAPIView(bulk_mixins.BulkDestroyModelMixin,
                         GenericAPIView):
    def delete(self, request, *args, **kwargs):
        return self.bulk_destroy(request, *args, **kwargs)


class ListBulkCreateAPIView(mixins.ListModelMixin,
                            bulk_mixins.BulkCreateModelMixin,
                            GenericAPIView):
    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)


class ListCreateBulkUpdateAPIView(mixins.ListModelMixin,
                                  mixins.CreateModelMixin,
                                  bulk_mixins.BulkUpdateModelMixin,
                                  GenericAPIView):
    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        return self.bulk_update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.partial_bulk_update(request, *args, **kwargs)


class ListCreateBulkUpdateDestroyAPIView(mixins.ListModelMixin,
                                         mixins.CreateModelMixin,
                                         bulk_mixins.BulkUpdateModelMixin,
                                         bulk_mixins.BulkDestroyModelMixin,
                                         GenericAPIView):
    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        return self.bulk_update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.partial_bulk_update(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        return self.bulk_destroy(request, *args, **kwargs)


class ListBulkCreateUpdateAPIView(mixins.ListModelMixin,
                                  bulk_mixins.BulkCreateModelMixin,
                                  bulk_mixins.BulkUpdateModelMixin,
                                  GenericAPIView):
    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        return self.bulk_update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.partial_bulk_update(request, *args, **kwargs)


class ListBulkCreateUpdateDestroyAPIView(mixins.ListModelMixin,
                                         bulk_mixins.BulkCreateModelMixin,
                                         bulk_mixins.BulkUpdateModelMixin,
                                         bulk_mixins.BulkDestroyModelMixin,
                                         GenericAPIView):
    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        return self.bulk_update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.partial_bulk_update(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        return self.bulk_destroy(request, *args, **kwargs)
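Each class above simply maps an HTTP verb onto the matching mixin action, so a concrete API view only needs to supply the usual DRF attributes. A hypothetical subclass is sketched below; Book and BookSerializer are placeholders and not part of this module.

from rest_framework_bulk.generics import ListBulkCreateUpdateDestroyAPIView

class BookBulkView(ListBulkCreateUpdateDestroyAPIView):
    queryset = Book.objects.all()          # assumed model
    serializer_class = BookSerializer      # assumed DRF serializer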
| 39.401639
| 110
| 0.597254
| 429
| 4,807
| 6.592075
| 0.135198
| 0.186704
| 0.288543
| 0.178218
| 0.777581
| 0.712871
| 0.712871
| 0.683168
| 0.647808
| 0.647808
| 0
| 0
| 0.272103
| 4,807
| 121
| 111
| 39.727273
| 0.808231
| 0.021219
| 0
| 0.817073
| 0
| 0
| 0.042623
| 0.031257
| 0
| 0
| 0
| 0
| 0
| 1
| 0.292683
| false
| 0
| 0.04878
| 0.292683
| 0.731707
| 0.012195
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
58a192c907cc8f0b570a614d857c40f361ed88f1
| 11,874
|
py
|
Python
|
saleor/dashboard/customer/sales.py
|
glosoftgroup/KahawaHardware
|
893e94246583addf41c3bb0d58d2ce6bcd233c4f
|
[
"BSD-3-Clause"
] | 1
|
2020-01-22T04:35:31.000Z
|
2020-01-22T04:35:31.000Z
|
saleor/dashboard/customer/sales.py
|
glosoftgroup/KahawaHardware
|
893e94246583addf41c3bb0d58d2ce6bcd233c4f
|
[
"BSD-3-Clause"
] | 8
|
2018-05-07T16:42:35.000Z
|
2022-02-26T03:31:56.000Z
|
saleor/dashboard/customer/sales.py
|
glosoftgroup/tenants
|
a6b229ad1f6d567b7078f83425a532830b71e1bb
|
[
"BSD-3-Clause"
] | null | null | null |
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import get_object_or_404, redirect, render_to_response
from django.template.response import TemplateResponse
from django.db.models import Count, Min, Sum, Avg, F, Q
from django.core.paginator import Paginator, EmptyPage, InvalidPage, PageNotAnInteger
from django.http import HttpResponse, JsonResponse
# from datetime import date, timedelta
from django.utils.dateformat import DateFormat
import logging
import datetime
# from datetime import date
from ...utils import render_to_pdf, default_logo
from ..views import staff_member_required
from ...customer.models import Customer
from ...sale.models import Sales, SoldItem
debug_logger = logging.getLogger('debug_logger')
info_logger = logging.getLogger('info_logger')
error_logger = logging.getLogger('error_logger')
@staff_member_required
def sales_paginate(request):
page = int(request.GET.get('page'))
pk = int(request.GET.get('cpk'))
list_sz = request.GET.get('size')
date = request.GET.get('date')
action = request.GET.get('action')
p2_sz = request.GET.get('psize')
gid = request.GET.get('gid')
today_formart = DateFormat(datetime.date.today())
today = today_formart.format('Y-m-d')
ts = Sales.objects.filter(created__icontains=today)
tsum = ts.aggregate(Sum('total_net'))
total_sales = Sales.objects.aggregate(Sum('total_net'))
total_tax = Sales.objects.aggregate(Sum('total_tax'))
customer = get_object_or_404(Customer, pk=pk)
csales = Sales.objects.filter(customer=customer)
if request.GET.get('sth'):
all_sales = csales.filter(created__icontains=date).order_by('-id')
sales = []
for sale in all_sales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
if date:
try:
all_salesd = csales.filter(created__icontains=date).order_by('-id')
that_date_sum = csales.filter(created__contains=date).aggregate(Sum('total_net'))
sales = []
for sale in all_salesd:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
if p2_sz and gid:
paginator = Paginator(sales, int(p2_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html',
{'sales': sales, 'gid': date})
paginator = Paginator(sales, 10)
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/p2.html',
{'sales': sales, 'pn': paginator.num_pages, 'sz': 10, 'gid': date,
'total_sales': total_sales, 'total_tax': total_tax, 'tsum': tsum,
'that_date_sum': that_date_sum, 'date': date, 'today': today, 'customer':customer})
except ObjectDoesNotExist as e:
return TemplateResponse(request, 'dashboard/customer/sales/p2.html', {'date': date, 'customer':customer})
if action:
try:
all_sales2 = csales.filter(created__icontains=date).order_by('-id')
sales = []
for sale in all_sales2:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
if p2_sz and gid:
paginator = Paginator(sales, int(p2_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html',
{'sales': sales, 'gid': action, 'customer':customer})
paginator = Paginator(sales, 10)
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/p2.html',
{'sales': sales, 'pn': paginator.num_pages, 'sz': 10, 'gid': action,
'total_sales': total_sales, 'total_tax': total_tax, 'tsum': tsum, 'customer':customer})
except ObjectDoesNotExist as e:
return TemplateResponse(request, 'dashboard/customer/sales/p2.html', {'date': date, 'customer':customer})
else:
try:
last_sale = Sales.objects.latest('id')
all_sales = csales
sales = []
for sale in all_sales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
if gid:
date = gid
try:
all_sales2 = csales.filter(created__icontains=date).order_by('-id')
that_date = csales.filter(created__icontains=date)
that_date_sum = that_date.aggregate(Sum('total_net'))
sales = []
for sale in all_sales2:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
if p2_sz:
paginator = Paginator(sales, int(p2_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html',
{'sales': sales, 'gid': date, 'customer':customer})
paginator = Paginator(sales, 10)
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/p2.html',
{'sales': sales, 'pn': paginator.num_pages, 'sz': 10, 'gid': date,
'total_sales': total_sales, 'total_tax': total_tax, 'tsum': tsum,
'that_date_sum': that_date_sum, 'date': date, 'today': today, 'customer':customer})
except ObjectDoesNotExist as e:
return TemplateResponse(request, 'dashboard/customer/sales/p2.html', {'date': date, 'customer':customer})
if list_sz:
paginator = Paginator(sales, int(list_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/p2.html',
{'sales': sales, 'pn': paginator.num_pages, 'sz': list_sz, 'gid': 0,
'total_sales': total_sales, 'total_tax': total_tax, 'tsum': tsum, 'customer':customer})
else:
paginator = Paginator(sales, 10)
if p2_sz:
paginator = Paginator(sales, int(p2_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html', {'sales': sales, 'customer':customer})
try:
sales = paginator.page(page)
except PageNotAnInteger:
sales = paginator.page(1)
except InvalidPage:
sales = paginator.page(1)
except EmptyPage:
sales = paginator.page(paginator.num_pages)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html', {'sales': sales, 'customer':customer})
except ObjectDoesNotExist as e:
return TemplateResponse(request, 'dashboard/customer/sales/p2.html', {'date': date, 'customer':customer})
@staff_member_required
def sales_search(request):
if request.is_ajax():
pk = int(request.GET.get('cpk'))
page = int(request.GET.get('page', 1))
list_sz = request.GET.get('size')
p2_sz = request.GET.get('psize')
q = request.GET.get( 'q' )
if list_sz is None:
sz = 10
else:
sz = list_sz
if q is not None:
customer = get_object_or_404(Customer, pk=pk)
csales = Sales.objects.filter(customer=customer)
all_sales = csales.filter(
Q(invoice_number__icontains=q) |
Q(terminal__terminal_name__icontains=q) |
Q(created__icontains=q) |
Q(user__email__icontains=q) |
Q(customer__name__icontains=q) |
Q(user__name__icontains=q)).order_by('id')
sales = []
if request.GET.get('gid'):
csales = all_sales.filter(created__icontains=request.GET.get('gid'))
for sale in csales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
if p2_sz:
paginator = Paginator(sales, int(p2_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html', {'customer':customer,'sales': sales})
if list_sz:
paginator = Paginator(sales, int(list_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/search.html',
{'customer':customer, 'sales': sales, 'pn': paginator.num_pages, 'sz': list_sz,
'gid': request.GET.get('gid'), 'q': q})
paginator = Paginator(sales, 10)
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/search.html',
{'customer':customer, 'sales': sales, 'pn': paginator.num_pages, 'sz': sz,
'gid': request.GET.get('gid')})
else:
for sale in all_sales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
if list_sz:
print ('lst')
paginator = Paginator(sales, int(list_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/search.html',
{'customer':customer, 'sales': sales, 'pn': paginator.num_pages, 'sz': list_sz, 'gid': 0,
'q': q})
if p2_sz:
print ('pst')
paginator = Paginator(sales, int(p2_sz))
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html', {'customer':customer, 'sales': sales})
paginator = Paginator(sales, 10)
try:
sales = paginator.page(page)
except PageNotAnInteger:
sales = paginator.page(1)
except InvalidPage:
sales = paginator.page(1)
except EmptyPage:
sales = paginator.page(paginator.num_pages)
if p2_sz:
sales = paginator.page(page)
return TemplateResponse(request, 'dashboard/customer/sales/paginate.html', {'customer':customer, 'sales': sales})
return TemplateResponse(request, 'dashboard/customer/sales/search.html',
{'customer':customer, 'sales': sales, 'pn': paginator.num_pages, 'sz': sz, 'q': q})
@staff_member_required
def sales_list_pdf( request ):
if request.is_ajax():
q = request.GET.get( 'q' )
gid = request.GET.get('gid')
pk = int(request.GET.get('cpk'))
if gid:
gid = gid
else:
gid = None
sales = []
customer = get_object_or_404(Customer, pk=pk)
csales = Sales.objects.filter(customer=customer)
if q is not None:
all_sales = csales.filter(
Q(invoice_number__icontains=q) |
Q(terminal__terminal_name__icontains=q) |
Q(created__icontains=q) |
Q(user__email__icontains=q) |
Q(customer__name__icontains=q) |
Q(user__name__icontains=q)).order_by('id')
sales = []
if gid:
csales = all_sales.filter(created__icontains=gid)
for sale in csales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
else:
for sale in all_sales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
elif gid:
csales = csales.filter(created__icontains=gid)
for sale in csales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
else:
for sale in csales:
quantity = SoldItem.objects.filter(sales=sale).aggregate(c=Count('sku'))
setattr(sale, 'quantity', quantity['c'])
sales.append(sale)
img = default_logo()
data = {
'today': datetime.date.today(),
'sales': sales,
'puller': request.user,
'image': img,
'gid':gid,
'customer':customer
}
pdf = render_to_pdf('dashboard/customer/sales/pdf/saleslist.html', data)
return HttpResponse(pdf, content_type='application/pdf')
@staff_member_required
def sales_detail(request, pk=None):
try:
sale = Sales.objects.get(pk=pk)
items = SoldItem.objects.filter(sales=sale)
img = default_logo()
data = {
'today': datetime.date.today(),
'items': items,
'sale': sale,
'puller': request.user,
'image': img
}
pdf = render_to_pdf('dashboard/customer/sales/pdf/pdf.html',data)
return HttpResponse(pdf, content_type='application/pdf')
except ObjectDoesNotExist as e:
error_logger.error(e)
| 36.423313
| 118
| 0.68671
| 1,522
| 11,874
| 5.217477
| 0.094612
| 0.047475
| 0.049868
| 0.095706
| 0.825589
| 0.768165
| 0.738446
| 0.731394
| 0.721572
| 0.697393
| 0
| 0.006689
| 0.169025
| 11,874
| 326
| 119
| 36.423313
| 0.798115
| 0.005221
| 0
| 0.719298
| 0
| 0
| 0.138273
| 0.066384
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014035
| false
| 0
| 0.045614
| 0
| 0.136842
| 0.007018
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5459499b2abcd4991e728d4c59c147c98246142d
| 31,732
|
py
|
Python
|
tests/core/tests/resources.py
|
mdornseif/django-tastypie
|
b898311e9ff1f6a096d3c05c9843dbae5b5fcf4a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/core/tests/resources.py
|
mdornseif/django-tastypie
|
b898311e9ff1f6a096d3c05c9843dbae5b5fcf4a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/core/tests/resources.py
|
mdornseif/django-tastypie
|
b898311e9ff1f6a096d3c05c9843dbae5b5fcf4a
|
[
"BSD-3-Clause"
] | null | null | null |
import base64
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.http import HttpRequest, QueryDict
from django.test import TestCase
from tastypie.authentication import BasicAuthentication
from tastypie.representations.models import ModelRepresentation
from tastypie.resources import Resource
from tastypie.serializers import Serializer
from tastypie.throttle import CacheThrottle
from core.models import Note
class NoteRepresentation(ModelRepresentation):
class Meta:
queryset = Note.objects.filter(is_active=True)
def get_resource_uri(self):
return '/api/v1/notes/%s/' % self.instance.id
class DetailedNoteRepresentation(ModelRepresentation):
class Meta:
queryset = Note.objects.filter(is_active=True)
def get_resource_uri(self):
return '/api/v1/notes/%s/' % self.instance.id
class CustomSerializer(Serializer):
pass
class NoteResource(Resource):
representation = NoteRepresentation
resource_name = 'notes'
class ThrottledNoteResource(Resource):
representation = NoteRepresentation
resource_name = 'notes'
throttle = CacheThrottle(throttle_at=2, timeframe=5, expiration=5)
class ResourceTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_init(self):
# No representations.
self.assertRaises(ImproperlyConfigured, Resource)
# No detail representation.
self.assertRaises(ImproperlyConfigured, Resource, list_representation=NoteResource)
# No resource_name.
self.assertRaises(ImproperlyConfigured, Resource, representation=NoteResource)
# Very minimal & stock.
resource_1 = NoteResource()
self.assertEqual(issubclass(resource_1.list_representation, NoteRepresentation), True)
self.assertEqual(issubclass(resource_1.detail_representation, NoteRepresentation), True)
self.assertEqual(resource_1.resource_name, 'notes')
self.assertEqual(resource_1.limit, 20)
self.assertEqual(resource_1.list_allowed_methods, ['get', 'post', 'put', 'delete'])
self.assertEqual(resource_1.detail_allowed_methods, ['get', 'post', 'put', 'delete'])
self.assertEqual(isinstance(resource_1.serializer, Serializer), True)
# Lightly custom.
resource_2 = NoteResource(
representation=NoteRepresentation,
resource_name='noteish',
allowed_methods=['get'],
)
self.assertEqual(issubclass(resource_2.list_representation, NoteRepresentation), True)
self.assertEqual(issubclass(resource_2.detail_representation, NoteRepresentation), True)
self.assertEqual(resource_2.resource_name, 'noteish')
self.assertEqual(resource_2.limit, 20)
self.assertEqual(resource_2.list_allowed_methods, ['get'])
self.assertEqual(resource_2.detail_allowed_methods, ['get'])
self.assertEqual(isinstance(resource_2.serializer, Serializer), True)
# Highly custom.
resource_3 = NoteResource(
list_representation=NoteRepresentation,
detail_representation=DetailedNoteRepresentation,
limit=50,
resource_name='notey',
serializer=CustomSerializer(),
list_allowed_methods=['get'],
detail_allowed_methods=['get', 'post', 'put']
)
self.assertEqual(issubclass(resource_3.list_representation, NoteRepresentation), True)
self.assertEqual(issubclass(resource_3.detail_representation, DetailedNoteRepresentation), True)
self.assertEqual(resource_3.resource_name, 'notey')
self.assertEqual(resource_3.limit, 50)
self.assertEqual(resource_3.list_allowed_methods, ['get'])
self.assertEqual(resource_3.detail_allowed_methods, ['get', 'post', 'put'])
self.assertEqual(isinstance(resource_3.serializer, CustomSerializer), True)
def test_urls(self):
# The common case, where the ``Api`` specifies the name.
resource = NoteResource(api_name='v1')
patterns = resource.urls
self.assertEqual(len(patterns), 4)
self.assertEqual([pattern.name for pattern in patterns], ['api_dispatch_list', 'api_get_schema', 'api_get_multiple', 'api_dispatch_detail'])
self.assertEqual(reverse('api_dispatch_list', kwargs={
'api_name': 'v1',
'resource_name': 'notes',
}), '/api/v1/notes/')
self.assertEqual(reverse('api_dispatch_detail', kwargs={
'api_name': 'v1',
'resource_name': 'notes',
'obj_id': 1,
}), '/api/v1/notes/1/')
# Start over.
resource = NoteResource()
patterns = resource.urls
self.assertEqual(len(patterns), 4)
self.assertEqual([pattern.name for pattern in patterns], ['api_dispatch_list', 'api_get_schema', 'api_get_multiple', 'api_dispatch_detail'])
self.assertEqual(reverse('api_dispatch_list', urlconf='core.tests.manual_urls', kwargs={
'resource_name': 'notes',
}), '/notes/')
self.assertEqual(reverse('api_dispatch_detail', urlconf='core.tests.manual_urls', kwargs={
'resource_name': 'notes',
'obj_id': 1,
}), '/notes/1/')
def test_determine_format(self):
resource = NoteResource()
request = HttpRequest()
# Default.
self.assertEqual(resource.determine_format(request), 'application/json')
# Test forcing the ``format`` parameter.
request.GET = {'format': 'json'}
self.assertEqual(resource.determine_format(request), 'application/json')
request.GET = {'format': 'jsonp'}
self.assertEqual(resource.determine_format(request), 'text/javascript')
request.GET = {'format': 'xml'}
self.assertEqual(resource.determine_format(request), 'application/xml')
request.GET = {'format': 'yaml'}
self.assertEqual(resource.determine_format(request), 'text/yaml')
request.GET = {'format': 'foo'}
self.assertEqual(resource.determine_format(request), 'application/json')
# Test the ``Accept`` header.
request.META = {'HTTP_ACCEPT': 'application/json'}
self.assertEqual(resource.determine_format(request), 'application/json')
request.META = {'HTTP_ACCEPT': 'text/javascript'}
self.assertEqual(resource.determine_format(request), 'text/javascript')
request.META = {'HTTP_ACCEPT': 'application/xml'}
self.assertEqual(resource.determine_format(request), 'application/xml')
request.META = {'HTTP_ACCEPT': 'text/yaml'}
self.assertEqual(resource.determine_format(request), 'text/yaml')
request.META = {'HTTP_ACCEPT': 'text/html'}
self.assertEqual(resource.determine_format(request), 'text/html')
request.META = {'HTTP_ACCEPT': 'application/json,application/xml;q=0.9,*/*;q=0.8'}
self.assertEqual(resource.determine_format(request), 'application/json')
request.META = {'HTTP_ACCEPT': 'text/plain,application/xml,application/json;q=0.9,*/*;q=0.8'}
self.assertEqual(resource.determine_format(request), 'application/xml')
def test_get_list(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"meta": {"limit": 20, "next": null, "offset": 0, "previous": null, "total_count": 4}, "objects": [{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}, {"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}, {"content": "My neighborhood\'s been kinda weird lately, especially after the lava flow took out the corner store. Granny can hardly outrun the magma with her walker.", "created": "Thu, 1 Apr 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/4/", "slug": "recent-volcanic-activity", "title": "Recent Volcanic Activity.", "updated": "Thu, 1 Apr 2010 20:05:00 -0500"}, {"content": "Man, the second eruption came on fast. Granny didn\'t have a chance. On the upshot, I was able to save her walker and I got a cool shawl out of the deal!", "created": "Fri, 2 Apr 2010 10:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/6/", "slug": "grannys-gone", "title": "Granny\'s Gone", "updated": "Fri, 2 Apr 2010 10:05:00 -0500"}]}')
# Test slicing.
# First an invalid offset.
request.GET = {'format': 'json', 'offset': 'abc', 'limit': 1}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 400)
# Then an out of range offset.
request.GET = {'format': 'json', 'offset': -1, 'limit': 1}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 400)
# Then an out of range limit.
request.GET = {'format': 'json', 'offset': 0, 'limit': -1}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 400)
# Valid slice.
request.GET = {'format': 'json', 'offset': 0, 'limit': 2}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"meta": {"limit": 2, "next": null, "offset": 0, "previous": null, "total_count": 4}, "objects": [{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}, {"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}]}')
# Valid, slightly overlapping slice.
request.GET = {'format': 'json', 'offset': 1, 'limit': 2}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"meta": {"limit": 2, "next": null, "offset": 1, "previous": null, "total_count": 4}, "objects": [{"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}, {"content": "My neighborhood\'s been kinda weird lately, especially after the lava flow took out the corner store. Granny can hardly outrun the magma with her walker.", "created": "Thu, 1 Apr 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/4/", "slug": "recent-volcanic-activity", "title": "Recent Volcanic Activity.", "updated": "Thu, 1 Apr 2010 20:05:00 -0500"}]}')
# Valid, non-overlapping slice.
request.GET = {'format': 'json', 'offset': 3, 'limit': 2}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"meta": {"limit": 2, "next": null, "offset": 3, "previous": null, "total_count": 4}, "objects": [{"content": "Man, the second eruption came on fast. Granny didn\'t have a chance. On the upshot, I was able to save her walker and I got a cool shawl out of the deal!", "created": "Fri, 2 Apr 2010 10:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/6/", "slug": "grannys-gone", "title": "Granny\'s Gone", "updated": "Fri, 2 Apr 2010 10:05:00 -0500"}]}')
# Valid, but beyond the bounds slice.
request.GET = {'format': 'json', 'offset': 100, 'limit': 2}
resp = resource.get_list(request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"meta": {"limit": 2, "next": null, "offset": 100, "previous": null, "total_count": 4}, "objects": []}')
def test_get_detail(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
resp = resource.get_detail(request, obj_id=1)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}')
resp = resource.get_detail(request, obj_id=2)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}')
resp = resource.get_detail(request, obj_id=300)
self.assertEqual(resp.status_code, 410)
def test_put_list(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
self.assertEqual(Note.objects.count(), 6)
request.raw_post_data = '{"objects": [{"content": "The cat is back. The dog coughed him up out back.", "created": "2010-04-03 20:05:00", "is_active": true, "slug": "cat-is-back-again", "title": "The Cat Is Back", "updated": "2010-04-03 20:05:00"}]}'
resp = resource.put_list(request)
self.assertEqual(resp.status_code, 204)
self.assertEqual(Note.objects.count(), 3)
self.assertEqual(Note.objects.filter(is_active=True).count(), 1)
new_note = Note.objects.get(slug='cat-is-back-again')
self.assertEqual(new_note.content, "The cat is back. The dog coughed him up out back.")
def test_put_detail(self):
self.assertEqual(Note.objects.count(), 6)
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.raw_post_data = '{"content": "The cat is back. The dog coughed him up out back.", "created": "2010-04-03 20:05:00", "is_active": true, "slug": "cat-is-back", "title": "The Cat Is Back", "updated": "2010-04-03 20:05:00"}'
resp = resource.put_detail(request, obj_id=10)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Note.objects.count(), 7)
new_note = Note.objects.get(slug='cat-is-back')
self.assertEqual(new_note.content, "The cat is back. The dog coughed him up out back.")
request.raw_post_data = '{"content": "The cat is gone again. I think it was the rabbits that ate him this time.", "created": "2010-04-03 20:05:00", "is_active": true, "slug": "cat-is-back", "title": "The Cat Is Gone", "updated": "2010-04-03 20:05:00"}'
resp = resource.put_detail(request, obj_id=10)
self.assertEqual(resp.status_code, 204)
self.assertEqual(Note.objects.count(), 7)
new_note = Note.objects.get(slug='cat-is-back')
self.assertEqual(new_note.content, u'The cat is gone again. I think it was the rabbits that ate him this time.')
def test_post_list(self):
self.assertEqual(Note.objects.count(), 6)
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.raw_post_data = '{"content": "The cat is back. The dog coughed him up out back.", "created": "2010-04-03 20:05:00", "is_active": true, "slug": "cat-is-back", "title": "The Cat Is Back", "updated": "2010-04-03 20:05:00"}'
resp = resource.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Note.objects.count(), 7)
new_note = Note.objects.get(slug='cat-is-back')
self.assertEqual(new_note.content, "The cat is back. The dog coughed him up out back.")
def test_post_detail(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
resp = resource.post_detail(request, obj_id=2)
self.assertEqual(resp.status_code, 501)
def test_delete_list(self):
self.assertEqual(Note.objects.count(), 6)
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'DELETE'
resp = resource.delete_list(request)
self.assertEqual(resp.status_code, 204)
# Only the non-actives are left alive.
self.assertEqual(Note.objects.count(), 2)
def test_delete_detail(self):
self.assertEqual(Note.objects.count(), 6)
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'DELETE'
resp = resource.delete_detail(request, obj_id=2)
self.assertEqual(resp.status_code, 204)
self.assertEqual(Note.objects.count(), 5)
def test_dispatch_list(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.dispatch_list(request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"meta": {"limit": 20, "next": null, "offset": 0, "previous": null, "total_count": 4}, "objects": [{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}, {"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}, {"content": "My neighborhood\'s been kinda weird lately, especially after the lava flow took out the corner store. Granny can hardly outrun the magma with her walker.", "created": "Thu, 1 Apr 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/4/", "slug": "recent-volcanic-activity", "title": "Recent Volcanic Activity.", "updated": "Thu, 1 Apr 2010 20:05:00 -0500"}, {"content": "Man, the second eruption came on fast. Granny didn\'t have a chance. On the upshot, I was able to save her walker and I got a cool shawl out of the deal!", "created": "Fri, 2 Apr 2010 10:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/6/", "slug": "grannys-gone", "title": "Granny\'s Gone", "updated": "Fri, 2 Apr 2010 10:05:00 -0500"}]}')
def test_dispatch_detail(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.dispatch_detail(request, obj_id=1)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}')
def test_dispatch(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.dispatch('list', request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"meta": {"limit": 20, "next": null, "offset": 0, "previous": null, "total_count": 4}, "objects": [{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}, {"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}, {"content": "My neighborhood\'s been kinda weird lately, especially after the lava flow took out the corner store. Granny can hardly outrun the magma with her walker.", "created": "Thu, 1 Apr 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/4/", "slug": "recent-volcanic-activity", "title": "Recent Volcanic Activity.", "updated": "Thu, 1 Apr 2010 20:05:00 -0500"}, {"content": "Man, the second eruption came on fast. Granny didn\'t have a chance. On the upshot, I was able to save her walker and I got a cool shawl out of the deal!", "created": "Fri, 2 Apr 2010 10:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/6/", "slug": "grannys-gone", "title": "Granny\'s Gone", "updated": "Fri, 2 Apr 2010 10:05:00 -0500"}]}')
resp = resource.dispatch('detail', request, obj_id=1)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}')
def test_build_representation(self):
resource = NoteResource()
unpopulated_repr = resource.build_representation()
self.assertTrue(isinstance(unpopulated_repr, NoteRepresentation))
self.assertEqual(unpopulated_repr.title.value, None)
populated_repr = resource.build_representation(data={'title': 'Foo'})
self.assertTrue(isinstance(populated_repr, NoteRepresentation))
self.assertEqual(populated_repr.title.value, 'Foo')
def test_fetch_list(self):
resource = NoteResource()
object_list = resource.fetch_list()
self.assertEqual(len(object_list), 4)
self.assertEqual(object_list[0].title.value, u'First Post!')
def test_fetch_detail(self):
resource = NoteResource()
representation = resource.fetch_detail(obj_id=1)
self.assertTrue(isinstance(representation, NoteRepresentation))
self.assertEqual(representation.title.value, u'First Post!')
def test_jsonp_validation(self):
resource = NoteResource()
# invalid JSONP callback should return Http400
request = HttpRequest()
request.GET = {'format': 'jsonp', 'callback': '()'}
request.method = 'GET'
resp = resource.dispatch_detail(request, obj_id=1)
self.assertEqual(resp.status_code, 400)
self.assertEqual(resp.content, 'JSONP callback name is invalid.')
# valid JSONP callback should work
request = HttpRequest()
request.GET = {'format': 'jsonp', 'callback': 'myCallback'}
request.method = 'GET'
resp = resource.dispatch_detail(request, obj_id=1)
self.assertEqual(resp.status_code, 200)
def test_get_schema(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.get_schema(request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"content": {"nullable": false, "readonly": false, "type": "string"}, "created": {"nullable": false, "readonly": false, "type": "datetime"}, "is_active": {"nullable": false, "readonly": false, "type": "boolean"}, "resource_uri": {"nullable": false, "readonly": true, "type": "string"}, "slug": {"nullable": false, "readonly": false, "type": "string"}, "title": {"nullable": false, "readonly": false, "type": "string"}, "updated": {"nullable": false, "readonly": false, "type": "datetime"}}')
def test_get_multiple(self):
resource = NoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.get_multiple(request, id_list='1')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"objects": [{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}]}')
resp = resource.get_multiple(request, id_list='1;2')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"objects": [{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}, {"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}]}')
resp = resource.get_multiple(request, id_list='2;3')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"not_found": ["3"], "objects": [{"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}]}')
resp = resource.get_multiple(request, id_list='1;2;4;6')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content, '{"objects": [{"content": "This is my very first post using my shiny new API. Pretty sweet, huh?", "created": "Tue, 30 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/1/", "slug": "first-post", "title": "First Post!", "updated": "Tue, 30 Mar 2010 20:05:00 -0500"}, {"content": "The dog ate my cat today. He looks seriously uncomfortable.", "created": "Wed, 31 Mar 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/2/", "slug": "another-post", "title": "Another Post", "updated": "Wed, 31 Mar 2010 20:05:00 -0500"}, {"content": "My neighborhood\'s been kinda weird lately, especially after the lava flow took out the corner store. Granny can hardly outrun the magma with her walker.", "created": "Thu, 1 Apr 2010 20:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/4/", "slug": "recent-volcanic-activity", "title": "Recent Volcanic Activity.", "updated": "Thu, 1 Apr 2010 20:05:00 -0500"}, {"content": "Man, the second eruption came on fast. Granny didn\'t have a chance. On the upshot, I was able to save her walker and I got a cool shawl out of the deal!", "created": "Fri, 2 Apr 2010 10:05:00 -0500", "is_active": true, "resource_uri": "/api/v1/notes/6/", "slug": "grannys-gone", "title": "Granny\'s Gone", "updated": "Fri, 2 Apr 2010 10:05:00 -0500"}]}')
def test_check_throttling(self):
resource = ThrottledNoteResource()
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
# Not throttled.
resp = resource.dispatch('list', request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(cache.get('noaddr_nohost_accesses')), 1)
# Not throttled.
resp = resource.dispatch('list', request)
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(cache.get('noaddr_nohost_accesses')), 2)
# Throttled.
resp = resource.dispatch('list', request)
self.assertEqual(resp.status_code, 403)
self.assertEqual(len(cache.get('noaddr_nohost_accesses')), 2)
# Throttled.
resp = resource.dispatch('list', request)
self.assertEqual(resp.status_code, 403)
self.assertEqual(len(cache.get('noaddr_nohost_accesses')), 2)
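# Editorial note (not part of the original test): the cache key asserted above is
# presumably derived from the client identifier; with a bare HttpRequest that has
# neither REMOTE_ADDR nor REMOTE_HOST set, the identifier falls back to
# 'noaddr_nohost', and the throttle appends '_accesses' when it records hits.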
def test_generate_cache_key(self):
resource = NoteResource()
self.assertEqual(resource.generate_cache_key(), 'nonspecific:notes::')
self.assertEqual(resource.generate_cache_key('abc', '123'), 'nonspecific:notes:abc:123:')
self.assertEqual(resource.generate_cache_key(foo='bar', moof='baz'), 'nonspecific:notes::foo=bar:moof=baz')
self.assertEqual(resource.generate_cache_key('abc', '123', foo='bar', moof='baz'), 'nonspecific:notes:abc:123:foo=bar:moof=baz')
def test_cached_fetch_list(self):
resource = NoteResource()
object_list = resource.cached_fetch_list()
self.assertEqual(len(object_list), 4)
self.assertEqual(object_list[0].title.value, u'First Post!')
def test_cached_fetch_detail(self):
resource = NoteResource()
representation = resource.cached_fetch_detail(obj_id=1)
self.assertTrue(isinstance(representation, NoteRepresentation))
self.assertEqual(representation.title.value, u'First Post!')
class BasicAuthResourceTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_dispatch_list(self):
resource = NoteResource(authentication=BasicAuthentication())
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.dispatch_list(request)
self.assertEqual(resp.status_code, 401)
john_doe = User.objects.get(username='johndoe')
john_doe.set_password('pass')
john_doe.save()
request.META['HTTP_AUTHORIZATION'] = 'Basic %s' % base64.b64encode('johndoe:pass')
resp = resource.dispatch_list(request)
self.assertEqual(resp.status_code, 200)
def test_dispatch_detail(self):
resource = NoteResource(authentication=BasicAuthentication())
request = HttpRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.dispatch_detail(request, obj_id=1)
self.assertEqual(resp.status_code, 401)
john_doe = User.objects.get(username='johndoe')
john_doe.set_password('pass')
john_doe.save()
request.META['HTTP_AUTHORIZATION'] = 'Basic %s' % base64.b64encode('johndoe:pass')
resp = resource.dispatch_list(request)
self.assertEqual(resp.status_code, 200)
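# Editorial note (not part of the original tests): base64.b64encode('johndoe:pass')
# works on Python 2 str objects; under Python 3 the credentials must be bytes,
# e.g. base64.b64encode(b'johndoe:pass').decode('ascii'), before being placed in
# the HTTP_AUTHORIZATION header.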
| 61.258687
| 1,447
| 0.643924
| 4,035
| 31,732
| 4.966047
| 0.080545
| 0.095818
| 0.023156
| 0.023954
| 0.846841
| 0.826779
| 0.790598
| 0.741142
| 0.705809
| 0.683551
| 0
| 0.053792
| 0.203832
| 31,732
| 517
| 1,448
| 61.377176
| 0.739352
| 0.020043
| 0
| 0.571802
| 0
| 0.065274
| 0.353536
| 0.011008
| 0
| 0
| 0
| 0
| 0.35248
| 1
| 0.073107
| false
| 0.013055
| 0.033943
| 0.005222
| 0.154047
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
54736218f3d6d39398ec93d7aa01b7102fc46d7e
| 135
|
py
|
Python
|
src/app/auth/__init__.py
|
Ezequiel-Vega/peg
|
ff5c41c91df7885e0fd3d4c750497dd2d8290b67
|
[
"MIT"
] | null | null | null |
src/app/auth/__init__.py
|
Ezequiel-Vega/peg
|
ff5c41c91df7885e0fd3d4c750497dd2d8290b67
|
[
"MIT"
] | null | null | null |
src/app/auth/__init__.py
|
Ezequiel-Vega/peg
|
ff5c41c91df7885e0fd3d4c750497dd2d8290b67
|
[
"MIT"
] | null | null | null |
from flask import Blueprint

auth_bp: Blueprint = Blueprint("auth", __name__, template_folder='templates/auth')

# Imported last so routes.py can import auth_bp without a circular import.
from . import routes
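# Editorial usage sketch (not part of the original file): how such a blueprint is
# typically registered on the application. The factory location and the '/auth'
# prefix are assumptions, not taken from the repository.
#
#     from flask import Flask
#     from src.app.auth import auth_bp
#
#     app = Flask(__name__)
#     app.register_blueprint(auth_bp, url_prefix='/auth')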
| 22.5
| 83
| 0.777778
| 17
| 135
| 5.823529
| 0.647059
| 0.262626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125926
| 135
| 5
| 84
| 27
| 0.838983
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
49eda6fd9e4d9b4f195463aff19bfea647a64343
| 2,633
|
py
|
Python
|
Website Pinger.py
|
RAZERDK/WebsidePinger
|
d68b2ee4043cbf4d655a15ccf1530ce1a4119947
|
[
"Apache-2.0"
] | null | null | null |
Website Pinger.py
|
RAZERDK/WebsidePinger
|
d68b2ee4043cbf4d655a15ccf1530ce1a4119947
|
[
"Apache-2.0"
] | null | null | null |
Website Pinger.py
|
RAZERDK/WebsidePinger
|
d68b2ee4043cbf4d655a15ccf1530ce1a4119947
|
[
"Apache-2.0"
] | null | null | null |
import os
import time

hostnames = [
    'Github.com',
    # You can just add your own website/IP for it to ping
]

for hostname in hostnames:
    # os.system returns the command's exit status; 0 means the ping succeeded.
    response = os.system('ping ' + hostname)
    if response == 0:
        # The original file repeats this line 54 times; the loop keeps the output identical.
        for _ in range(54):
            print(hostname, 'Pinger')
    else:
        # The original file repeats this line 8 times.
        for _ in range(8):
            print(hostname, 'Invalid IP')

# Waiting for response
for _ in range(6):
    print('')
print("All listed websites have had their status checked")
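# Editorial note (not part of the original script): on Windows, `ping host` sends
# four packets and exits, but on most Unix-like systems plain `ping` runs until
# interrupted, so the loop above would block on the first host. A bounded,
# cross-platform variant might look like this sketch, with the packet-count flag
# chosen per platform:
#
#     import platform
#     flag = '-n' if platform.system() == 'Windows' else '-c'
#     response = os.system('ping {} 1 {}'.format(flag, hostname))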
| 32.109756
| 62
| 0.544246
| 242
| 2,633
| 5.921488
| 0.157025
| 0.562456
| 0.71598
| 0.887648
| 0.863224
| 0.863224
| 0.8388
| 0.8388
| 0.8388
| 0.8388
| 0
| 0.000558
| 0.319028
| 2,633
| 82
| 62
| 32.109756
| 0.798661
| 0.025826
| 0
| 0.871795
| 0
| 0
| 0.181357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.025641
| null | null | 0.884615
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
b7191677bb57e6665a5b812a295d08a8fd968a56
| 1,383
|
py
|
Python
|
pysnowball/utls.py
|
chntylz/pysnowball
|
eb3b8c5e911455354d38acb237bd640a2acb0532
|
[
"Apache-2.0"
] | null | null | null |
pysnowball/utls.py
|
chntylz/pysnowball
|
eb3b8c5e911455354d38acb237bd640a2acb0532
|
[
"Apache-2.0"
] | null | null | null |
pysnowball/utls.py
|
chntylz/pysnowball
|
eb3b8c5e911455354d38acb237bd640a2acb0532
|
[
"Apache-2.0"
] | null | null | null |
import requests
import json
import pysnowball.cons as cons
import pysnowball.token as token

debug = 0
#debug = 1


def fetch(url):
    HEADERS = {'Host': 'stock.xueqiu.com',
               'Accept': 'application/json',
               'Cookie': token.get_token(),
               'User-Agent': 'Xueqiu iPhone 11.8',
               'Accept-Language': 'zh-Hans-CN;q=1, ja-JP;q=0.9',
               'Accept-Encoding': 'br, gzip, deflate',
               'Connection': 'keep-alive'}
    response = requests.get(url, headers=HEADERS)
    if debug:
        print(url)
        print(HEADERS)
        print(response)
        print(response.content)
    if response.status_code != 200:
        raise Exception(response.content)
    return json.loads(response.content)


def fetch_without_token(url):
    HEADERS = {'Host': 'stock.xueqiu.com',
               'Accept': 'application/json',
               'User-Agent': 'Xueqiu iPhone 11.8',
               'Accept-Language': 'zh-Hans-CN;q=1, ja-JP;q=0.9',
               'Accept-Encoding': 'br, gzip, deflate',
               'Connection': 'keep-alive'}
    response = requests.get(url, headers=HEADERS)
    if debug:
        print(url)
        print(HEADERS)
        print(response)
        print(response.content)
    if response.status_code != 200:
        raise Exception(response.content)
    return json.loads(response.content)
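# Editorial usage sketch (not part of the original module). The endpoint and
# symbol below are illustrative assumptions; a real call also needs network
# access and, for fetch(), a token configured in pysnowball.token.
#
#     if __name__ == '__main__':
#         quote = fetch_without_token(
#             'https://stock.xueqiu.com/v5/stock/realtime/quotec.json?symbol=SH600036')
#         print(quote)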
| 26.09434
| 64
| 0.577007
| 160
| 1,383
| 4.95625
| 0.3375
| 0.113493
| 0.035309
| 0.047919
| 0.827238
| 0.827238
| 0.827238
| 0.827238
| 0.827238
| 0.703657
| 0
| 0.020325
| 0.288503
| 1,383
| 52
| 65
| 26.596154
| 0.785569
| 0.006508
| 0
| 0.789474
| 0
| 0
| 0.243263
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.105263
| 0
| 0.210526
| 0.210526
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f787f77bead57f93411e68d983b26ea3909974c
| 1,274
|
py
|
Python
|
amanturcolor.py
|
Amankumar10/Aman
|
e75f3ff16ecfe5c3320c7f5be9c02bb8787d2b34
|
[
"MIT"
] | null | null | null |
amanturcolor.py
|
Amankumar10/Aman
|
e75f3ff16ecfe5c3320c7f5be9c02bb8787d2b34
|
[
"MIT"
] | null | null | null |
amanturcolor.py
|
Amankumar10/Aman
|
e75f3ff16ecfe5c3320c7f5be9c02bb8787d2b34
|
[
"MIT"
] | null | null | null |
import turtle

colors = ["red", "blue", "green", "black", "brown"]
my_turtle = turtle.Turtle()
my_turtle.speed(70)

for i in range(276):
    my_turtle.pencolor(colors[i % 5])
    my_turtle.left(70)        #(a)
    my_turtle.forward(100)    #(a)
    my_turtle.left(55)        #(a)
    my_turtle.backward(100)   #(a)
    my_turtle.forward(50)     #(a)
    my_turtle.left(55)        #(a)
    my_turtle.forward(50)     #(a)
    my_turtle.backward(50)    #(a)
    my_turtle.left(124)       #(a)
    my_turtle.forward(50)     #(a)
    my_turtle.left(150)
    my_turtle.forward(100)
    my_turtle.right(150)
    my_turtle.forward(60)
    my_turtle.right(60)
    my_turtle.backward(60)
    my_turtle.left(30)
    my_turtle.forward(100)
    my_turtle.left(150)       #(a)
    my_turtle.forward(100)    #(a)
    my_turtle.left(55)        #(a)
    my_turtle.backward(100)   #(a)
    my_turtle.forward(50)     #(a)
    my_turtle.left(55)        #(a)
    my_turtle.forward(50)     #(a)
    my_turtle.backward(50)    #(a)
    my_turtle.left(124)       #(a)
    my_turtle.forward(50)     #(a)
    my_turtle.left(150)
    my_turtle.forward(100)
    my_turtle.right(150)
    my_turtle.forward(100)
    my_turtle.left(150)
    my_turtle.forward(100)
    my_turtle.right(9)
    my_turtle.left(150)

input()
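# Editorial notes (not part of the original script): turtle speeds are defined on
# a 0-10 scale, so speed(70) falls outside that range and is treated as 0, i.e.
# the fastest drawing mode. pencolor cycles through the five colours via i % 5,
# and input() at the end keeps the drawing window open until Enter is pressed.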
| 24.5
| 45
| 0.599686
| 192
| 1,274
| 3.776042
| 0.166667
| 0.430345
| 0.248276
| 0.176552
| 0.728276
| 0.728276
| 0.728276
| 0.728276
| 0.678621
| 0.678621
| 0
| 0.098242
| 0.240973
| 1,274
| 52
| 46
| 24.5
| 0.651499
| 0.047096
| 0
| 0.697674
| 0
| 0
| 0.01841
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.023256
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3fa9eb3ac089bc16f50f6df2000ad8c4d8b38804
| 97,972
|
py
|
Python
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_vswitch.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_vswitch.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_vswitch.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_vswitch(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
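# Editorial note (not part of the generated file): every method below follows the
# same pattern -- build an ElementTree "config" document for one YANG leaf, then
# hand it to the callback supplied at construction time (typically a function
# that sends the XML to the switch, e.g. over NETCONF). A minimal sketch, with a
# purely illustrative callback:
#
#     import xml.etree.ElementTree as ET
#
#     def send_to_device(config):
#         print(ET.tostring(config))   # stand-in for a real NETCONF call
#
#     vs = brocade_vswitch(callback=send_to_device)
#     vs.vcenter_id(id='vc1')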
def get_vnetwork_hosts_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
name = ET.SubElement(vnetwork_hosts, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_vmnic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
vmnic = ET.SubElement(vnetwork_hosts, "vmnic")
vmnic.text = kwargs.pop('vmnic')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
datacenter = ET.SubElement(vnetwork_hosts, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
mac = ET.SubElement(vnetwork_hosts, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_vswitch(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
vswitch = ET.SubElement(vnetwork_hosts, "vswitch")
vswitch.text = kwargs.pop('vswitch')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
interface_type = ET.SubElement(vnetwork_hosts, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
interface_name = ET.SubElement(vnetwork_hosts, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
name = ET.SubElement(vnetwork_vms, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
mac = ET.SubElement(vnetwork_vms, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
datacenter = ET.SubElement(vnetwork_vms, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
ip = ET.SubElement(vnetwork_vms, "ip")
ip.text = kwargs.pop('ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_host_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
host_nn = ET.SubElement(vnetwork_vms, "host-nn")
host_nn.text = kwargs.pop('host_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
name = ET.SubElement(vnetwork_dvpgs, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
datacenter = ET.SubElement(vnetwork_dvpgs, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_dvs_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
dvs_nn = ET.SubElement(vnetwork_dvpgs, "dvs-nn")
dvs_nn.text = kwargs.pop('dvs_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
vlan = ET.SubElement(vnetwork_dvpgs, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
name = ET.SubElement(vnetwork_dvs, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_host(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
host = ET.SubElement(vnetwork_dvs, "host")
host.text = kwargs.pop('host')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
datacenter = ET.SubElement(vnetwork_dvs, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_pnic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
pnic = ET.SubElement(vnetwork_dvs, "pnic")
pnic.text = kwargs.pop('pnic')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
interface_type = ET.SubElement(vnetwork_dvs, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
interface_name = ET.SubElement(vnetwork_dvs, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
name = ET.SubElement(vnetwork_vswitches, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_host(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
host = ET.SubElement(vnetwork_vswitches, "host")
host.text = kwargs.pop('host')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
datacenter = ET.SubElement(vnetwork_vswitches, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_pnic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
pnic = ET.SubElement(vnetwork_vswitches, "pnic")
pnic.text = kwargs.pop('pnic')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
interface_type = ET.SubElement(vnetwork_vswitches, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
interface_name = ET.SubElement(vnetwork_vswitches, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
name = ET.SubElement(vnetwork_pgs, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
datacenter = ET.SubElement(vnetwork_pgs, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_vs_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
vs_nn = ET.SubElement(vnetwork_pgs, "vs-nn")
vs_nn.text = kwargs.pop('vs_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
vlan = ET.SubElement(vnetwork_pgs, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_host_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
host_nn = ET.SubElement(vnetwork_pgs, "host-nn")
host_nn.text = kwargs.pop('host_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
mac = ET.SubElement(input, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
mac = ET.SubElement(vmpolicy_macaddr, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
name = ET.SubElement(vmpolicy_macaddr, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
datacenter = ET.SubElement(vmpolicy_macaddr, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_dvpg_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
dvpg_nn = ET.SubElement(vmpolicy_macaddr, "dvpg-nn")
dvpg_nn.text = kwargs.pop('dvpg_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_port_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
port_nn = ET.SubElement(vmpolicy_macaddr, "port-nn")
port_nn.text = kwargs.pop('port_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_port_prof(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
port_prof = ET.SubElement(vmpolicy_macaddr, "port-prof")
port_prof.text = kwargs.pop('port_prof')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id = ET.SubElement(vcenter, "id")
id.text = kwargs.pop('id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_credentials_url(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
credentials = ET.SubElement(vcenter, "credentials")
url = ET.SubElement(credentials, "url")
url.text = kwargs.pop('url')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_credentials_username(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
credentials = ET.SubElement(vcenter, "credentials")
username = ET.SubElement(credentials, "username")
username.text = kwargs.pop('username')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_credentials_password(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
credentials = ET.SubElement(vcenter, "credentials")
password = ET.SubElement(credentials, "password")
password.text = kwargs.pop('password')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_activate(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
activate = ET.SubElement(vcenter, "activate")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_interval(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
interval = ET.SubElement(vcenter, "interval")
interval.text = kwargs.pop('interval')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_discovery_ignore_delete_all_response_ignore_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
discovery = ET.SubElement(vcenter, "discovery")
ignore_delete_all_response = ET.SubElement(discovery, "ignore-delete-all-response")
ignore_value = ET.SubElement(ignore_delete_all_response, "ignore-value")
ignore_value.text = kwargs.pop('ignore_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_discovery_ignore_delete_all_response_always(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
discovery = ET.SubElement(vcenter, "discovery")
ignore_delete_all_response = ET.SubElement(discovery, "ignore-delete-all-response")
always = ET.SubElement(ignore_delete_all_response, "always")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
input = ET.SubElement(get_vnetwork_hosts, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
name = ET.SubElement(vnetwork_hosts, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_vmnic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
vmnic = ET.SubElement(vnetwork_hosts, "vmnic")
vmnic.text = kwargs.pop('vmnic')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
datacenter = ET.SubElement(vnetwork_hosts, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
mac = ET.SubElement(vnetwork_hosts, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_vswitch(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
vswitch = ET.SubElement(vnetwork_hosts, "vswitch")
vswitch.text = kwargs.pop('vswitch')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
interface_type = ET.SubElement(vnetwork_hosts, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_vnetwork_hosts_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
interface_name = ET.SubElement(vnetwork_hosts, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_hosts_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
config = get_vnetwork_hosts
output = ET.SubElement(get_vnetwork_hosts, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
input = ET.SubElement(get_vnetwork_vms, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
name = ET.SubElement(vnetwork_vms, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
mac = ET.SubElement(vnetwork_vms, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
datacenter = ET.SubElement(vnetwork_vms, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
ip = ET.SubElement(vnetwork_vms, "ip")
ip.text = kwargs.pop('ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_vnetwork_vms_host_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
vnetwork_vms = ET.SubElement(output, "vnetwork-vms")
host_nn = ET.SubElement(vnetwork_vms, "host-nn")
host_nn.text = kwargs.pop('host_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vms_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vms = ET.Element("get_vnetwork_vms")
config = get_vnetwork_vms
output = ET.SubElement(get_vnetwork_vms, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
input = ET.SubElement(get_vnetwork_dvpgs, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
name = ET.SubElement(vnetwork_dvpgs, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
datacenter = ET.SubElement(vnetwork_dvpgs, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_dvs_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
dvs_nn = ET.SubElement(vnetwork_dvpgs, "dvs-nn")
dvs_nn.text = kwargs.pop('dvs_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_vnetwork_dvpgs_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
vnetwork_dvpgs = ET.SubElement(output, "vnetwork-dvpgs")
vlan = ET.SubElement(vnetwork_dvpgs, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvpgs_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvpgs = ET.Element("get_vnetwork_dvpgs")
config = get_vnetwork_dvpgs
output = ET.SubElement(get_vnetwork_dvpgs, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
input = ET.SubElement(get_vnetwork_dvs, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
name = ET.SubElement(vnetwork_dvs, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_host(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
host = ET.SubElement(vnetwork_dvs, "host")
host.text = kwargs.pop('host')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
datacenter = ET.SubElement(vnetwork_dvs, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_pnic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
pnic = ET.SubElement(vnetwork_dvs, "pnic")
pnic.text = kwargs.pop('pnic')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
interface_type = ET.SubElement(vnetwork_dvs, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_vnetwork_dvs_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
interface_name = ET.SubElement(vnetwork_dvs, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_dvs_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
config = get_vnetwork_dvs
output = ET.SubElement(get_vnetwork_dvs, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
input = ET.SubElement(get_vnetwork_vswitches, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
name = ET.SubElement(vnetwork_vswitches, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_host(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
host = ET.SubElement(vnetwork_vswitches, "host")
host.text = kwargs.pop('host')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
datacenter = ET.SubElement(vnetwork_vswitches, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_pnic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
pnic = ET.SubElement(vnetwork_vswitches, "pnic")
pnic.text = kwargs.pop('pnic')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
interface_type = ET.SubElement(vnetwork_vswitches, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_vnetwork_vswitches_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
vnetwork_vswitches = ET.SubElement(output, "vnetwork-vswitches")
interface_name = ET.SubElement(vnetwork_vswitches, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_vswitches_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_vswitches = ET.Element("get_vnetwork_vswitches")
config = get_vnetwork_vswitches
output = ET.SubElement(get_vnetwork_vswitches, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
name = ET.SubElement(vnetwork_pgs, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
datacenter = ET.SubElement(vnetwork_pgs, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_vs_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
vs_nn = ET.SubElement(vnetwork_pgs, "vs-nn")
vs_nn.text = kwargs.pop('vs_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
vlan = ET.SubElement(vnetwork_pgs, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_vnetwork_pgs_host_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
host_nn = ET.SubElement(vnetwork_pgs, "host-nn")
host_nn.text = kwargs.pop('host_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vnetwork_portgroups_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
output = ET.SubElement(get_vnetwork_portgroups, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
mac = ET.SubElement(input, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_vcenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
vcenter = ET.SubElement(input, "vcenter")
vcenter.text = kwargs.pop('vcenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
datacenter = ET.SubElement(input, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
input = ET.SubElement(get_vmpolicy_macaddr, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
mac = ET.SubElement(vmpolicy_macaddr, "mac")
mac.text = kwargs.pop('mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
name = ET.SubElement(vmpolicy_macaddr, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_datacenter(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
datacenter = ET.SubElement(vmpolicy_macaddr, "datacenter")
datacenter.text = kwargs.pop('datacenter')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_dvpg_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
dvpg_nn = ET.SubElement(vmpolicy_macaddr, "dvpg-nn")
dvpg_nn.text = kwargs.pop('dvpg_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_port_nn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
port_nn = ET.SubElement(vmpolicy_macaddr, "port-nn")
port_nn.text = kwargs.pop('port_nn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_port_prof(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
vmpolicy_macaddr = ET.SubElement(output, "vmpolicy-macaddr")
port_prof = ET.SubElement(vmpolicy_macaddr, "port-prof")
port_prof.text = kwargs.pop('port_prof')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vmpolicy_macaddr_output_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vmpolicy_macaddr = ET.Element("get_vmpolicy_macaddr")
config = get_vmpolicy_macaddr
output = ET.SubElement(get_vmpolicy_macaddr, "output")
instance_id = ET.SubElement(output, "instance-id")
instance_id.text = kwargs.pop('instance_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id = ET.SubElement(vcenter, "id")
id.text = kwargs.pop('id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_credentials_url(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
credentials = ET.SubElement(vcenter, "credentials")
url = ET.SubElement(credentials, "url")
url.text = kwargs.pop('url')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_credentials_username(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
credentials = ET.SubElement(vcenter, "credentials")
username = ET.SubElement(credentials, "username")
username.text = kwargs.pop('username')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_credentials_password(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
credentials = ET.SubElement(vcenter, "credentials")
password = ET.SubElement(credentials, "password")
password.text = kwargs.pop('password')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_activate(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
activate = ET.SubElement(vcenter, "activate")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_interval(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
interval = ET.SubElement(vcenter, "interval")
interval.text = kwargs.pop('interval')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_discovery_ignore_delete_all_response_ignore_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
discovery = ET.SubElement(vcenter, "discovery")
ignore_delete_all_response = ET.SubElement(discovery, "ignore-delete-all-response")
ignore_value = ET.SubElement(ignore_delete_all_response, "ignore-value")
ignore_value.text = kwargs.pop('ignore_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def vcenter_discovery_ignore_delete_all_response_always(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
id_key.text = kwargs.pop('id')
discovery = ET.SubElement(vcenter, "discovery")
ignore_delete_all_response = ET.SubElement(discovery, "ignore-delete-all-response")
always = ET.SubElement(ignore_delete_all_response, "always")
callback = kwargs.pop('callback', self._callback)
return callback(config)
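    # Usage sketch (a minimal illustration, not part of the generated API): every
    # builder above follows the same pattern -- assemble an ElementTree "config"
    # document from the keyword arguments, then hand it to a callback (the 'callback'
    # kwarg, defaulting to self._callback).  The snippet below rebuilds, standalone,
    # the document that vcenter_credentials_url() assembles, with ET.tostring as a
    # stand-in callback so it needs neither a switch session nor an instance of the
    # enclosing class (whose definition lies outside this excerpt):
    #
    #     import xml.etree.ElementTree as ET
    #
    #     def dump_xml(config):
    #         return ET.tostring(config)   # serialise instead of sending to a device
    #
    #     config = ET.Element("config")
    #     vcenter = ET.SubElement(config, "vcenter",
    #                             xmlns="urn:brocade.com:mgmt:brocade-vswitch")
    #     ET.SubElement(vcenter, "id").text = "vc1"                  # sample id
    #     credentials = ET.SubElement(vcenter, "credentials")
    #     ET.SubElement(credentials, "url").text = "https://vcenter.example.com"
    #     print(dump_xml(config))
    #
    # With an instance 'obj' of the enclosing class, the equivalent call would be
    # roughly obj.vcenter_credentials_url(id='vc1', url='https://vcenter.example.com',
    # callback=dump_xml).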
# ===========================================================================
# bidder/migrations/0002_auto_20181217_2103.py
# repo: rManiks/moradmin | license: Apache-2.0
# blob: 3ff86c8dec7ad34f11e3e789b712aec8c28882de | size: 1,470 bytes
# ===========================================================================
# Generated by Django 2.1.3 on 2018-12-17 21:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bidder', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='location',
name='address_line_two',
field=models.CharField(default='none', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='location',
name='city',
field=models.CharField(default='none', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='location',
name='country',
field=models.CharField(default='none', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='location',
name='district',
field=models.CharField(default='none', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='location',
name='post_code',
field=models.CharField(default='none', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='location',
name='state',
field=models.CharField(default='none', max_length=100),
preserve_default=False,
),
]
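    # Context sketch: once this migration is applied (for example with
    # "python manage.py migrate bidder"), the bidder Location model carries the six
    # address fields added above.  A minimal model sketch using only the names and
    # max_length values taken from the AddField operations (fields created in
    # 0001_initial are not shown here and are not assumed):
    #
    #     from django.db import models
    #
    #     class Location(models.Model):
    #         address_line_two = models.CharField(max_length=100)
    #         city = models.CharField(max_length=100)
    #         country = models.CharField(max_length=100)
    #         district = models.CharField(max_length=100)
    #         post_code = models.CharField(max_length=100)
    #         state = models.CharField(max_length=100)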
# ===========================================================================
# src/pipedown/nodes/metrics/__init__.py
# repo: brendanhasz/drainpype (also brendanhasz/pipedown) | license: MIT
# blob: b75e9a5b6872fb21df127ccb637275e61cbcfc2c | size: 197 bytes
# ===========================================================================
from .mean_absolute_percentage_error import MeanAbsolutePercentageError
from .mean_squared_error import MeanSquaredError
from .median_absolute_percentage_error import MedianAbsolutePercentageError
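# Usage note: the re-exports above let callers import the metric nodes directly
# from the package, e.g.
#
#     from pipedown.nodes.metrics import (
#         MeanAbsolutePercentageError,
#         MeanSquaredError,
#         MedianAbsolutePercentageError,
#     )
#
# How these classes are constructed and evaluated depends on the surrounding Node
# API, which is outside this file and is not assumed here.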
# ===========================================================================
# tests/milvus_python_test/test_compact.py
# repo: youny626/milvus | license: Apache-2.0
# blob: b78f5f0a32a03a11940b444427fbbe4b39463631 | size: 41,198 bytes
# ===========================================================================
import time
import random
import pdb
import threading
import logging
from multiprocessing import Pool, Process
import pytest
from milvus import IndexType, MetricType
from utils import *
dim = 128
index_file_size = 10
COMPACT_TIMEOUT = 30
nprobe = 1
top_k = 1
tag = "1970-01-01"
nb = 6000
class TestCompactBase:
"""
******************************************************************
The following cases are used to test `compact` function
******************************************************************
"""
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_compact_table_name_None(self, connect, table):
'''
target: compact table where table name is None
method: compact with the table_name: None
expected: exception raised
'''
table_name = None
with pytest.raises(Exception) as e:
status = connect.compact(table_name)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_compact_table_name_not_existed(self, connect, table):
'''
target: compact table not existed
method: compact with a random table_name, which is not in db
expected: status not ok
'''
table_name = gen_unique_str("not_existed_table")
status = connect.compact(table_name)
assert not status.OK()
@pytest.fixture(
scope="function",
params=gen_invalid_table_names()
)
def get_table_name(self, request):
yield request.param
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_compact_table_name_invalid(self, connect, get_table_name):
'''
target: compact table with invalid name
method: compact with invalid table_name
expected: status not ok
'''
table_name = get_table_name
status = connect.compact(table_name)
assert not status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_and_compact(self, connect, table):
'''
target: test add vector and compact
method: add vector and compact table
expected: status ok, vector added
'''
vector = gen_single_vector(dim)
status, ids = connect.add_vectors(table, vector)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
logging.getLogger().info(info)
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_and_compact(self, connect, table):
'''
target: test add vectors and compact
method: add vectors and compact table
expected: status ok, vectors added
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_part_and_compact(self, connect, table):
'''
target: test add vectors, delete part of them and compact
method: add vectors, delete a few and compact table
expected: status ok, data size is smaller after compact
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(table, delete_ids)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
size_before = info.partitions_stat[0].segments_stat[0].data_size
logging.getLogger().info(size_before)
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
size_after = info.partitions_stat[0].segments_stat[0].data_size
logging.getLogger().info(size_after)
assert(size_before > size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_all_and_compact(self, connect, table):
'''
target: test add vectors, delete them and compact
method: add vectors, delete all and compact table
expected: status ok, no data size in table info because table is empty
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
status = connect.delete_by_id(table, ids)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
assert(len(info.partitions_stat[0].segments_stat) == 0)
@pytest.fixture(
scope="function",
params=gen_simple_index_params()
)
def get_simple_index_params(self, request, connect):
if str(connect._cmd("mode")[1]) == "CPU":
if request.param["index_type"] not in [IndexType.IVF_SQ8, IndexType.IVFLAT, IndexType.FLAT]:
pytest.skip("Only support index_type: flat/ivf_flat/ivf_sq8")
else:
pytest.skip("Only support CPU mode")
return request.param
def test_compact_after_index_created(self, connect, table, get_simple_index_params):
'''
target: test compact table after index created
method: add vectors, create index, delete part of vectors and compact
expected: status ok, index description no change, data size smaller after compact
'''
count = 10
index_params = get_simple_index_params
vectors = gen_vector(count, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
status = connect.create_index(table, index_params)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
logging.getLogger().info(info.partitions_stat)
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(table, delete_ids)
assert status.OK()
status = connect.flush([table])
assert status.OK()
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before > size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_and_compact_twice(self, connect, table):
'''
target: test add vector and compact twice
method: add vector and compact table twice
expected: status ok, data size no change
'''
vector = gen_single_vector(dim)
status, ids = connect.add_vectors(table, vector)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact twice
status, info = connect.table_info(table)
assert status.OK()
size_after_twice = info.partitions_stat[0].segments_stat[0].data_size
assert(size_after == size_after_twice)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_part_and_compact_twice(self, connect, table):
'''
target: test add vectors, delete part of them and compact twice
method: add vectors, delete part and compact table twice
expected: status ok, data size smaller after first compact, no change after second
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(table, delete_ids)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before > size_after)
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact twice
status, info = connect.table_info(table)
assert status.OK()
size_after_twice = info.partitions_stat[0].segments_stat[0].data_size
assert(size_after == size_after_twice)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_compact_multi_tables(self, connect):
'''
        target: test that compact works with multiple tables
method: create 50 tables, add vectors into them and compact in turn
expected: status ok
'''
nq = 100
num_tables = 50
vectors = gen_vectors(nq, dim)
table_list = []
for i in range(num_tables):
table_name = gen_unique_str("test_compact_multi_table_%d" % i)
table_list.append(table_name)
param = {'table_name': table_name,
'dimension': dim,
'index_file_size': index_file_size,
'metric_type': MetricType.L2}
connect.create_table(param)
time.sleep(6)
for i in range(num_tables):
status, ids = connect.add_vectors(table_name=table_list[i], records=vectors)
assert status.OK()
status = connect.compact(table_list[i])
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_after_compact(self, connect, table):
'''
target: test add vector after compact
method: after compact operation, add vector
expected: status ok, vector added
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
vector = gen_single_vector(dim)
status, ids = connect.add_vectors(table, vector)
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_index_creation_after_compact(self, connect, table, get_simple_index_params):
'''
target: test index creation after compact
method: after compact operation, create index
expected: status ok, index description no change
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
index_params = get_simple_index_params
status = connect.create_index(table, index_params)
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_delete_vectors_after_compact(self, connect, table):
'''
target: test delete vectors after compact
method: after compact operation, delete vectors
expected: status ok, vectors deleted
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
status = connect.delete_by_id(table, ids)
assert status.OK()
status = connect.flush([table])
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_search_after_compact(self, connect, table):
'''
target: test search after compact
method: after compact operation, search vector
expected: status ok
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
status = connect.compact(table)
assert status.OK()
status = connect.flush([table])
assert status.OK()
query_vecs = [vectors[0]]
status, res = connect.search_vectors(table, top_k, nprobe, query_vecs)
logging.getLogger().info(res)
assert status.OK()
def test_compact_server_crashed_recovery(self, connect, table):
'''
target: test compact when server crashed unexpectedly and restarted
method: add vectors, delete and compact table; server stopped and restarted during compact
expected: status ok, request recovered
'''
vectors = gen_vector(nb * 100, dim)
status, ids = connect.add_vectors(table, vectors)
assert status.OK()
status = connect.flush([table])
assert status.OK()
delete_ids = ids[0:1000]
status = connect.delete_by_id(table, delete_ids)
assert status.OK()
status = connect.flush([table])
assert status.OK()
# start to compact, kill and restart server
logging.getLogger().info("compact starting...")
status = connect.compact(table)
# pdb.set_trace()
assert status.OK()
status = connect.flush([table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(table)
assert status.OK()
assert info.partitions_stat[0].count == nb * 100 - 1000
class TestCompactJAC:
"""
******************************************************************
The following cases are used to test `compact` function
******************************************************************
"""
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_and_compact(self, connect, jac_table):
'''
target: test add vector and compact
method: add vector and compact table
expected: status ok, vector added
'''
tmp, vector = gen_binary_vectors(1, dim)
status, ids = connect.add_vectors(jac_table, vector)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_and_compact(self, connect, jac_table):
'''
target: test add vectors and compact
method: add vectors and compact table
expected: status ok, vectors added
'''
tmp, vectors = gen_binary_vectors(nb, dim)
status, ids = connect.add_vectors(jac_table, vectors)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_part_and_compact(self, connect, jac_table):
'''
target: test add vectors, delete part of them and compact
method: add vectors, delete a few and compact table
expected: status ok, data size is smaller after compact
'''
tmp, vectors = gen_binary_vectors(nb, dim)
status, ids = connect.add_vectors(jac_table, vectors)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(jac_table, delete_ids)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(jac_table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
size_before = info.partitions_stat[0].segments_stat[0].data_size
logging.getLogger().info(size_before)
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(jac_table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
size_after = info.partitions_stat[0].segments_stat[0].data_size
logging.getLogger().info(size_after)
assert(size_before > size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_all_and_compact(self, connect, jac_table):
'''
target: test add vectors, delete them and compact
method: add vectors, delete all and compact table
expected: status ok, no data size in table info because table is empty
'''
tmp, vectors = gen_binary_vectors(nb, dim)
status, ids = connect.add_vectors(jac_table, vectors)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
status = connect.delete_by_id(jac_table, ids)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(jac_table)
assert status.OK()
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(jac_table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
assert(len(info.partitions_stat[0].segments_stat) == 0)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_and_compact_twice(self, connect, jac_table):
'''
target: test add vector and compact twice
method: add vector and compact table twice
expected: status ok
'''
tmp, vector = gen_binary_vectors(1, dim)
status, ids = connect.add_vectors(jac_table, vector)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact twice
status, info = connect.table_info(jac_table)
assert status.OK()
size_after_twice = info.partitions_stat[0].segments_stat[0].data_size
assert(size_after == size_after_twice)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_part_and_compact_twice(self, connect, jac_table):
'''
target: test add vectors, delete part of them and compact twice
method: add vectors, delete part and compact table twice
expected: status ok, data size smaller after first compact, no change after second
'''
tmp, vectors = gen_binary_vectors(nb, dim)
status, ids = connect.add_vectors(jac_table, vectors)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(jac_table, delete_ids)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before > size_after)
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact twice
status, info = connect.table_info(jac_table)
assert status.OK()
size_after_twice = info.partitions_stat[0].segments_stat[0].data_size
assert(size_after == size_after_twice)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_compact_multi_tables(self, connect):
'''
target: test whether compact works with multiple tables
method: create 50 tables, add vectors into them and compact in turn
expected: status ok
'''
nq = 100
num_tables = 50
tmp, vectors = gen_binary_vectors(nq, dim)
table_list = []
for i in range(num_tables):
table_name = gen_unique_str("test_compact_multi_table_%d" % i)
table_list.append(table_name)
param = {'table_name': table_name,
'dimension': dim,
'index_file_size': index_file_size,
'metric_type': MetricType.JACCARD}
connect.create_table(param)
time.sleep(6)
for i in range(num_tables):
status, ids = connect.add_vectors(table_name=table_list[i], records=vectors)
assert status.OK()
status = connect.compact(table_list[i])
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_after_compact(self, connect, jac_table):
'''
target: test add vector after compact
method: after compact operation, add vector
expected: status ok, vector added
'''
tmp, vectors = gen_binary_vectors(nb, dim)
status, ids = connect.add_vectors(jac_table, vectors)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(jac_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
tmp, vector = gen_binary_vectors(1, dim)
status, ids = connect.add_vectors(jac_table, vector)
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_delete_vectors_after_compact(self, connect, jac_table):
'''
target: test delete vectors after compact
method: after compact operation, delete vectors
expected: status ok, vectors deleted
'''
tmp, vectors = gen_binary_vectors(nb, dim)
status, ids = connect.add_vectors(jac_table, vectors)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
status = connect.delete_by_id(jac_table, ids)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_search_after_compact(self, connect, jac_table):
'''
target: test search after compact
method: after compact operation, search vector
expected: status ok
'''
tmp, vectors = gen_binary_vectors(nb, dim)
status, ids = connect.add_vectors(jac_table, vectors)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
status = connect.compact(jac_table)
assert status.OK()
status = connect.flush([jac_table])
assert status.OK()
query_vecs = [vectors[0]]
status, res = connect.search_vectors(jac_table, top_k, nprobe, query_vecs)
logging.getLogger().info(res)
assert status.OK()
class TestCompactIP:
"""
******************************************************************
The following cases are used to test `compact` function
******************************************************************
"""
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_and_compact(self, connect, ip_table):
'''
target: test add vector and compact
method: add vector and compact table
expected: status ok, vector added
'''
vector = gen_single_vector(dim)
status, ids = connect.add_vectors(ip_table, vector)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_and_compact(self, connect, ip_table):
'''
target: test add vectors and compact
method: add vectors and compact table
expected: status ok, vectors added
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(ip_table, vectors)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_part_and_compact(self, connect, ip_table):
'''
target: test add vectors, delete part of them and compact
method: add vectors, delete a few and compact table
expected: status ok, data size is smaller after compact
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(ip_table, vectors)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(ip_table, delete_ids)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(ip_table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
size_before = info.partitions_stat[0].segments_stat[0].data_size
logging.getLogger().info(size_before)
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(ip_table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
size_after = info.partitions_stat[0].segments_stat[0].data_size
logging.getLogger().info(size_after)
assert(size_before > size_after)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_all_and_compact(self, connect, ip_table):
'''
target: test add vectors, delete them and compact
method: add vectors, delete all and compact table
expected: status ok, no data size in table info because table is empty
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(ip_table, vectors)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
status = connect.delete_by_id(ip_table, ids)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(ip_table)
assert status.OK()
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(ip_table)
assert status.OK()
logging.getLogger().info(info.partitions_stat)
assert(len(info.partitions_stat[0].segments_stat) == 0)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_and_compact_twice(self, connect, ip_table):
'''
target: test add vector and compact twice
method: add vector and compact table twice
expected: status ok
'''
vector = gen_single_vector(dim)
status, ids = connect.add_vectors(ip_table, vector)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact twice
status, info = connect.table_info(ip_table)
assert status.OK()
size_after_twice = info.partitions_stat[0].segments_stat[0].data_size
assert(size_after == size_after_twice)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vectors_delete_part_and_compact_twice(self, connect, ip_table):
'''
target: test add vectors, delete part of them and compact twice
method: add vectors, delete part and compact table twice
expected: status ok, data size smaller after first compact, no change after second
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(ip_table, vectors)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(ip_table, delete_ids)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before > size_after)
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact twice
status, info = connect.table_info(ip_table)
assert status.OK()
size_after_twice = info.partitions_stat[0].segments_stat[0].data_size
assert(size_after == size_after_twice)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_compact_multi_tables(self, connect):
'''
target: test whether compact works with multiple tables
method: create 50 tables, add vectors into them and compact in turn
expected: status ok
'''
nq = 100
num_tables = 50
vectors = gen_vectors(nq, dim)
table_list = []
for i in range(num_tables):
table_name = gen_unique_str("test_compact_multi_table_%d" % i)
table_list.append(table_name)
param = {'table_name': table_name,
'dimension': dim,
'index_file_size': index_file_size,
'metric_type': MetricType.IP}
connect.create_table(param)
time.sleep(6)
for i in range(num_tables):
status, ids = connect.add_vectors(table_name=table_list[i], records=vectors)
assert status.OK()
status = connect.compact(table_list[i])
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_vector_after_compact(self, connect, ip_table):
'''
target: test add vector after compact
method: after compact operation, add vector
expected: status ok, vector added
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(ip_table, vectors)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info before compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_before = info.partitions_stat[0].segments_stat[0].data_size
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
# get table info after compact
status, info = connect.table_info(ip_table)
assert status.OK()
size_after = info.partitions_stat[0].segments_stat[0].data_size
assert(size_before == size_after)
vector = gen_single_vector(dim)
status, ids = connect.add_vectors(ip_table, vector)
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_delete_vectors_after_compact(self, connect, ip_table):
'''
target: test delete vectors after compact
method: after compact operation, delete vectors
expected: status ok, vectors deleted
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(ip_table, vectors)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
status = connect.delete_by_id(ip_table, ids)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_search_after_compact(self, connect, ip_table):
'''
target: test search after compact
method: after compact operation, search vector
expected: status ok
'''
vectors = gen_vector(nb, dim)
status, ids = connect.add_vectors(ip_table, vectors)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
status = connect.compact(ip_table)
assert status.OK()
status = connect.flush([ip_table])
assert status.OK()
query_vecs = [vectors[0]]
status, res = connect.search_vectors(ip_table, top_k, nprobe, query_vecs)
logging.getLogger().info(res)
assert status.OK()
| 39.537428
| 104
| 0.626001
| 5,032
| 41,198
| 4.939587
| 0.037361
| 0.084004
| 0.127293
| 0.127655
| 0.941061
| 0.936072
| 0.923962
| 0.920743
| 0.920341
| 0.916117
| 0
| 0.006025
| 0.270766
| 41,198
| 1,041
| 105
| 39.575408
| 0.821323
| 0.173067
| 0
| 0.889481
| 0
| 0
| 0.011124
| 0.003134
| 0
| 0
| 0
| 0
| 0.34221
| 1
| 0.050599
| false
| 0
| 0.011984
| 0
| 0.067909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7c2107c07d853962704508200aa975a8f57f23b
| 2,729
|
py
|
Python
|
tests/test_series/test_getters.py
|
bearsh/raccoon
|
bd7a59c3dcf7ad7b995194a4a49631759d9e565c
|
[
"MIT"
] | 62
|
2016-07-11T01:23:15.000Z
|
2022-01-14T17:42:17.000Z
|
tests/test_series/test_getters.py
|
bearsh/raccoon
|
bd7a59c3dcf7ad7b995194a4a49631759d9e565c
|
[
"MIT"
] | 13
|
2016-07-11T01:24:02.000Z
|
2021-05-17T14:51:58.000Z
|
tests/test_series/test_getters.py
|
bearsh/raccoon
|
bd7a59c3dcf7ad7b995194a4a49631759d9e565c
|
[
"MIT"
] | 14
|
2017-03-22T17:23:02.000Z
|
2021-05-08T05:16:30.000Z
|
import pytest
import raccoon as rc
def test_index():
actual = rc.Series([4, 5, 6], index=['a', 'b', 'c'])
result = actual.index
assert result == ['a', 'b', 'c']
assert isinstance(result, list)
# test that a view is returned
result.append('bad')
assert actual.index == ['a', 'b', 'c', 'bad']
actual.index = [9, 10, 11]
assert actual.index == [9, 10, 11]
assert isinstance(result, list)
# index too long
with pytest.raises(ValueError):
actual.index = [1, 3, 4, 5, 6]
assert actual.index_name == 'index'
actual.index_name = 'new name'
assert actual.index_name == 'new name'
actual = rc.Series([4, 5, 6], index=['a', 'b', 'c'], index_name='letters')
assert actual.index_name == 'letters'
def test_index_view():
data = [4, 5, 6]
index = ['a', 'b', 'c']
actual = rc.ViewSeries(data, index)
result = actual.index
assert result == ['a', 'b', 'c']
assert isinstance(result, list)
# test that a view is returned
assert result is index
assert result is actual.index
# modify
result[1] = 'new'
assert actual.index == ['a', 'new', 'c']
assert index == ['a', 'new', 'c']
# index too long
with pytest.raises(ValueError):
actual.index = [1, 3, 4, 5, 6]
assert actual.index_name == 'index'
actual.index_name = 'new name'
assert actual.index_name == 'new name'
actual = rc.Series([4, 5, 6], index=['a', 'b', 'c'], index_name='letters')
assert actual.index_name == 'letters'
def test_data():
data = [4, 5, 6]
index = ['a', 'b', 'c']
actual = rc.Series(data, index)
assert isinstance(actual.data, list)
assert data is not actual.data
assert actual.data == [4, 5, 6]
# test data is a view and changes to the .data will corrupt the Series
new = actual.data
new[0] = 99
assert actual.data == new
new.append(88)
assert new == [99, 5, 6, 88]
assert actual.data == [99, 5, 6, 88]
with pytest.raises(AttributeError):
# noinspection PyPropertyAccess
actual.data = [4, 5]
def test_data_view():
data = [4, 5, 6]
index = ['a', 'b', 'c']
actual = rc.ViewSeries(data, index)
assert isinstance(actual.data, list)
assert data is actual.data
assert actual.data == [4, 5, 6]
# test data is a view of the caller's list
new = actual.data
new[0] = 99
assert actual.data == new
assert data == new
# changing the data can cause issues
new.append(88)
assert new == [99, 5, 6, 88]
assert actual.data == [99, 5, 6, 88]
assert actual.index == ['a', 'b', 'c']
with pytest.raises(AttributeError):
# noinspection PyPropertyAccess
actual.data = [4, 5]
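# Summary of the getter semantics exercised above: rc.Series copies the data
# passed to its constructor (``data is not actual.data``) but returns its
# internal lists from the ``index``/``data`` properties, so mutating those
# returned lists mutates the Series. rc.ViewSeries instead shares the caller's
# original lists directly (``data is actual.data``). In both classes ``data``
# is a read-only property, and assigning to it raises AttributeError.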
| 25.036697
| 78
| 0.584097
| 389
| 2,729
| 4.056555
| 0.154242
| 0.125475
| 0.019011
| 0.040558
| 0.813055
| 0.813055
| 0.759823
| 0.759823
| 0.759823
| 0.759823
| 0
| 0.041069
| 0.259436
| 2,729
| 108
| 79
| 25.268519
| 0.739733
| 0.101869
| 0
| 0.7
| 0
| 0
| 0.04877
| 0
| 0
| 0
| 0
| 0
| 0.442857
| 1
| 0.057143
| false
| 0
| 0.028571
| 0
| 0.085714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7d15e3dc64c0784d3e711fd2f8a912613309e9f
| 21,415
|
py
|
Python
|
python/cuXfilter/layouts/layouts.py
|
AjayThorve/cuxfilter
|
537ff67de80439a43e0bad7373558f5e25dcb112
|
[
"Apache-2.0"
] | 2
|
2019-03-06T02:10:05.000Z
|
2020-05-06T06:33:02.000Z
|
python/cuXfilter/layouts/layouts.py
|
AjayThorve/cuxfilter
|
537ff67de80439a43e0bad7373558f5e25dcb112
|
[
"Apache-2.0"
] | null | null | null |
python/cuXfilter/layouts/layouts.py
|
AjayThorve/cuxfilter
|
537ff67de80439a43e0bad7373558f5e25dcb112
|
[
"Apache-2.0"
] | null | null | null |
from panel import GridSpec
from panel import extension
from panel import Column
import panel as pn
from .layout_templates import *
class Layout0:
def generate_dashboard(self, title, charts):
"""
layout 0
[1]
"""
tmpl = pn.Template(layout_0)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 1600
chart.height = int(round(90*1.0))*10
tmpl.add_panel('chart1', chart.view())
else:
break
return tmpl
class Layout1:
def generate_dashboard(self, title, charts):
"""
layout 1
[1]
[2]
"""
tmpl = pn.Template(layout_1)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 1600
chart.height = int(round(90*0.66))*10
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = 1600
chart.height = int(round(90*0.33))*10
tmpl.add_panel('chart2', chart.view())
else:
break
n = 2 - num_of_charts_added
for i in range(n):
chart = 2-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout2:
def generate_dashboard(self, title, charts):
"""
layout 2
[1 2]
"""
tmpl = pn.Template(layout_2)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 900
chart.height = 900
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = 900
chart.height = 900
tmpl.add_panel('chart2', chart.view())
else:
break
n = 2 - num_of_charts_added
for i in range(n):
chart = 2-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout3:
def generate_dashboard(self, title, charts):
"""
layout 3
[1 2]
[1 3]
"""
tmpl = pn.Template(layout_3)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 900
chart.height = 900
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = 900
chart.height = 450
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = 900
chart.height = 450
tmpl.add_panel('chart3', chart.view())
else:
break
n = 3 - num_of_charts_added
for i in range(n):
chart = 3-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout4:
def generate_dashboard(self, title, charts):
"""
layout 4
[1 2 3]
"""
tmpl = pn.Template(layout_4)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600*0.33)
chart.height = int(1600*0.33)
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600*0.33)
chart.height = int(1600*0.33)
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600*0.33)
chart.height = int(1600*0.33)
tmpl.add_panel('chart3', chart.view())
else:
break
n = 3 - num_of_charts_added
for i in range(n):
chart = 3-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout5:
def generate_dashboard(self, title, charts):
"""
layout 5
[ 1 ]
[2 3]
"""
tmpl = pn.Template(layout_5)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 1600
chart.height = 600
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = 800
chart.height = 300
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = 800
chart.height = 300
tmpl.add_panel('chart3', chart.view())
else:
break
n = 3 - num_of_charts_added
for i in range(n):
chart = 3-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout6:
def generate_dashboard(self, title, charts):
"""
layout 6
[1 2]
[3 4]
"""
tmpl = pn.Template(layout_6)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 800
chart.height = 450
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = 800
chart.height = 450
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = 800
chart.height = 450
tmpl.add_panel('chart3', chart.view())
elif num_of_charts_added == 4:
chart.chart.sizing_mode = 'scale_both'
chart.width = 800
chart.height = 450
tmpl.add_panel('chart4', chart.view())
else:
break
n = 4 - num_of_charts_added
for i in range(n):
chart = 4-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout7:
def generate_dashboard(self, title, charts):
"""
layout 7
[ 1 ]
[2 3 4]
"""
tmpl = pn.Template(layout_7)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 1600
chart.height = 600
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart3', chart.view())
elif num_of_charts_added == 4:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart4', chart.view())
else:
break
n = 4 - num_of_charts_added
for i in range(n):
chart = 4-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout8:
def generate_dashboard(self, title, charts):
"""
layout 8
[ 1 ]
[2 3 4 5]
"""
tmpl = pn.Template(layout_8)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 1600
chart.height = 600
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart3', chart.view())
elif num_of_charts_added == 4:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart4', chart.view())
elif num_of_charts_added == 5:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart5', chart.view())
else:
break
n = 5 - num_of_charts_added
for i in range(n):
chart = 5-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout9:
def generate_dashboard(self, title, charts):
"""
layout 9
[1 1 2]
[1 1 3]
[4 5 6]
"""
tmpl = pn.Template(layout_9)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = 1200
chart.height = 600
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart3', chart.view())
elif num_of_charts_added == 4:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart4', chart.view())
elif num_of_charts_added == 5:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart5', chart.view())
elif num_of_charts_added == 6:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart6', chart.view())
else:
break
n = 6 - num_of_charts_added
for i in range(n):
chart = 6-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout10:
def generate_dashboard(self, title, charts):
"""
layout 10
[1 2 3]
[4 5 6]
"""
tmpl = pn.Template(layout_10)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 450
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 450
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 450
tmpl.add_panel('chart3', chart.view())
elif num_of_charts_added == 4:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 450
tmpl.add_panel('chart4', chart.view())
elif num_of_charts_added == 5:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 450
tmpl.add_panel('chart5', chart.view())
elif num_of_charts_added == 6:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 450
tmpl.add_panel('chart6', chart.view())
else:
break
n = 6 - num_of_charts_added
for i in range(n):
chart = 6-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout11:
def generate_dashboard(self, title, charts):
"""
layout 11
[ 1 2 ]
[3 4 5 6]
"""
tmpl = pn.Template(layout_11)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/2)
chart.height = 600
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/2)
chart.height = 600
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart3', chart.view())
elif num_of_charts_added == 4:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart4', chart.view())
elif num_of_charts_added == 5:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart5', chart.view())
elif num_of_charts_added == 6:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/4)
chart.height = 300
tmpl.add_panel('chart6', chart.view())
else:
break
n = 6 - num_of_charts_added
for i in range(n):
chart = 6-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
class Layout12:
def generate_dashboard(self, title, charts):
"""
layout 12
[1 2 3]
[4 5 6]
[7 8 9]
"""
tmpl = pn.Template(layout_12)
tmpl.add_panel('title', '<div class="nav-title"> '+str(title)+'</div>')
num_of_charts_added = 0
for chart in charts.values():
if 'widget' in chart.chart_type or chart.chart_type == 'datasize_indicator':
continue
num_of_charts_added +=1
if num_of_charts_added == 1:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart1', chart.view())
elif num_of_charts_added == 2:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart2', chart.view())
elif num_of_charts_added == 3:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart3', chart.view())
elif num_of_charts_added == 4:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart4', chart.view())
elif num_of_charts_added == 5:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart5', chart.view())
elif num_of_charts_added == 6:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart6', chart.view())
elif num_of_charts_added == 7:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart7', chart.view())
elif num_of_charts_added == 8:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart8', chart.view())
elif num_of_charts_added == 9:
chart.chart.sizing_mode = 'scale_both'
chart.width = int(1600/3)
chart.height = 300
tmpl.add_panel('chart9', chart.view())
else:
break
n = 9 - num_of_charts_added
for i in range(n):
chart = 9-i
tmpl.add_panel('chart'+str(chart),'')
return tmpl
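# Minimal sketch (not part of the original module) showing how the repeated
# elif chains in the Layout* classes above could be expressed data-drivenly:
# each layout is just a template plus an ordered list of (width, height) slots.
# The name ``GenericLayout`` and the ``slots`` parameter are illustrative
# assumptions, not cuXfilter API.
class GenericLayout:
    def __init__(self, template, slots):
        # ``template`` is a panel Template layout, ``slots`` an ordered list of
        # (width, height) tuples, one per chart panel in the layout.
        self.template = template
        self.slots = slots

    def generate_dashboard(self, title, charts):
        tmpl = pn.Template(self.template)
        tmpl.add_panel('title', '<div class="nav-title"> ' + str(title) + '</div>')
        # Same filtering rule as the hand-written layouts: skip widgets and the
        # datasize indicator, then fill chart slots in iteration order.
        plottable = [c for c in charts.values()
                     if 'widget' not in c.chart_type
                     and c.chart_type != 'datasize_indicator']
        for i, (width, height) in enumerate(self.slots, start=1):
            if i <= len(plottable):
                chart = plottable[i - 1]
                chart.chart.sizing_mode = 'scale_both'
                chart.width, chart.height = width, height
                tmpl.add_panel('chart' + str(i), chart.view())
            else:
                # Unused slots are blanked out, as in the original classes.
                tmpl.add_panel('chart' + str(i), '')
        return tmpl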
| 32.251506
| 88
| 0.497502
| 2,467
| 21,415
| 4.104175
| 0.039724
| 0.045432
| 0.099951
| 0.145383
| 0.963259
| 0.957827
| 0.952889
| 0.884938
| 0.877827
| 0.874272
| 0
| 0.053837
| 0.396311
| 21,415
| 664
| 89
| 32.251506
| 0.729347
| 0.016671
| 0
| 0.906054
| 1
| 0
| 0.082251
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02714
| false
| 0
| 0.010438
| 0
| 0.091858
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b7ef77f62342982a46c771a6b52b04a9cd2cc621
| 6,548
|
py
|
Python
|
robovat/envs/push/layouts.py
|
leobxpan/robovat
|
0d360c34c677cf018c4daab0b8e758943ae1d2c1
|
[
"MIT"
] | 62
|
2020-04-08T11:26:24.000Z
|
2021-09-06T02:45:53.000Z
|
robovat/envs/push/layouts.py
|
leobxpan/robovat
|
0d360c34c677cf018c4daab0b8e758943ae1d2c1
|
[
"MIT"
] | 7
|
2020-04-12T13:10:10.000Z
|
2022-03-12T00:15:03.000Z
|
robovat/envs/push/layouts.py
|
leobxpan/robovat
|
0d360c34c677cf018c4daab0b8e758943ae1d2c1
|
[
"MIT"
] | 17
|
2020-04-12T17:37:01.000Z
|
2021-09-07T01:51:46.000Z
|
"""Reward function of the environments.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
PushLayout = collections.namedtuple(
'PushLayout',
('size',
'offset',
'region',
'goal',
'target',
'obstacle',
'region_rgba',
'goal_rgba',
)
)
TASK_NAME_TO_LAYOUTS = {
'clearing': [
PushLayout(
size=0.15,
# Offset: [0.6 - 0.76/2 + 0.075, 0 - 1.22/2 + 0.05 + 0.075]
offset=[0.295, -0.485],
region=[
[0, 2], [0, 3], [0, 4], [0, 5],
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 2], [2, 3], [2, 4], [2, 5],
],
goal=None,
target=[
[1, 3], [1, 4],
[2, 3], [2, 4],
],
obstacle=[
[1, 3], [1, 4],
[2, 3], [2, 4],
],
region_rgba=[0.4667, 0.7098, 0.9961, 1],
goal_rgba=None,
),
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 2], [2, 3], [2, 4], [2, 5],
],
goal=None,
target=[
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 2], [2, 3], [2, 4], [2, 5],
],
obstacle=[
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 2], [2, 3], [2, 4], [2, 5],
],
region_rgba=[0.4667, 0.7098, 0.9961, 1],
goal_rgba=None,
),
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[0, 2], [0, 3], [0, 4], [0, 5],
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 3], [2, 4],
],
goal=None,
target=[
[0, 2], [0, 3], [0, 4], [0, 5],
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 3], [2, 4],
],
obstacle=[
[0, 2], [0, 3], [0, 4], [0, 5],
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 3], [2, 4],
],
region_rgba=[0.4667, 0.7098, 0.9961, 1],
goal_rgba=None,
),
],
'insertion': [
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[0, 0], [0, 1],
[1, 0], [1, 1],
[2, 0],
[3, 0], [3, 1],
[4, 0], [4, 1],
],
goal=[
[2, 1],
],
target=[
[2, 3], [2, 4],
],
obstacle=[
[1, 3], [1, 4],
[2, 3], [2, 4],
[3, 3], [3, 4],
],
region_rgba=[1, .4235, .4235, 1],
goal_rgba=[0.867, 0.776, 0.678, 0],
),
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[0, 0], [0, 1],
[1, 0], [1, 1],
[2, 0],
[3, 0], [3, 1],
[4, 0], [4, 1],
],
goal=[
[2, 1],
],
target=[
[2, 3], [2, 4],
],
obstacle=[
[1, 3], [1, 4],
[2, 3], [2, 4],
[3, 3], [3, 4],
],
region_rgba=[1, .4235, .4235, 1],
goal_rgba=[0.867, 0.776, 0.678, 0],
),
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[3, 1], [3, 2], [3, 5], [3, 6],
[4, 1], [4, 2], [4, 3], [4, 4], [4, 5], [4, 6]
],
goal=[
[2, 1],
],
target=[
[1, 2], [1, 3], [1, 4], [1, 5],
],
obstacle=[
[1, 2], [1, 3], [1, 4], [1, 5],
[2, 2], [2, 3], [2, 4], [2, 5],
],
region_rgba=[1, .4235, .4235, 1],
goal_rgba=[0.867, 0.776, 0.678, 0],
),
],
'crossing': [
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[0, 0], [0, 2], [0, 5],
[1, 0], [1, 1], [1, 2], [1, 5],
[2, 2], [2, 3], [2, 4], [2, 5],
[3, 2],
],
goal=[
[1, 2],
],
target=[
[2, 5],
],
obstacle=[
[1, 1], [1, 2], [1, 3], [1, 4], [1, 5], [1, 6],
[2, 1], [2, 2], [2, 3], [2, 4], [2, 5], [2, 6],
[3, 1], [3, 2], [3, 3], [3, 4], [3, 5], [3, 6],
],
region_rgba=[0.8, 0.8, 0.8, 1],
goal_rgba=[1, 0.9412, 0.4235, 1],
),
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[0, 0], [0, 1], [0, 2], [0, 5],
[1, 0], [1, 2], [1, 3], [1, 4], [1, 5],
[2, 0], [2, 2], [2, 5],
[3, 2], [3, 5],
],
goal=[
[3, 2],
],
target=[
[1, 4],
[1, 5],
],
obstacle=[
[1, 1], [1, 2], [1, 3], [1, 4], [1, 5], [1, 6],
[2, 1], [2, 2], [2, 3], [2, 4], [2, 5], [2, 6],
[3, 1], [3, 2], [3, 3], [3, 4], [3, 5], [3, 6],
],
region_rgba=[0.8, 0.8, 0.8, 1],
goal_rgba=[1, 0.9412, 0.4235, 1],
),
PushLayout(
size=0.15,
offset=[0.295, -0.485],
region=[
[0, 2], [0, 5],
[1, 2], [1, 5], [1, 6],
[2, 1], [2, 2], [2, 3], [2, 4], [2, 5],
[3, 1], [3, 2], [3, 5],
],
goal=[
[1, 6],
],
target=[
[1, 2],
[2, 1],
[2, 2],
],
obstacle=[
[1, 1], [1, 2], [1, 3], [1, 4], [1, 5], [1, 6],
[2, 1], [2, 2], [2, 3], [2, 4], [2, 5], [2, 6],
[3, 1], [3, 2], [3, 3], [3, 4], [3, 5], [3, 6],
],
region_rgba=[0.8, 0.8, 0.8, 1],
goal_rgba=[1, 0.9412, 0.4235, 1],
),
]
}
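# Illustrative sketch only (not from the original module): one plausible way the
# grid coordinates above could map to workspace positions, assuming each
# [row, col] cell spans ``size`` meters and the grid origin sits at ``offset``.
# The helper name ``cell_to_position`` and the exact formula are assumptions
# added for clarity.
def cell_to_position(layout, cell):
    """Return the (x, y) center of a grid cell for a given PushLayout."""
    row, col = cell
    x = layout.offset[0] + (row + 0.5) * layout.size
    y = layout.offset[1] + (col + 0.5) * layout.size
    return x, y

# e.g. cell_to_position(TASK_NAME_TO_LAYOUTS['clearing'][0], [1, 3])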
| 26.617886
| 71
| 0.252902
| 780
| 6,548
| 2.075641
| 0.066667
| 0.033354
| 0.035207
| 0.046943
| 0.776405
| 0.760346
| 0.736875
| 0.729463
| 0.72761
| 0.712786
| 0
| 0.247813
| 0.528558
| 6,548
| 245
| 72
| 26.726531
| 0.276644
| 0.014508
| 0
| 0.826667
| 0
| 0
| 0.013807
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017778
| 0
| 0.017778
| 0.004444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4d7f75af9a3a7fbb74737875200cb9943fcc2ec9
| 174
|
py
|
Python
|
scripts/mango/fields/__init__.py
|
robertjoosten/maya-orm
|
9c5db622d5bbba63246ff1d3f0a22bd3f7140f6c
|
[
"MIT"
] | 11
|
2020-11-14T14:37:49.000Z
|
2022-03-25T03:28:23.000Z
|
scripts/mango/fields/__init__.py
|
robertjoosten/maya-orm
|
9c5db622d5bbba63246ff1d3f0a22bd3f7140f6c
|
[
"MIT"
] | null | null | null |
scripts/mango/fields/__init__.py
|
robertjoosten/maya-orm
|
9c5db622d5bbba63246ff1d3f0a22bd3f7140f6c
|
[
"MIT"
] | null | null | null |
from mango.fields.base import *
from mango.fields.generic import *
from mango.fields.arrays import *
from mango.fields.compounds import *
from mango.fields.geometry import *
| 29
| 36
| 0.798851
| 25
| 174
| 5.56
| 0.36
| 0.323741
| 0.539568
| 0.604317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114943
| 174
| 5
| 37
| 34.8
| 0.902597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4d920eb4b0ef760d1471d73967715df0baf8a64b
| 35
|
py
|
Python
|
src/sqr_eqs.py
|
qxiddd/otus-architecture-patterns-2022-02
|
de49c5953b5e3adbbc2ce8acb497c4903cc2b306
|
[
"MIT"
] | null | null | null |
src/sqr_eqs.py
|
qxiddd/otus-architecture-patterns-2022-02
|
de49c5953b5e3adbbc2ce8acb497c4903cc2b306
|
[
"MIT"
] | null | null | null |
src/sqr_eqs.py
|
qxiddd/otus-architecture-patterns-2022-02
|
de49c5953b5e3adbbc2ce8acb497c4903cc2b306
|
[
"MIT"
] | null | null | null |
def hello_world():
return True
| 11.666667
| 18
| 0.685714
| 5
| 35
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228571
| 35
| 2
| 19
| 17.5
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
4d938e7160f5857c02d3758f0ae61e9ccac6f49c
| 141
|
py
|
Python
|
tools/__init__.py
|
BALAVIGNESHDOSTRIX/py-amr-pignus
|
869afa3c2113549cc186f5bbc2d2acf9cb521fb2
|
[
"MIT"
] | null | null | null |
tools/__init__.py
|
BALAVIGNESHDOSTRIX/py-amr-pignus
|
869afa3c2113549cc186f5bbc2d2acf9cb521fb2
|
[
"MIT"
] | null | null | null |
tools/__init__.py
|
BALAVIGNESHDOSTRIX/py-amr-pignus
|
869afa3c2113549cc186f5bbc2d2acf9cb521fb2
|
[
"MIT"
] | null | null | null |
from . import file_encrypt_decrypt
from . import dir_helper
from . import csvfile_helper
from . import filename_handler
from . import player
| 28.2
| 34
| 0.822695
| 20
| 141
| 5.55
| 0.55
| 0.45045
| 0.288288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141844
| 141
| 5
| 35
| 28.2
| 0.917355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4da9b5a8e3c28c5caf83f69cad331cdd38aeceae
| 100,508
|
py
|
Python
|
output_python/PEPS_Basics_Added.py
|
ryuikaneko/itps_contraction
|
10816fb6c90d77f5a3b2f804ab22573d1d676eb4
|
[
"MIT"
] | 1
|
2019-12-19T05:03:37.000Z
|
2019-12-19T05:03:37.000Z
|
output_python/PEPS_Basics_Added.py
|
ryuikaneko/itps_contraction
|
10816fb6c90d77f5a3b2f804ab22573d1d676eb4
|
[
"MIT"
] | null | null | null |
output_python/PEPS_Basics_Added.py
|
ryuikaneko/itps_contraction
|
10816fb6c90d77f5a3b2f804ab22573d1d676eb4
|
[
"MIT"
] | null | null | null |
# coding:utf-8
import numpy as np
import scipy as scipy
import scipy.linalg as linalg
import scipy.sparse.linalg as spr_linalg
import scipy.linalg.interpolative
from PEPS_Parameters import *
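# NOTE: each Contract_scalar_* routine below hard-codes a single contraction
# order; the header comments naming ./input/input_Lx*Ly*.dat together with the
# cpu_cost / memory lines look like the recorded output of a contraction-order
# optimizer, kept here for reference.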
def Contract_scalar_1x1(\
t0_2,t1_2,t2_2,\
t0_1,t1_1,t2_1,\
t0_0,t1_0,t2_0,\
o1_1\
):
##############################
# ./input/input_Lx1Ly1.dat
##############################
# (o1_1*(t1_1.conj()*((t2_1*(t2_0*t1_0))*(t1_1*((t0_0*t0_1)*(t0_2*(t2_2*t1_2)))))))
# cpu_cost= 6.04e+10 memory= 4.0004e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_1, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
t2_1, np.tensordot(
t2_0, t1_0, ([1], [0])
), ([1], [0])
), np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t0_0, t0_1, ([1], [0])
), np.tensordot(
t0_2, np.tensordot(
t2_2, t1_2, ([0], [1])
), ([1], [1])
), ([1], [0])
), ([0, 1], [1, 4])
), ([0, 1, 3, 4], [5, 0, 3, 1])
), ([0, 1, 2, 3], [3, 4, 0, 1])
), ([0, 1], [1, 0])
)
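# Quick reference for the axes argument used throughout this file:
# np.tensordot(a, b, ([i], [j])) sums over axis i of ``a`` paired with axis j
# of ``b``. A tiny self-contained check (illustrative only):
#
#     a = np.arange(6).reshape(2, 3)
#     b = np.arange(12).reshape(3, 4)
#     assert np.allclose(np.tensordot(a, b, ([1], [0])), a @ b)
#
# The nested calls above simply chain such pairwise contractions in the order
# chosen for the 1x1 network of boundary tensors t*_* and the operator o1_1.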
def Contract_scalar_1x2(\
t0_3,t1_3,t2_3,\
t0_2,t1_2,t2_2,\
t0_1,t1_1,t2_1,\
t0_0,t1_0,t2_0,\
o1_2,\
o1_1\
):
##############################
# ./input/input_Lx1Ly2.dat
##############################
# (o1_1*(t1_1.conj()*((t0_1*(t0_0*t1_0))*(t1_1*((t2_0*t2_1)*(t2_2*(t1_2.conj()*((o1_2*t1_2)*(t0_2*(t0_3*(t2_3*t1_3)))))))))))
# cpu_cost= 1.204e+11 memory= 4.0209e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_1, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
t0_1, np.tensordot(
t0_0, t1_0, ([0], [1])
), ([0], [0])
), np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t2_0, t2_1, ([0], [1])
), np.tensordot(
t2_2, np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
o1_2, t1_2, ([0], [4])
), np.tensordot(
t0_2, np.tensordot(
t0_3, np.tensordot(
t2_3, t1_3, ([0], [1])
), ([1], [1])
), ([1], [0])
), ([1, 2], [1, 4])
), ([0, 1, 4], [4, 6, 0])
), ([0, 2, 3], [5, 2, 0])
), ([1], [0])
), ([1, 2], [4, 1])
), ([0, 1, 3, 4], [6, 0, 3, 1])
), ([0, 1, 2, 3], [0, 4, 3, 1])
), ([0, 1], [1, 0])
)
def Contract_scalar_1x3(\
t0_4,t1_4,t2_4,\
t0_3,t1_3,t2_3,\
t0_2,t1_2,t2_2,\
t0_1,t1_1,t2_1,\
t0_0,t1_0,t2_0,\
o1_3,\
o1_2,\
o1_1\
):
##############################
# ./input/input_Lx1Ly3.dat
##############################
# (o1_2*(t1_2*((t2_2*(t2_1*(t1_1*((o1_1*t1_1.conj())*(t0_1*(t0_0*(t2_0*t1_0)))))))*(t1_2.conj()*(t0_2*(t0_3*(t1_3*((o1_3*t1_3.conj())*(t2_3*(t0_4*(t2_4*t1_4)))))))))))
# cpu_cost= 1.804e+11 memory= 5.0206e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_2, np.tensordot(
t1_2, np.tensordot(
np.tensordot(
t2_2, np.tensordot(
t2_1, np.tensordot(
t1_1, np.tensordot(
np.tensordot(
o1_1, t1_1.conj(), ([1], [4])
), np.tensordot(
t0_1, np.tensordot(
t0_0, np.tensordot(
t2_0, t1_0, ([1], [0])
), ([0], [1])
), ([0], [0])
), ([1, 4], [2, 5])
), ([0, 3, 4], [4, 6, 0])
), ([1, 2, 3], [5, 1, 3])
), ([1], [0])
), np.tensordot(
t1_2.conj(), np.tensordot(
t0_2, np.tensordot(
t0_3, np.tensordot(
t1_3, np.tensordot(
np.tensordot(
o1_3, t1_3.conj(), ([1], [4])
), np.tensordot(
t2_3, np.tensordot(
t0_4, np.tensordot(
t2_4, t1_4, ([0], [1])
), ([1], [1])
), ([0], [1])
), ([2, 3], [5, 2])
), ([1, 2, 4], [6, 4, 0])
), ([1, 2, 3], [5, 0, 2])
), ([1], [0])
), ([0, 1], [2, 4])
), ([0, 2, 4, 5], [6, 0, 1, 3])
), ([0, 1, 2, 3], [3, 4, 0, 1])
), ([0, 1], [0, 1])
)
def Contract_scalar_1x4(\
t0_5,t1_5,t2_5,\
t0_4,t1_4,t2_4,\
t0_3,t1_3,t2_3,\
t0_2,t1_2,t2_2,\
t0_1,t1_1,t2_1,\
t0_0,t1_0,t2_0,\
o1_4,\
o1_3,\
o1_2,\
o1_1\
):
##############################
# ./input/input_Lx1Ly4.dat
##############################
# (o1_1*(t1_1.conj()*((t1_0*(t2_0*t2_1))*(t1_1*((t0_0*t0_1)*(t0_2*(t1_2.conj()*((o1_2*t1_2)*(t2_2*(t2_3*(t1_3*((o1_3*t1_3.conj())*(t0_3*(t0_4*(t1_4.conj()*((t1_4*o1_4)*(t2_4*(t0_5*(t1_5*t2_5)))))))))))))))))))
# cpu_cost= 2.404e+11 memory= 4.0617e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_1, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
t1_0, np.tensordot(
t2_0, t2_1, ([0], [1])
), ([0], [0])
), np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t0_0, t0_1, ([1], [0])
), np.tensordot(
t0_2, np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
o1_2, t1_2, ([0], [4])
), np.tensordot(
t2_2, np.tensordot(
t2_3, np.tensordot(
t1_3, np.tensordot(
np.tensordot(
o1_3, t1_3.conj(), ([1], [4])
), np.tensordot(
t0_3, np.tensordot(
t0_4, np.tensordot(
t1_4.conj(), np.tensordot(
np.tensordot(
t1_4, o1_4, ([4], [0])
), np.tensordot(
t2_4, np.tensordot(
t0_5, np.tensordot(
t1_5, t2_5, ([1], [0])
), ([1], [0])
), ([0], [3])
), ([1, 2], [4, 1])
), ([1, 2, 4], [6, 4, 2])
), ([1, 2, 3], [5, 2, 0])
), ([1], [0])
), ([1, 2], [2, 3])
), ([0, 1, 4], [4, 5, 0])
), ([0, 2, 3], [5, 0, 2])
), ([0], [0])
), ([2, 3], [3, 1])
), ([1, 2, 4], [5, 4, 0])
), ([1, 2, 3], [5, 2, 0])
), ([1], [0])
), ([0, 1], [1, 4])
), ([0, 1, 3, 4], [3, 1, 6, 0])
), ([0, 1, 2, 3], [3, 4, 1, 0])
), ([0, 1], [1, 0])
)
def Contract_scalar_1x5(\
t0_6,t1_6,t2_6,\
t0_5,t1_5,t2_5,\
t0_4,t1_4,t2_4,\
t0_3,t1_3,t2_3,\
t0_2,t1_2,t2_2,\
t0_1,t1_1,t2_1,\
t0_0,t1_0,t2_0,\
o1_5,\
o1_4,\
o1_3,\
o1_2,\
o1_1\
):
##############################
# ./input/input_Lx1Ly5.dat
##############################
# (o1_2*(t1_2.conj()*((t0_2*(t0_1*(t1_1.conj()*((o1_1*t1_1)*(t2_1*(t0_0*(t2_0*t1_0)))))))*(t1_2*(t2_2*(t0_3*(t1_3.conj()*((t1_3*o1_3)*(t2_3*(t0_4*(t1_4.conj()*((o1_4*t1_4)*(t2_4*(t0_5*(t1_5.conj()*((o1_5*t1_5)*(t2_5*(t0_6*(t2_6*t1_6)))))))))))))))))))
# cpu_cost= 3.004e+11 memory= 5.0206e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_2, np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
t0_2, np.tensordot(
t0_1, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
o1_1, t1_1, ([0], [4])
), np.tensordot(
t2_1, np.tensordot(
t0_0, np.tensordot(
t2_0, t1_0, ([1], [0])
), ([0], [1])
), ([1], [1])
), ([3, 4], [1, 4])
), ([2, 3, 4], [4, 6, 0])
), ([0, 2, 3], [5, 2, 0])
), ([0], [0])
), np.tensordot(
t1_2, np.tensordot(
t2_2, np.tensordot(
t0_3, np.tensordot(
t1_3.conj(), np.tensordot(
np.tensordot(
t1_3, o1_3, ([4], [0])
), np.tensordot(
t2_3, np.tensordot(
t0_4, np.tensordot(
t1_4.conj(), np.tensordot(
np.tensordot(
o1_4, t1_4, ([0], [4])
), np.tensordot(
t2_4, np.tensordot(
t0_5, np.tensordot(
t1_5.conj(), np.tensordot(
np.tensordot(
o1_5, t1_5, ([0], [4])
), np.tensordot(
t2_5, np.tensordot(
t0_6, np.tensordot(
t2_6, t1_6, ([0], [1])
), ([1], [1])
), ([0], [1])
), ([2, 3], [4, 1])
), ([1, 2, 4], [6, 4, 0])
), ([1, 2, 3], [5, 2, 0])
), ([0], [3])
), ([2, 3], [5, 1])
), ([1, 2, 4], [6, 4, 0])
), ([1, 2, 3], [5, 2, 0])
), ([0], [3])
), ([1, 2], [5, 1])
), ([1, 2, 4], [6, 4, 2])
), ([1, 2, 3], [5, 2, 0])
), ([0], [3])
), ([1, 2], [5, 1])
), ([0, 1, 4, 5], [5, 0, 1, 3])
), ([0, 1, 2, 3], [0, 4, 3, 1])
), ([0, 1], [1, 0])
)
def Contract_scalar_1x6(\
t0_7,t1_7,t2_7,\
t0_6,t1_6,t2_6,\
t0_5,t1_5,t2_5,\
t0_4,t1_4,t2_4,\
t0_3,t1_3,t2_3,\
t0_2,t1_2,t2_2,\
t0_1,t1_1,t2_1,\
t0_0,t1_0,t2_0,\
o1_6,\
o1_5,\
o1_4,\
o1_3,\
o1_2,\
o1_1\
):
##############################
# ./input/input_Lx1Ly6.dat
##############################
# (o1_3*(t1_3.conj()*((t0_3*(t2_2*(t1_2*((t1_2.conj()*o1_2)*(t0_2*(t2_1*(t1_1.conj()*((o1_1*t1_1)*(t0_1*(t0_0*(t2_0*t1_0)))))))))))*(t1_3*(t2_3*(t0_4*(t1_4.conj()*((o1_4*t1_4)*(t2_4*(t0_5*(t1_5*((o1_5*t1_5.conj())*(t2_5*(t2_6*(t1_6.conj()*((t1_6*o1_6)*(t0_6*(t0_7*(t2_7*t1_7)))))))))))))))))))
# cpu_cost= 3.604e+11 memory= 5.041e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_3, np.tensordot(
t1_3.conj(), np.tensordot(
np.tensordot(
t0_3, np.tensordot(
t2_2, np.tensordot(
t1_2, np.tensordot(
np.tensordot(
t1_2.conj(), o1_2, ([4], [1])
), np.tensordot(
t0_2, np.tensordot(
t2_1, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
o1_1, t1_1, ([0], [4])
), np.tensordot(
t0_1, np.tensordot(
t0_0, np.tensordot(
t2_0, t1_0, ([1], [0])
), ([0], [1])
), ([0], [0])
), ([1, 4], [1, 4])
), ([0, 3, 4], [4, 6, 0])
), ([1, 2, 3], [5, 3, 1])
), ([0], [3])
), ([0, 3], [2, 4])
), ([0, 3, 4], [4, 6, 2])
), ([1, 2, 3], [5, 1, 3])
), ([0], [3])
), np.tensordot(
t1_3, np.tensordot(
t2_3, np.tensordot(
t0_4, np.tensordot(
t1_4.conj(), np.tensordot(
np.tensordot(
o1_4, t1_4, ([0], [4])
), np.tensordot(
t2_4, np.tensordot(
t0_5, np.tensordot(
t1_5, np.tensordot(
np.tensordot(
o1_5, t1_5.conj(), ([1], [4])
), np.tensordot(
t2_5, np.tensordot(
t2_6, np.tensordot(
t1_6.conj(), np.tensordot(
np.tensordot(
t1_6, o1_6, ([4], [0])
), np.tensordot(
t0_6, np.tensordot(
t0_7, np.tensordot(
t2_7, t1_7, ([0], [1])
), ([1], [1])
), ([1], [0])
), ([0, 1], [1, 4])
), ([0, 1, 4], [4, 6, 2])
), ([0, 2, 3], [5, 2, 0])
), ([0], [0])
), ([2, 3], [3, 2])
), ([1, 2, 4], [5, 4, 0])
), ([1, 2, 3], [5, 0, 2])
), ([0], [3])
), ([2, 3], [4, 1])
), ([1, 2, 4], [6, 4, 0])
), ([1, 2, 3], [5, 2, 0])
), ([0], [3])
), ([1, 2], [5, 1])
), ([0, 1, 3, 4], [5, 0, 3, 1])
), ([0, 1, 2, 3], [0, 4, 3, 1])
), ([0, 1], [1, 0])
)
def Contract_scalar_2x1(\
t0_2,t1_2,t2_2,t3_2,\
t0_1,t1_1,t2_1,t3_1,\
t0_0,t1_0,t2_0,t3_0,\
o1_1,o2_1\
):
##############################
# ./input/input_Lx2Ly1.dat
##############################
# (o1_1*(t1_1.conj()*((t0_1*(t0_2*t1_2))*(t1_1*((t0_0*t1_0)*(t2_0*(t2_1.conj()*((o2_1*t2_1)*(t2_2*(t3_0*(t3_1*t3_2)))))))))))
# cpu_cost= 1.204e+11 memory= 4.0209e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_1, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
t0_1, np.tensordot(
t0_2, t1_2, ([1], [0])
), ([1], [0])
), np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t0_0, t1_0, ([0], [1])
), np.tensordot(
t2_0, np.tensordot(
t2_1.conj(), np.tensordot(
np.tensordot(
o2_1, t2_1, ([0], [4])
), np.tensordot(
t2_2, np.tensordot(
t3_0, np.tensordot(
t3_1, t3_2, ([0], [1])
), ([0], [0])
), ([1], [3])
), ([2, 3], [1, 4])
), ([1, 2, 4], [4, 6, 0])
), ([0, 2, 3], [5, 3, 1])
), ([1], [0])
), ([2, 3], [4, 1])
), ([0, 1, 3, 4], [3, 0, 6, 1])
), ([0, 1, 2, 3], [0, 1, 4, 3])
), ([0, 1], [1, 0])
)
def Contract_scalar_2x2(\
t0_3,t1_3,t2_3,t3_3,\
t0_2,t1_2,t2_2,t3_2,\
t0_1,t1_1,t2_1,t3_1,\
t0_0,t1_0,t2_0,t3_0,\
o1_2,o2_2,\
o1_1,o2_1\
):
##############################
# ./input/input_Lx2Ly2.dat
##############################
# (o1_2*(t1_2.conj()*((t0_2*(t0_3*t1_3))*(t1_2*((t2_2.conj()*((o2_2*t2_2)*(t2_3*(t3_3*t3_2))))*((t1_1*((t1_1.conj()*o1_1)*(t1_0*(t0_0*t0_1))))*(t2_1.conj()*((o2_1*t2_1)*(t2_0*(t3_0*t3_1))))))))))
# cpu_cost= 2.2004e+12 memory= 6.0008e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_2, np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
t0_2, np.tensordot(
t0_3, t1_3, ([1], [0])
), ([1], [0])
), np.tensordot(
t1_2, np.tensordot(
np.tensordot(
t2_2.conj(), np.tensordot(
np.tensordot(
o2_2, t2_2, ([0], [4])
), np.tensordot(
t2_3, np.tensordot(
t3_3, t3_2, ([1], [0])
), ([1], [0])
), ([2, 3], [1, 4])
), ([1, 2, 4], [4, 6, 0])
), np.tensordot(
np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t1_1.conj(), o1_1, ([4], [1])
), np.tensordot(
t1_0, np.tensordot(
t0_0, t0_1, ([1], [0])
), ([1], [0])
), ([0, 3], [5, 2])
), ([0, 3, 4], [6, 4, 2])
), np.tensordot(
t2_1.conj(), np.tensordot(
np.tensordot(
o2_1, t2_1, ([0], [4])
), np.tensordot(
t2_0, np.tensordot(
t3_0, t3_1, ([0], [1])
), ([0], [0])
), ([3, 4], [4, 1])
), ([2, 3, 4], [6, 4, 0])
), ([1, 3, 4], [2, 0, 4])
), ([1, 3, 5], [3, 4, 5])
), ([2, 3], [1, 3])
), ([0, 1, 3, 4], [6, 0, 4, 1])
), ([0, 1, 2, 3], [0, 1, 3, 4])
), ([0, 1], [1, 0])
)
def Contract_scalar_2x3(\
t0_4,t1_4,t2_4,t3_4,\
t0_3,t1_3,t2_3,t3_3,\
t0_2,t1_2,t2_2,t3_2,\
t0_1,t1_1,t2_1,t3_1,\
t0_0,t1_0,t2_0,t3_0,\
o1_3,o2_3,\
o1_2,o2_2,\
o1_1,o2_1\
):
##############################
# ./input/input_Lx2Ly3.dat
##############################
# (o2_1*(t2_1.conj()*((t2_0*(t3_0*t3_1))*(t2_1*((t1_1*((o1_1*t1_1.conj())*(t1_0*(t0_0*t0_1))))*(t3_2*(t2_2.conj()*((o2_2*t2_2)*(t1_2.conj()*((t1_2*o1_2)*(t0_2*((t2_3*((t2_3.conj()*o2_3)*(t2_4*(t3_4*t3_3))))*(t1_3.conj()*((o1_3*t1_3)*(t1_4*(t0_4*t0_3))))))))))))))))
# cpu_cost= 1.22004e+13 memory= 3.02011e+10
# final_bond_order ()
##############################
return np.tensordot(
o2_1, np.tensordot(
t2_1.conj(), np.tensordot(
np.tensordot(
t2_0, np.tensordot(
t3_0, t3_1, ([0], [1])
), ([0], [0])
), np.tensordot(
t2_1, np.tensordot(
np.tensordot(
t1_1, np.tensordot(
np.tensordot(
o1_1, t1_1.conj(), ([1], [4])
), np.tensordot(
t1_0, np.tensordot(
t0_0, t0_1, ([1], [0])
), ([1], [0])
), ([1, 4], [5, 2])
), ([0, 3, 4], [6, 4, 0])
), np.tensordot(
t3_2, np.tensordot(
t2_2.conj(), np.tensordot(
np.tensordot(
o2_2, t2_2, ([0], [4])
), np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
t1_2, o1_2, ([4], [0])
), np.tensordot(
t0_2, np.tensordot(
np.tensordot(
t2_3, np.tensordot(
np.tensordot(
t2_3.conj(), o2_3, ([4], [1])
), np.tensordot(
t2_4, np.tensordot(
t3_4, t3_3, ([1], [0])
), ([1], [0])
), ([1, 2], [2, 5])
), ([1, 2, 4], [4, 6, 2])
), np.tensordot(
t1_3.conj(), np.tensordot(
np.tensordot(
o1_3, t1_3, ([0], [4])
), np.tensordot(
t1_4, np.tensordot(
t0_4, t0_3, ([0], [1])
), ([0], [0])
), ([1, 2], [4, 1])
), ([0, 1, 4], [6, 4, 0])
), ([0, 2, 4], [2, 0, 4])
), ([1], [5])
), ([0, 1], [1, 7])
), ([0, 1, 4], [4, 8, 2])
), ([1, 2], [2, 5])
), ([0, 1, 4], [3, 7, 0])
), ([0, 2, 3], [7, 2, 0])
), ([0, 2, 5], [4, 3, 5])
), ([0, 1], [0, 5])
), ([0, 1, 3, 4], [4, 1, 5, 0])
), ([0, 1, 2, 3], [3, 4, 1, 0])
), ([0, 1], [1, 0])
)
def Contract_scalar_2x4(\
t0_5,t1_5,t2_5,t3_5,\
t0_4,t1_4,t2_4,t3_4,\
t0_3,t1_3,t2_3,t3_3,\
t0_2,t1_2,t2_2,t3_2,\
t0_1,t1_1,t2_1,t3_1,\
t0_0,t1_0,t2_0,t3_0,\
o1_4,o2_4,\
o1_3,o2_3,\
o1_2,o2_2,\
o1_1,o2_1\
):
##############################
# ./input/input_Lx2Ly4.dat
##############################
# (o2_4*(t2_4*((t2_5*(t3_5*t3_4))*(t2_4.conj()*((t1_4*((o1_4*t1_4.conj())*(t0_4*(t0_5*t1_5))))*(t0_3*(t1_3.conj()*((o1_3*t1_3)*(t2_3.conj()*((o2_3*t2_3)*(t3_3*(t0_2*(t1_2.conj()*((o1_2*t1_2)*(t2_2.conj()*((t2_2*o2_2)*(t3_2*((t1_1.conj()*((o1_1*t1_1)*(t1_0*(t0_0*t0_1))))*(t2_1*((o2_1*t2_1.conj())*(t3_1*(t3_0*t2_0))))))))))))))))))))))
# cpu_cost= 2.22004e+13 memory= 3.02032e+10
# final_bond_order ()
##############################
return np.tensordot(
o2_4, np.tensordot(
t2_4, np.tensordot(
np.tensordot(
t2_5, np.tensordot(
t3_5, t3_4, ([1], [0])
), ([1], [0])
), np.tensordot(
t2_4.conj(), np.tensordot(
np.tensordot(
t1_4, np.tensordot(
np.tensordot(
o1_4, t1_4.conj(), ([1], [4])
), np.tensordot(
t0_4, np.tensordot(
t0_5, t1_5, ([1], [0])
), ([1], [0])
), ([1, 2], [2, 5])
), ([0, 1, 4], [4, 6, 0])
), np.tensordot(
t0_3, np.tensordot(
t1_3.conj(), np.tensordot(
np.tensordot(
o1_3, t1_3, ([0], [4])
), np.tensordot(
t2_3.conj(), np.tensordot(
np.tensordot(
o2_3, t2_3, ([0], [4])
), np.tensordot(
t3_3, np.tensordot(
t0_2, np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
o1_2, t1_2, ([0], [4])
), np.tensordot(
t2_2.conj(), np.tensordot(
np.tensordot(
t2_2, o2_2, ([4], [0])
), np.tensordot(
t3_2, np.tensordot(
np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
o1_1, t1_1, ([0], [4])
), np.tensordot(
t1_0, np.tensordot(
t0_0, t0_1, ([1], [0])
), ([1], [0])
), ([1, 4], [4, 1])
), ([0, 3, 4], [6, 4, 0])
), np.tensordot(
t2_1, np.tensordot(
np.tensordot(
o2_1, t2_1.conj(), ([1], [4])
), np.tensordot(
t3_1, np.tensordot(
t3_0, t2_0, ([1], [0])
), ([1], [0])
), ([3, 4], [2, 5])
), ([2, 3, 4], [4, 6, 0])
), ([1, 3, 4], [2, 0, 5])
), ([1], [5])
), ([2, 3], [1, 6])
), ([2, 3, 4], [4, 8, 2])
), ([3, 4], [2, 6])
), ([2, 3, 4], [3, 7, 0])
), ([0, 2, 3], [7, 2, 0])
), ([1], [5])
), ([3, 4], [1, 7])
), ([2, 3, 4], [4, 8, 0])
), ([3, 4], [2, 7])
), ([2, 3, 4], [3, 8, 0])
), ([0, 2, 3], [7, 2, 0])
), ([1, 3, 4], [2, 1, 0])
), ([0, 3], [1, 3])
), ([0, 2, 3, 5], [4, 0, 6, 1])
), ([0, 1, 2, 3], [3, 0, 1, 4])
), ([0, 1], [0, 1])
)
def Contract_scalar_2x5(\
t0_6,t1_6,t2_6,t3_6,\
t0_5,t1_5,t2_5,t3_5,\
t0_4,t1_4,t2_4,t3_4,\
t0_3,t1_3,t2_3,t3_3,\
t0_2,t1_2,t2_2,t3_2,\
t0_1,t1_1,t2_1,t3_1,\
t0_0,t1_0,t2_0,t3_0,\
o1_5,o2_5,\
o1_4,o2_4,\
o1_3,o2_3,\
o1_2,o2_2,\
o1_1,o2_1\
):
##############################
# ./input/input_Lx2Ly5.dat
##############################
# (o1_2*(t1_2*((t0_2*((t1_1*((t1_1.conj()*o1_1)*(t1_0*(t0_0*t0_1))))*(t2_1*((o2_1*t2_1.conj())*(t2_0*(t3_0*t3_1))))))*(t1_2.conj()*(t2_2.conj()*((o2_2*t2_2)*(t3_2*(t0_3*(t1_3.conj()*((t1_3*o1_3)*(t2_3*((t2_3.conj()*o2_3)*(t3_3*(t0_4*(t1_4*((t1_4.conj()*o1_4)*(t2_4*((t2_4.conj()*o2_4)*(t3_4*((t2_5*((o2_5*t2_5.conj())*(t2_6*(t3_6*t3_5))))*(t1_5.conj()*((o1_5*t1_5)*(t0_5*(t0_6*t1_6))))))))))))))))))))))))
# cpu_cost= 3.22004e+13 memory= 4.00042e+10
# final_bond_order ()
##############################
return np.tensordot(
o1_2, np.tensordot(
t1_2, np.tensordot(
np.tensordot(
t0_2, np.tensordot(
np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t1_1.conj(), o1_1, ([4], [1])
), np.tensordot(
t1_0, np.tensordot(
t0_0, t0_1, ([1], [0])
), ([1], [0])
), ([0, 3], [5, 2])
), ([0, 3, 4], [6, 4, 2])
), np.tensordot(
t2_1, np.tensordot(
np.tensordot(
o2_1, t2_1.conj(), ([1], [4])
), np.tensordot(
t2_0, np.tensordot(
t3_0, t3_1, ([0], [1])
), ([0], [0])
), ([3, 4], [5, 2])
), ([2, 3, 4], [6, 4, 0])
), ([1, 3, 4], [0, 2, 4])
), ([0], [2])
), np.tensordot(
t1_2.conj(), np.tensordot(
t2_2.conj(), np.tensordot(
np.tensordot(
o2_2, t2_2, ([0], [4])
), np.tensordot(
t3_2, np.tensordot(
t0_3, np.tensordot(
t1_3.conj(), np.tensordot(
np.tensordot(
t1_3, o1_3, ([4], [0])
), np.tensordot(
t2_3, np.tensordot(
np.tensordot(
t2_3.conj(), o2_3, ([4], [1])
), np.tensordot(
t3_3, np.tensordot(
t0_4, np.tensordot(
t1_4, np.tensordot(
np.tensordot(
t1_4.conj(), o1_4, ([4], [1])
), np.tensordot(
t2_4, np.tensordot(
np.tensordot(
t2_4.conj(), o2_4, ([4], [1])
), np.tensordot(
t3_4, np.tensordot(
np.tensordot(
t2_5, np.tensordot(
np.tensordot(
o2_5, t2_5.conj(), ([1], [4])
), np.tensordot(
t2_6, np.tensordot(
t3_6, t3_5, ([1], [0])
), ([1], [0])
), ([2, 3], [2, 5])
), ([1, 2, 4], [4, 6, 0])
), np.tensordot(
t1_5.conj(), np.tensordot(
np.tensordot(
o1_5, t1_5, ([0], [4])
), np.tensordot(
t0_5, np.tensordot(
t0_6, t1_6, ([1], [0])
), ([1], [0])
), ([1, 2], [1, 4])
), ([0, 1, 4], [4, 6, 0])
), ([0, 2, 4], [2, 0, 5])
), ([0], [2])
), ([1, 2], [4, 2])
), ([1, 2, 4], [5, 4, 2])
), ([1, 2], [5, 2])
), ([1, 2, 4], [7, 3, 2])
), ([1, 2, 3], [7, 0, 2])
), ([0], [5])
), ([1, 2], [7, 2])
), ([1, 2, 4], [8, 4, 2])
), ([1, 2], [6, 0])
), ([1, 2, 4], [8, 4, 2])
), ([1, 2, 3], [7, 2, 0])
), ([0], [5])
), ([2, 3], [6, 1])
), ([1, 2, 4], [8, 4, 0])
), ([1, 2], [6, 0])
), ([0, 2, 4, 5, 6, 7], [7, 0, 1, 5, 3, 6])
), ([0, 1, 2, 3], [0, 4, 3, 1])
), ([0, 1], [0, 1])
)
def Contract_scalar_3x1(\
t0_2,t1_2,t2_2,t3_2,t4_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,\
o1_1,o2_1,o3_1\
):
##############################
# ./input/input_Lx3Ly1.dat
##############################
# (o2_1*(t2_1*((t2_2*(t1_2*(t1_1*((o1_1*t1_1.conj())*(t1_0*(t0_0*(t0_2*t0_1)))))))*(t2_1.conj()*(t2_0*(t3_2*(t3_1*((o3_1*t3_1.conj())*(t3_0*(t4_0*(t4_2*t4_1)))))))))))
# cpu_cost= 1.804e+11 memory= 5.0206e+08
# final_bond_order ()
##############################
return np.tensordot(
o2_1, np.tensordot(
t2_1, np.tensordot(
np.tensordot(
t2_2, np.tensordot(
t1_2, np.tensordot(
t1_1, np.tensordot(
np.tensordot(
o1_1, t1_1.conj(), ([1], [4])
), np.tensordot(
t1_0, np.tensordot(
t0_0, np.tensordot(
t0_2, t0_1, ([0], [1])
), ([1], [1])
), ([1], [0])
), ([1, 4], [5, 2])
), ([0, 3, 4], [6, 4, 0])
), ([0, 2, 3], [5, 0, 2])
), ([0], [0])
), np.tensordot(
t2_1.conj(), np.tensordot(
t2_0, np.tensordot(
t3_2, np.tensordot(
t3_1, np.tensordot(
np.tensordot(
o3_1, t3_1.conj(), ([1], [4])
), np.tensordot(
t3_0, np.tensordot(
t4_0, np.tensordot(
t4_2, t4_1, ([1], [0])
), ([0], [1])
), ([0], [0])
), ([3, 4], [5, 2])
), ([2, 3, 4], [6, 4, 0])
), ([1, 2, 3], [5, 1, 3])
), ([0], [3])
), ([2, 3], [5, 2])
), ([0, 2, 4, 5], [5, 1, 0, 3])
), ([0, 1, 2, 3], [1, 0, 4, 3])
), ([0, 1], [0, 1])
)
def Contract_scalar_3x2(\
t0_3,t1_3,t2_3,t3_3,t4_3,\
t0_2,t1_2,t2_2,t3_2,t4_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,\
o1_2,o2_2,o3_2,\
o1_1,o2_1,o3_1\
):
##############################
# ./input/input_Lx3Ly2.dat
##############################
# (o2_1*(t2_1.conj()*((t2_0*((t1_2.conj()*((o1_2*t1_2)*(t1_3*(t0_3*t0_2))))*(t1_1*((o1_1*t1_1.conj())*(t0_1*(t0_0*t1_0))))))*(t2_1*(t2_2.conj()*((o2_2*t2_2)*(t2_3*((t3_2*((t3_2.conj()*o3_2)*(t4_2*(t4_3*t3_3))))*(t3_1.conj()*((t3_1*o3_1)*(t4_1*(t4_0*t3_0))))))))))))
# cpu_cost= 1.22004e+13 memory= 4.00001e+10
# final_bond_order ()
##############################
return np.tensordot(
o2_1, np.tensordot(
t2_1.conj(), np.tensordot(
np.tensordot(
t2_0, np.tensordot(
np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
o1_2, t1_2, ([0], [4])
), np.tensordot(
t1_3, np.tensordot(
t0_3, t0_2, ([0], [1])
), ([0], [0])
), ([1, 2], [4, 1])
), ([0, 1, 4], [6, 4, 0])
), np.tensordot(
t1_1, np.tensordot(
np.tensordot(
o1_1, t1_1.conj(), ([1], [4])
), np.tensordot(
t0_1, np.tensordot(
t0_0, t1_0, ([0], [1])
), ([0], [0])
), ([1, 4], [2, 5])
), ([0, 3, 4], [4, 6, 0])
), ([1, 3, 5], [2, 0, 4])
), ([1], [5])
), np.tensordot(
t2_1, np.tensordot(
t2_2.conj(), np.tensordot(
np.tensordot(
o2_2, t2_2, ([0], [4])
), np.tensordot(
t2_3, np.tensordot(
np.tensordot(
t3_2, np.tensordot(
np.tensordot(
t3_2.conj(), o3_2, ([4], [1])
), np.tensordot(
t4_2, np.tensordot(
t4_3, t3_3, ([0], [1])
), ([0], [0])
), ([1, 2], [5, 2])
), ([1, 2, 4], [6, 4, 2])
), np.tensordot(
t3_1.conj(), np.tensordot(
np.tensordot(
t3_1, o3_1, ([4], [0])
), np.tensordot(
t4_1, np.tensordot(
t4_0, t3_0, ([1], [0])
), ([1], [0])
), ([2, 3], [1, 4])
), ([2, 3, 4], [4, 6, 2])
), ([1, 3, 4], [3, 1, 4])
), ([1], [2])
), ([2, 3], [1, 3])
), ([1, 2, 4], [4, 5, 0])
), ([1, 2], [3, 6])
), ([0, 1, 3, 4, 5, 6], [8, 1, 3, 5, 6, 0])
), ([0, 1, 2, 3], [1, 3, 4, 0])
), ([0, 1], [1, 0])
)
def Contract_scalar_3x3(\
t0_4,t1_4,t2_4,t3_4,t4_4,\
t0_3,t1_3,t2_3,t3_3,t4_3,\
t0_2,t1_2,t2_2,t3_2,t4_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,\
o1_3,o2_3,o3_3,\
o1_2,o2_2,o3_2,\
o1_1,o2_1,o3_1\
):
##############################
# ./input/input_Lx3Ly3.dat
##############################
# (o3_1*(t3_1.conj()*((t4_1*(t4_0*t3_0))*(t3_1*(t2_0*(t2_1*((t2_1.conj()*o2_1)*((t1_1.conj()*((o1_1*t1_1)*(t0_1*(t0_0*t1_0))))*(t0_2*(t1_2*((t1_2.conj()*o1_2)*(t2_2*((t2_2.conj()*o2_2)*(t3_2*((o3_2*t3_2.conj())*(t4_2*((t1_3*((o1_3*t1_3.conj())*(t0_3*(t0_4*t1_4))))*(t2_3.conj()*((t2_3*o2_3)*(t2_4*(t3_3*((t3_3.conj()*o3_3)*(t3_4*(t4_4*t4_3))))))))))))))))))))))))
# cpu_cost= 1.6102e+15 memory= 3.0002e+12
# final_bond_order ()
##############################
return np.tensordot(
o3_1, np.tensordot(
t3_1.conj(), np.tensordot(
np.tensordot(
t4_1, np.tensordot(
t4_0, t3_0, ([1], [0])
), ([1], [0])
), np.tensordot(
t3_1, np.tensordot(
t2_0, np.tensordot(
t2_1, np.tensordot(
np.tensordot(
t2_1.conj(), o2_1, ([4], [1])
), np.tensordot(
np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
o1_1, t1_1, ([0], [4])
), np.tensordot(
t0_1, np.tensordot(
t0_0, t1_0, ([0], [1])
), ([0], [0])
), ([1, 4], [1, 4])
), ([0, 3, 4], [4, 6, 0])
), np.tensordot(
t0_2, np.tensordot(
t1_2, np.tensordot(
np.tensordot(
t1_2.conj(), o1_2, ([4], [1])
), np.tensordot(
t2_2, np.tensordot(
np.tensordot(
t2_2.conj(), o2_2, ([4], [1])
), np.tensordot(
t3_2, np.tensordot(
np.tensordot(
o3_2, t3_2.conj(), ([1], [4])
), np.tensordot(
t4_2, np.tensordot(
np.tensordot(
t1_3, np.tensordot(
np.tensordot(
o1_3, t1_3.conj(), ([1], [4])
), np.tensordot(
t0_3, np.tensordot(
t0_4, t1_4, ([1], [0])
), ([1], [0])
), ([1, 2], [2, 5])
), ([0, 1, 4], [4, 6, 0])
), np.tensordot(
t2_3.conj(), np.tensordot(
np.tensordot(
t2_3, o2_3, ([4], [0])
), np.tensordot(
t2_4, np.tensordot(
t3_3, np.tensordot(
np.tensordot(
t3_3.conj(), o3_3, ([4], [1])
), np.tensordot(
t3_4, np.tensordot(
t4_4, t4_3, ([1], [0])
), ([1], [0])
), ([1, 2], [2, 5])
), ([1, 2, 4], [4, 6, 2])
), ([1], [4])
), ([1, 2], [1, 3])
), ([1, 2, 4], [4, 6, 2])
), ([0, 2, 5], [2, 0, 4])
), ([0], [7])
), ([2, 3], [9, 2])
), ([1, 2, 4], [10, 4, 0])
), ([1, 2], [8, 2])
), ([1, 2, 4], [10, 3, 2])
), ([1, 2], [8, 2])
), ([1, 2, 4], [9, 3, 2])
), ([1, 2, 3], [9, 0, 2])
), ([0, 2, 4], [2, 1, 0])
), ([0, 1], [0, 4])
), ([0, 1, 4], [3, 5, 2])
), ([1, 2, 3], [4, 1, 3])
), ([0, 1], [1, 3])
), ([0, 1, 3, 4], [6, 0, 3, 1])
), ([0, 1, 2, 3], [3, 4, 0, 1])
), ([0, 1], [1, 0])
)
def Contract_scalar_3x4(\
t0_5,t1_5,t2_5,t3_5,t4_5,\
t0_4,t1_4,t2_4,t3_4,t4_4,\
t0_3,t1_3,t2_3,t3_3,t4_3,\
t0_2,t1_2,t2_2,t3_2,t4_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,\
o1_4,o2_4,o3_4,\
o1_3,o2_3,o3_3,\
o1_2,o2_2,o3_2,\
o1_1,o2_1,o3_1\
):
##############################
# ./input/input_Lx3Ly4.dat
##############################
# (o2_2*(t2_2*((t1_2.conj()*((t1_2*o1_2)*(t0_2*((t3_1.conj()*((t3_1*o3_1)*(t4_1*(t4_0*t3_0))))*(t2_1*((t2_1.conj()*o2_1)*(t2_0*(t1_1*((o1_1*t1_1.conj())*(t0_1*(t0_0*t1_0)))))))))))*(t2_2.conj()*(t3_2*((o3_2*t3_2.conj())*(t4_2*(t0_3*(t1_3.conj()*((t1_3*o1_3)*(t2_3*((t2_3.conj()*o2_3)*(t3_3*((o3_3*t3_3.conj())*(t4_3*((t1_4*((t1_4.conj()*o1_4)*(t0_4*(t0_5*t1_5))))*(t2_4.conj()*((o2_4*t2_4)*(t2_5*(t3_4.conj()*((o3_4*t3_4)*(t4_4*(t4_5*t3_5)))))))))))))))))))))))
# cpu_cost= 3.0102e+15 memory= 5e+12
# final_bond_order ()
##############################
return np.tensordot(
o2_2, np.tensordot(
t2_2, np.tensordot(
np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
t1_2, o1_2, ([4], [0])
), np.tensordot(
t0_2, np.tensordot(
np.tensordot(
t3_1.conj(), np.tensordot(
np.tensordot(
t3_1, o3_1, ([4], [0])
), np.tensordot(
t4_1, np.tensordot(
t4_0, t3_0, ([1], [0])
), ([1], [0])
), ([2, 3], [1, 4])
), ([2, 3, 4], [4, 6, 2])
), np.tensordot(
t2_1, np.tensordot(
np.tensordot(
t2_1.conj(), o2_1, ([4], [1])
), np.tensordot(
t2_0, np.tensordot(
t1_1, np.tensordot(
np.tensordot(
o1_1, t1_1.conj(), ([1], [4])
), np.tensordot(
t0_1, np.tensordot(
t0_0, t1_0, ([0], [1])
), ([0], [0])
), ([1, 4], [2, 5])
), ([0, 3, 4], [4, 6, 0])
), ([1], [5])
), ([0, 3], [6, 2])
), ([0, 3, 4], [6, 4, 2])
), ([0, 2, 5], [3, 1, 4])
), ([0], [7])
), ([0, 3], [1, 8])
), ([0, 3, 4], [4, 10, 2])
), np.tensordot(
t2_2.conj(), np.tensordot(
t3_2, np.tensordot(
np.tensordot(
o3_2, t3_2.conj(), ([1], [4])
), np.tensordot(
t4_2, np.tensordot(
t0_3, np.tensordot(
t1_3.conj(), np.tensordot(
np.tensordot(
t1_3, o1_3, ([4], [0])
), np.tensordot(
t2_3, np.tensordot(
np.tensordot(
t2_3.conj(), o2_3, ([4], [1])
), np.tensordot(
t3_3, np.tensordot(
np.tensordot(
o3_3, t3_3.conj(), ([1], [4])
), np.tensordot(
t4_3, np.tensordot(
np.tensordot(
t1_4, np.tensordot(
np.tensordot(
t1_4.conj(), o1_4, ([4], [1])
), np.tensordot(
t0_4, np.tensordot(
t0_5, t1_5, ([1], [0])
), ([1], [0])
), ([0, 1], [2, 5])
), ([0, 1, 4], [4, 6, 2])
), np.tensordot(
t2_4.conj(), np.tensordot(
np.tensordot(
o2_4, t2_4, ([0], [4])
), np.tensordot(
t2_5, np.tensordot(
t3_4.conj(), np.tensordot(
np.tensordot(
o3_4, t3_4, ([0], [4])
), np.tensordot(
t4_4, np.tensordot(
t4_5, t3_5, ([0], [1])
), ([0], [0])
), ([2, 3], [4, 1])
), ([1, 2, 4], [6, 4, 0])
), ([1], [5])
), ([2, 3], [1, 5])
), ([1, 2, 4], [4, 5, 0])
), ([0, 2, 5], [2, 0, 4])
), ([0], [7])
), ([2, 3], [8, 2])
), ([1, 2, 4], [10, 4, 0])
), ([1, 2], [8, 2])
), ([1, 2, 4], [10, 3, 2])
), ([1, 2], [7, 0])
), ([1, 2, 4], [9, 4, 2])
), ([1, 2, 3], [9, 2, 0])
), ([0], [7])
), ([2, 3], [9, 2])
), ([1, 2, 4], [10, 4, 0])
), ([1, 2], [9, 2])
), ([0, 1, 2, 4, 5, 6, 7, 9], [8, 0, 9, 7, 5, 4, 6, 1])
), ([0, 1, 2, 3], [0, 4, 3, 1])
), ([0, 1], [0, 1])
)
def Contract_scalar_4x1(\
t0_2,t1_2,t2_2,t3_2,t4_2,t5_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,t5_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,t5_0,\
o1_1,o2_1,o3_1,o4_1\
):
##############################
# ./input/input_Lx4Ly1.dat
##############################
# (o1_1*(t1_1.conj()*((t1_2*(t0_2*t0_1))*(t1_1*((t0_0*t1_0)*(t2_0*(t2_1.conj()*((o2_1*t2_1)*(t2_2*(t3_0*(t3_1*((o3_1*t3_1.conj())*(t3_2*(t4_2*(t4_1.conj()*((t4_1*o4_1)*(t4_0*(t5_0*(t5_2*t5_1)))))))))))))))))))
# cpu_cost= 2.404e+11 memory= 4.0617e+08
# final_bond_order ()
##############################
return np.tensordot(
o1_1, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
t1_2, np.tensordot(
t0_2, t0_1, ([0], [1])
), ([0], [0])
), np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t0_0, t1_0, ([0], [1])
), np.tensordot(
t2_0, np.tensordot(
t2_1.conj(), np.tensordot(
np.tensordot(
o2_1, t2_1, ([0], [4])
), np.tensordot(
t2_2, np.tensordot(
t3_0, np.tensordot(
t3_1, np.tensordot(
np.tensordot(
o3_1, t3_1.conj(), ([1], [4])
), np.tensordot(
t3_2, np.tensordot(
t4_2, np.tensordot(
t4_1.conj(), np.tensordot(
np.tensordot(
t4_1, o4_1, ([4], [0])
), np.tensordot(
t4_0, np.tensordot(
t5_0, np.tensordot(
t5_2, t5_1, ([1], [0])
), ([0], [1])
), ([0], [0])
), ([2, 3], [4, 1])
), ([2, 3, 4], [6, 4, 2])
), ([1, 2, 3], [5, 3, 1])
), ([1], [0])
), ([2, 3], [2, 3])
), ([1, 2, 4], [4, 5, 0])
), ([0, 2, 3], [5, 1, 3])
), ([1], [3])
), ([2, 3], [1, 4])
), ([1, 2, 4], [4, 6, 0])
), ([0, 2, 3], [5, 3, 1])
), ([1], [0])
), ([2, 3], [4, 1])
), ([0, 1, 3, 4], [6, 1, 3, 0])
), ([0, 1, 2, 3], [1, 0, 4, 3])
), ([0, 1], [1, 0])
)
def Contract_scalar_4x2(\
t0_3,t1_3,t2_3,t3_3,t4_3,t5_3,\
t0_2,t1_2,t2_2,t3_2,t4_2,t5_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,t5_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,t5_0,\
o1_2,o2_2,o3_2,o4_2,\
o1_1,o2_1,o3_1,o4_1\
):
##############################
# ./input/input_Lx4Ly2.dat
##############################
# (o4_2*(t4_2*((t4_3*(t5_3*t5_2))*(t4_2.conj()*((t4_1.conj()*((t4_1*o4_1)*(t4_0*(t5_0*t5_1))))*(t3_3*(t3_2*((o3_2*t3_2.conj())*(t3_1.conj()*((o3_1*t3_1)*(t3_0*(t2_0*(t2_1*((o2_1*t2_1.conj())*(t2_2.conj()*((o2_2*t2_2)*(t2_3*((t1_1.conj()*((o1_1*t1_1)*(t0_1*(t0_0*t1_0))))*(t1_2.conj()*((o1_2*t1_2)*(t0_2*(t0_3*t1_3))))))))))))))))))))))
# cpu_cost= 2.22004e+13 memory= 3.02032e+10
# final_bond_order ()
##############################
return np.tensordot(
o4_2, np.tensordot(
t4_2, np.tensordot(
np.tensordot(
t4_3, np.tensordot(
t5_3, t5_2, ([1], [0])
), ([1], [0])
), np.tensordot(
t4_2.conj(), np.tensordot(
np.tensordot(
t4_1.conj(), np.tensordot(
np.tensordot(
t4_1, o4_1, ([4], [0])
), np.tensordot(
t4_0, np.tensordot(
t5_0, t5_1, ([0], [1])
), ([0], [0])
), ([2, 3], [4, 1])
), ([2, 3, 4], [6, 4, 2])
), np.tensordot(
t3_3, np.tensordot(
t3_2, np.tensordot(
np.tensordot(
o3_2, t3_2.conj(), ([1], [4])
), np.tensordot(
t3_1.conj(), np.tensordot(
np.tensordot(
o3_1, t3_1, ([0], [4])
), np.tensordot(
t3_0, np.tensordot(
t2_0, np.tensordot(
t2_1, np.tensordot(
np.tensordot(
o2_1, t2_1.conj(), ([1], [4])
), np.tensordot(
t2_2.conj(), np.tensordot(
np.tensordot(
o2_2, t2_2, ([0], [4])
), np.tensordot(
t2_3, np.tensordot(
np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
o1_1, t1_1, ([0], [4])
), np.tensordot(
t0_1, np.tensordot(
t0_0, t1_0, ([0], [1])
), ([0], [0])
), ([1, 4], [1, 4])
), ([0, 3, 4], [4, 6, 0])
), np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
o1_2, t1_2, ([0], [4])
), np.tensordot(
t0_2, np.tensordot(
t0_3, t1_3, ([1], [0])
), ([1], [0])
), ([1, 2], [1, 4])
), ([0, 1, 4], [4, 6, 0])
), ([0, 2, 4], [1, 3, 4])
), ([0], [5])
), ([1, 2], [7, 1])
), ([0, 1, 4], [8, 4, 0])
), ([1, 2], [5, 1])
), ([0, 1, 4], [7, 5, 0])
), ([1, 2, 3], [7, 1, 3])
), ([1], [0])
), ([1, 4], [3, 1])
), ([0, 3, 4], [5, 4, 0])
), ([1, 4], [5, 0])
), ([0, 3, 4], [7, 4, 0])
), ([0, 2, 3], [7, 0, 2])
), ([0, 2, 4], [3, 4, 5])
), ([0, 3], [5, 0])
), ([0, 2, 3, 5], [5, 0, 4, 1])
), ([0, 1, 2, 3], [4, 0, 1, 3])
), ([0, 1], [0, 1])
)
def Contract_scalar_4x3(\
t0_4,t1_4,t2_4,t3_4,t4_4,t5_4,\
t0_3,t1_3,t2_3,t3_3,t4_3,t5_3,\
t0_2,t1_2,t2_2,t3_2,t4_2,t5_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,t5_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,t5_0,\
o1_3,o2_3,o3_3,o4_3,\
o1_2,o2_2,o3_2,o4_2,\
o1_1,o2_1,o3_1,o4_1\
):
##############################
# ./input/input_Lx4Ly3.dat
##############################
# (o1_2*(t1_2*((t0_2*(t1_1*((o1_1*t1_1.conj())*(t0_1*(t0_0*t1_0)))))*(t1_2.conj()*((t1_3*((o1_3*t1_3.conj())*(t0_3*(t0_4*t1_4))))*(t2_4*(t2_3*((t2_3.conj()*o2_3)*(t2_2*((o2_2*t2_2.conj())*(t2_1*((t2_1.conj()*o2_1)*(t2_0*(t3_4*(t3_3.conj()*((o3_3*t3_3)*(t3_2.conj()*((t3_2*o3_2)*(t3_1.conj()*((t3_1*o3_1)*(t3_0*((t4_3.conj()*((o4_3*t4_3)*(t5_3*(t5_4*t4_4))))*(t4_2*((t4_2.conj()*o4_2)*(t5_2*(t4_1.conj()*((t4_1*o4_1)*(t4_0*(t5_0*t5_1)))))))))))))))))))))))))))))
# cpu_cost= 3.0102e+15 memory= 3.0101e+12
# final_bond_order ()
##############################
return np.tensordot(
o1_2, np.tensordot(
t1_2, np.tensordot(
np.tensordot(
t0_2, np.tensordot(
t1_1, np.tensordot(
np.tensordot(
o1_1, t1_1.conj(), ([1], [4])
), np.tensordot(
t0_1, np.tensordot(
t0_0, t1_0, ([0], [1])
), ([0], [0])
), ([1, 4], [2, 5])
), ([0, 3, 4], [4, 6, 0])
), ([0], [4])
), np.tensordot(
t1_2.conj(), np.tensordot(
np.tensordot(
t1_3, np.tensordot(
np.tensordot(
o1_3, t1_3.conj(), ([1], [4])
), np.tensordot(
t0_3, np.tensordot(
t0_4, t1_4, ([1], [0])
), ([1], [0])
), ([1, 2], [2, 5])
), ([0, 1, 4], [4, 6, 0])
), np.tensordot(
t2_4, np.tensordot(
t2_3, np.tensordot(
np.tensordot(
t2_3.conj(), o2_3, ([4], [1])
), np.tensordot(
t2_2, np.tensordot(
np.tensordot(
o2_2, t2_2.conj(), ([1], [4])
), np.tensordot(
t2_1, np.tensordot(
np.tensordot(
t2_1.conj(), o2_1, ([4], [1])
), np.tensordot(
t2_0, np.tensordot(
t3_4, np.tensordot(
t3_3.conj(), np.tensordot(
np.tensordot(
o3_3, t3_3, ([0], [4])
), np.tensordot(
t3_2.conj(), np.tensordot(
np.tensordot(
t3_2, o3_2, ([4], [0])
), np.tensordot(
t3_1.conj(), np.tensordot(
np.tensordot(
t3_1, o3_1, ([4], [0])
), np.tensordot(
t3_0, np.tensordot(
np.tensordot(
t4_3.conj(), np.tensordot(
np.tensordot(
o4_3, t4_3, ([0], [4])
), np.tensordot(
t5_3, np.tensordot(
t5_4, t4_4, ([0], [1])
), ([0], [0])
), ([2, 3], [4, 1])
), ([1, 2, 4], [6, 4, 0])
), np.tensordot(
t4_2, np.tensordot(
np.tensordot(
t4_2.conj(), o4_2, ([4], [1])
), np.tensordot(
t5_2, np.tensordot(
t4_1.conj(), np.tensordot(
np.tensordot(
t4_1, o4_1, ([4], [0])
), np.tensordot(
t4_0, np.tensordot(
t5_0, t5_1, ([0], [1])
), ([0], [0])
), ([2, 3], [4, 1])
), ([2, 3, 4], [6, 4, 2])
), ([1], [5])
), ([2, 3], [2, 4])
), ([2, 3, 4], [4, 7, 2])
), ([1, 3, 4], [3, 1, 4])
), ([0], [7])
), ([2, 3], [9, 1])
), ([2, 3, 4], [10, 4, 2])
), ([2, 3], [8, 3])
), ([2, 3, 4], [10, 4, 2])
), ([3, 4], [8, 3])
), ([2, 3, 4], [9, 4, 0])
), ([1, 2, 3], [9, 3, 1])
), ([0], [7])
), ([2, 3], [8, 2])
), ([2, 3, 4], [10, 4, 2])
), ([3, 4], [8, 3])
), ([2, 3, 4], [10, 4, 0])
), ([2, 3], [8, 3])
), ([2, 3, 4], [10, 4, 2])
), ([1, 2, 3], [9, 1, 3])
), ([0, 2, 5], [1, 2, 0])
), ([1, 2], [1, 4])
), ([0, 2, 4, 5, 6, 7], [4, 0, 6, 1, 7, 8])
), ([0, 1, 2, 3], [0, 3, 4, 1])
), ([0, 1], [0, 1])
)
def Contract_scalar_5x1(\
t0_2,t1_2,t2_2,t3_2,t4_2,t5_2,t6_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,t5_1,t6_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,t5_0,t6_0,\
o1_1,o2_1,o3_1,o4_1,o5_1\
):
##############################
# ./input/input_Lx5Ly1.dat
##############################
# (o2_1*(t2_1.conj()*((t2_2*(t1_0*(t1_1.conj()*((o1_1*t1_1)*(t1_2*(t0_0*(t0_1*t0_2)))))))*(t2_1*(t2_0*(t3_2*(t3_1.conj()*((t3_1*o3_1)*(t3_0*(t4_0*(t4_1.conj()*((o4_1*t4_1)*(t4_2*(t5_0*(t5_1.conj()*((o5_1*t5_1)*(t5_2*(t6_0*(t6_2*t6_1)))))))))))))))))))
# cpu_cost= 3.004e+11 memory= 5.0206e+08
# final_bond_order ()
##############################
return np.tensordot(
o2_1, np.tensordot(
t2_1.conj(), np.tensordot(
np.tensordot(
t2_2, np.tensordot(
t1_0, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
o1_1, t1_1, ([0], [4])
), np.tensordot(
t1_2, np.tensordot(
t0_0, np.tensordot(
t0_1, t0_2, ([1], [0])
), ([1], [0])
), ([0], [3])
), ([1, 2], [4, 1])
), ([0, 1, 4], [6, 4, 0])
), ([1, 2, 3], [5, 3, 1])
), ([0], [3])
), np.tensordot(
t2_1, np.tensordot(
t2_0, np.tensordot(
t3_2, np.tensordot(
t3_1.conj(), np.tensordot(
np.tensordot(
t3_1, o3_1, ([4], [0])
), np.tensordot(
t3_0, np.tensordot(
t4_0, np.tensordot(
t4_1.conj(), np.tensordot(
np.tensordot(
o4_1, t4_1, ([0], [4])
), np.tensordot(
t4_2, np.tensordot(
t5_0, np.tensordot(
t5_1.conj(), np.tensordot(
np.tensordot(
o5_1, t5_1, ([0], [4])
), np.tensordot(
t5_2, np.tensordot(
t6_0, np.tensordot(
t6_2, t6_1, ([1], [0])
), ([0], [1])
), ([1], [1])
), ([2, 3], [1, 4])
), ([1, 2, 4], [4, 6, 0])
), ([0, 2, 3], [5, 3, 1])
), ([1], [3])
), ([2, 3], [1, 5])
), ([1, 2, 4], [4, 6, 0])
), ([0, 2, 3], [5, 3, 1])
), ([0], [0])
), ([2, 3], [4, 1])
), ([2, 3, 4], [5, 4, 2])
), ([1, 2, 3], [5, 3, 1])
), ([0], [3])
), ([2, 3], [5, 1])
), ([0, 1, 3, 5], [5, 1, 3, 0])
), ([0, 1, 2, 3], [1, 0, 4, 3])
), ([0, 1], [1, 0])
)
def Contract_scalar_5x2(\
t0_3,t1_3,t2_3,t3_3,t4_3,t5_3,t6_3,\
t0_2,t1_2,t2_2,t3_2,t4_2,t5_2,t6_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,t5_1,t6_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,t5_0,t6_0,\
o1_2,o2_2,o3_2,o4_2,o5_2,\
o1_1,o2_1,o3_1,o4_1,o5_1\
):
##############################
# ./input/input_Lx5Ly2.dat
##############################
# (o3_1*(t3_1*((t3_0*(t4_0*(t4_1.conj()*((o4_1*t4_1)*(t4_2.conj()*((o4_2*t4_2)*(t4_3*((t5_2*((o5_2*t5_2.conj())*(t6_2*(t6_3*t5_3))))*(t5_1.conj()*((t5_1*o5_1)*(t5_0*(t6_0*t6_1))))))))))))*(t3_1.conj()*(t3_2*((t3_2.conj()*o3_2)*(t3_3*(t2_3*(t2_2*((t2_2.conj()*o2_2)*(t2_1*((o2_1*t2_1.conj())*(t2_0*((t1_2*((t1_2.conj()*o1_2)*(t0_2*(t0_3*t1_3))))*(t1_1*((t1_1.conj()*o1_1)*(t0_1*(t0_0*t1_0))))))))))))))))))
# cpu_cost= 3.22004e+13 memory= 5.00021e+10
# final_bond_order ()
##############################
return np.tensordot(
o3_1, np.tensordot(
t3_1, np.tensordot(
np.tensordot(
t3_0, np.tensordot(
t4_0, np.tensordot(
t4_1.conj(), np.tensordot(
np.tensordot(
o4_1, t4_1, ([0], [4])
), np.tensordot(
t4_2.conj(), np.tensordot(
np.tensordot(
o4_2, t4_2, ([0], [4])
), np.tensordot(
t4_3, np.tensordot(
np.tensordot(
t5_2, np.tensordot(
np.tensordot(
o5_2, t5_2.conj(), ([1], [4])
), np.tensordot(
t6_2, np.tensordot(
t6_3, t5_3, ([0], [1])
), ([0], [0])
), ([2, 3], [5, 2])
), ([1, 2, 4], [6, 4, 0])
), np.tensordot(
t5_1.conj(), np.tensordot(
np.tensordot(
t5_1, o5_1, ([4], [0])
), np.tensordot(
t5_0, np.tensordot(
t6_0, t6_1, ([0], [1])
), ([0], [0])
), ([2, 3], [4, 1])
), ([2, 3, 4], [6, 4, 2])
), ([1, 3, 4], [3, 1, 5])
), ([1], [2])
), ([2, 3], [1, 3])
), ([1, 2, 4], [4, 5, 0])
), ([2, 3], [3, 6])
), ([1, 2, 4], [4, 7, 0])
), ([0, 2, 3], [7, 3, 1])
), ([0], [0])
), np.tensordot(
t3_1.conj(), np.tensordot(
t3_2, np.tensordot(
np.tensordot(
t3_2.conj(), o3_2, ([4], [1])
), np.tensordot(
t3_3, np.tensordot(
t2_3, np.tensordot(
t2_2, np.tensordot(
np.tensordot(
t2_2.conj(), o2_2, ([4], [1])
), np.tensordot(
t2_1, np.tensordot(
np.tensordot(
o2_1, t2_1.conj(), ([1], [4])
), np.tensordot(
t2_0, np.tensordot(
np.tensordot(
t1_2, np.tensordot(
np.tensordot(
t1_2.conj(), o1_2, ([4], [1])
), np.tensordot(
t0_2, np.tensordot(
t0_3, t1_3, ([1], [0])
), ([1], [0])
), ([0, 1], [2, 5])
), ([0, 1, 4], [4, 6, 2])
), np.tensordot(
t1_1, np.tensordot(
np.tensordot(
t1_1.conj(), o1_1, ([4], [1])
), np.tensordot(
t0_1, np.tensordot(
t0_0, t1_0, ([0], [1])
), ([0], [0])
), ([0, 3], [2, 5])
), ([0, 3, 4], [4, 6, 2])
), ([1, 3, 4], [0, 2, 4])
), ([1], [5])
), ([1, 4], [7, 2])
), ([0, 3, 4], [8, 4, 0])
), ([0, 3], [6, 2])
), ([0, 3, 4], [7, 3, 2])
), ([0, 2, 3], [7, 0, 2])
), ([0], [0])
), ([0, 1], [4, 2])
), ([0, 1, 4], [5, 4, 2])
), ([0, 1], [6, 3])
), ([0, 2, 3, 5, 6, 7], [8, 1, 0, 5, 3, 6])
), ([0, 1, 2, 3], [4, 3, 1, 0])
), ([0, 1], [0, 1])
)
def Contract_scalar_6x1(\
t0_2,t1_2,t2_2,t3_2,t4_2,t5_2,t6_2,t7_2,\
t0_1,t1_1,t2_1,t3_1,t4_1,t5_1,t6_1,t7_1,\
t0_0,t1_0,t2_0,t3_0,t4_0,t5_0,t6_0,t7_0,\
o1_1,o2_1,o3_1,o4_1,o5_1,o6_1\
):
##############################
# ./input/input_Lx6Ly1.dat
##############################
# (o3_1*(t3_1.conj()*((t3_0*(t2_2*(t2_1*((t2_1.conj()*o2_1)*(t2_0*(t1_0*(t1_1.conj()*((o1_1*t1_1)*(t1_2*(t0_0*(t0_2*t0_1)))))))))))*(t3_1*(t3_2*(t4_0*(t4_1.conj()*((o4_1*t4_1)*(t4_2*(t5_0*(t5_1*((o5_1*t5_1.conj())*(t5_2*(t6_0*(t6_1.conj()*((t6_1*o6_1)*(t6_2*(t7_0*(t7_2*t7_1)))))))))))))))))))
# cpu_cost= 3.604e+11 memory= 5.041e+08
# final_bond_order ()
##############################
return np.tensordot(
o3_1, np.tensordot(
t3_1.conj(), np.tensordot(
np.tensordot(
t3_0, np.tensordot(
t2_2, np.tensordot(
t2_1, np.tensordot(
np.tensordot(
t2_1.conj(), o2_1, ([4], [1])
), np.tensordot(
t2_0, np.tensordot(
t1_0, np.tensordot(
t1_1.conj(), np.tensordot(
np.tensordot(
o1_1, t1_1, ([0], [4])
), np.tensordot(
t1_2, np.tensordot(
t0_0, np.tensordot(
t0_2, t0_1, ([0], [1])
), ([1], [1])
), ([0], [1])
), ([1, 2], [4, 1])
), ([0, 1, 4], [6, 4, 0])
), ([1, 2, 3], [5, 3, 1])
), ([1], [0])
), ([0, 3], [3, 2])
), ([0, 3, 4], [5, 4, 2])
), ([0, 2, 3], [5, 0, 2])
), ([1], [3])
), np.tensordot(
t3_1, np.tensordot(
t3_2, np.tensordot(
t4_0, np.tensordot(
t4_1.conj(), np.tensordot(
np.tensordot(
o4_1, t4_1, ([0], [4])
), np.tensordot(
t4_2, np.tensordot(
t5_0, np.tensordot(
t5_1, np.tensordot(
np.tensordot(
o5_1, t5_1.conj(), ([1], [4])
), np.tensordot(
t5_2, np.tensordot(
t6_0, np.tensordot(
t6_1.conj(), np.tensordot(
np.tensordot(
t6_1, o6_1, ([4], [0])
), np.tensordot(
t6_2, np.tensordot(
t7_0, np.tensordot(
t7_2, t7_1, ([1], [0])
), ([0], [1])
), ([1], [1])
), ([1, 2], [1, 4])
), ([1, 2, 4], [4, 6, 2])
), ([0, 2, 3], [5, 3, 1])
), ([1], [3])
), ([2, 3], [2, 4])
), ([1, 2, 4], [4, 6, 0])
), ([0, 2, 3], [5, 1, 3])
), ([1], [3])
), ([2, 3], [1, 4])
), ([1, 2, 4], [4, 6, 0])
), ([0, 2, 3], [5, 3, 1])
), ([1], [3])
), ([1, 2], [1, 5])
), ([0, 1, 3, 4], [5, 1, 3, 0])
), ([0, 1, 2, 3], [1, 3, 4, 0])
), ([0, 1], [1, 0])
)
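# --------------------------------------------------------------------------
# Added illustrative sketch (not part of the generated contractions above):
# every routine in this module is built from nested np.tensordot calls, so a
# minimal reminder of the axes convention may help when reading them.
# np.tensordot(a, b, ([i, j], [k, l])) sums a's axes i, j against b's axes
# k, l; the result carries a's remaining axes first, then b's.  The tensor
# names and shapes below are arbitrary examples, not taken from the input
# files referenced in the headers above.
if __name__ == "__main__":
    import numpy as np
    a = np.random.rand(2, 3, 4)
    b = np.random.rand(4, 3, 5)
    # contract a_{x i j} with b_{j i y}: pair a's axes (1, 2) with b's (1, 0)
    c = np.tensordot(a, b, ([1, 2], [1, 0]))
    assert c.shape == (2, 5)
    # the same contraction written as an explicit einsum for comparison
    assert np.allclose(c, np.einsum('xij,jiy->xy', a, b))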
| 58.299304 | 465 | 0.206591 | 8,493 | 100,508 | 2.202284 | 0.0146 | 0.391093 | 0.104951 | 0.177609 | 0.933864 | 0.903978 | 0.852545 | 0.801272 | 0.742462 | 0.698033 | 0 | 0.19087 | 0.644287 | 100,508 | 1,723 | 466 | 58.33314 | 0.332289 | 0.074153 | 0 | 0.841597 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013522 | false | 0 | 0.003863 | 0.013522 | 0.030908 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11
| 4dc5c62659198107fb6e21165320068bfcdcefdf | 42,946 | py | Python | openbook_communities/tests/views/community/posts/test_views.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | ["MIT"] | 164 | 2019-07-29T17:59:06.000Z | 2022-03-19T21:36:01.000Z | openbook_communities/tests/views/community/posts/test_views.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | ["MIT"] | 188 | 2019-03-16T09:53:25.000Z | 2019-07-25T14:57:24.000Z | openbook_communities/tests/views/community/posts/test_views.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | ["MIT"] | 80 | 2019-08-03T17:49:08.000Z | 2022-02-28T16:56:33.000Z |
from django.urls import reverse
from faker import Faker
from openbook_common.tests.models import OpenbookAPITestCase
from rest_framework import status
import logging
import json
from openbook_common.tests.helpers import make_user, make_authentication_headers_for_user, \
make_community, make_fake_post_text, make_post_image, make_moderation_category
from openbook_communities.models import Community, CommunityNotificationsSubscription
from openbook_moderation.models import ModeratedObject
from openbook_notifications.models import CommunityNewPostNotification
from openbook_posts.models import Post, PostUserMention
from openbook_notifications.models import Notification
logger = logging.getLogger(__name__)
fake = Faker()
class CommunityPostsAPITest(OpenbookAPITestCase):
def test_can_retrieve_posts_from_public_community(self):
"""
should be able to retrieve the posts for a public community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type='P')
community_name = community.name
amount_of_community_posts = 5
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_posts_ids.append(community_member_post.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), len(community_posts_ids))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertIn(response_post_id, community_posts_ids)
def test_can_retrieve_posts_with_max_id_and_count(self):
"""
should be able to retrieve community posts with a max id and count
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type='P')
community_name = community.name
amount_of_community_posts = 10
count = 5
max_id = 6
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_posts_ids.append(community_member_post.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, {
'count': count,
'max_id': max_id
}, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(count, len(response_posts))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertTrue(response_post_id < max_id)
def test_can_retrieve_posts_from_private_community_member_of(self):
"""
should be able to retrieve the posts for a private community member of and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type='P')
community_name = community.name
other_user.invite_user_with_username_to_community_with_name(username=user.username,
community_name=community_name)
user.join_community_with_name(community_name)
amount_of_community_posts = 5
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member = make_user()
other_user.invite_user_with_username_to_community_with_name(username=community_member.username,
community_name=community_name)
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_posts_ids.append(community_member_post.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), len(community_posts_ids))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertIn(response_post_id, community_posts_ids)
def test_cannot_retrieve_posts_from_private_community_not_part_of(self):
"""
should not be able to retrieve the posts for a private community not part of and return 400
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type='T')
other_user.create_community_post(community_name=community.name,
text=make_fake_post_text())
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_cannot_retrieve_soft_deleted_posts_from_community(self):
"""
should not be able to retrieve soft deleted posts of a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type='P')
community_name = community.name
amount_of_community_posts = 5
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_member_post.soft_delete()
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(0, len(response_posts))
def test_cannot_retrieve_moderated_approved_posts_from_community(self):
"""
should not be able to retrieve moderated approved posts of a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator, type='P')
community_moderator = make_user()
community_moderator.join_community_with_name(community_name=community.name)
community_creator.add_moderator_with_username_to_community_with_name(username=community_moderator.username,
community_name=community.name)
community_name = community.name
post_reporter = make_user()
amount_of_community_posts = 5
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
moderation_category = make_moderation_category()
post_reporter.report_post(post=community_member_post, category_id=moderation_category.pk)
moderated_object = ModeratedObject.get_or_create_moderated_object_for_post(post=community_member_post,
category_id=moderation_category.pk)
community_moderator.approve_moderated_object(moderated_object=moderated_object)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(0, len(response_posts))
def test_cannot_retrieve_reported_posts_from_community(self):
"""
should not be able to retrieve reported posts of a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator)
user.join_community_with_name(community_name=community.name)
community_name = community.name
amount_of_community_posts = 5
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
moderation_category = make_moderation_category()
user.report_post(post=community_member_post, category_id=moderation_category.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(0, len(response_posts))
def test_can_retrieve_moderated_rejected_posts_from_community(self):
"""
should be able to retrieve moderated rejected posts of a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator, type='P')
community_moderator = make_user()
community_moderator.join_community_with_name(community_name=community.name)
community_creator.add_moderator_with_username_to_community_with_name(username=community_moderator.username,
community_name=community.name)
community_name = community.name
post_reporter = make_user()
community_posts_ids = []
amount_of_community_posts = 5
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_posts_ids.append(community_member_post.pk)
moderation_category = make_moderation_category()
post_reporter.report_post(post=community_member_post, category_id=moderation_category.pk)
moderated_object = ModeratedObject.get_or_create_moderated_object_for_post(post=community_member_post,
category_id=moderation_category.pk)
community_moderator.reject_moderated_object(moderated_object=moderated_object)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), len(community_posts_ids))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertIn(response_post_id, community_posts_ids)
def test_can_retrieve_moderated_pending_posts_from_community(self):
"""
should be able to retrieve moderated pending posts of a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator, type='P')
community_name = community.name
post_reporter = make_user()
amount_of_community_posts = 5
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_posts_ids.append(community_member_post.pk)
moderation_category = make_moderation_category()
post_reporter.report_post(post=community_member_post, category_id=moderation_category.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), len(community_posts_ids))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertIn(response_post_id, community_posts_ids)
def test_cannot_retrieve_posts_from_community_banned_from(self):
"""
should not be able to retrieve the posts for a community banned from and return 403
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_owner = make_user()
community = make_community(creator=community_owner)
community_owner.create_community_post(community_name=community.name,
text=make_fake_post_text())
user.join_community_with_name(community_name=community.name)
community_owner.ban_user_with_username_from_community_with_name(username=user.username,
community_name=community.name)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_cannot_retrieve_posts_from_blocked_user(self):
"""
should not retrieve the community posts of a blocked user and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_owner = make_user()
community = make_community(creator=community_owner)
user_to_block = make_user()
user_to_block.join_community_with_name(community_name=community.name)
user_to_block.create_community_post(community_name=community.name,
text=make_fake_post_text())
user.block_user_with_id(user_id=user_to_block.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(status.HTTP_200_OK, response.status_code)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), 0)
def test_cannot_retrieve_posts_from_blocking_user(self):
"""
should not retrieve the community posts of a blocking user and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_owner = make_user()
community = make_community(creator=community_owner)
user_to_block = make_user()
user_to_block.join_community_with_name(community_name=community.name)
user_to_block.create_community_post(community_name=community.name,
text=make_fake_post_text())
user_to_block.block_user_with_id(user_id=user.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(status.HTTP_200_OK, response.status_code)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), 0)
def test_can_retrieve_posts_from_blocked_staff_member(self):
"""
should be able to retrieve the community posts for a blocked staff member and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_owner = make_user()
community = make_community(creator=community_owner)
post = community_owner.create_community_post(community_name=community.name,
text=make_fake_post_text())
user.block_user_with_id(user_id=community_owner.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(status.HTTP_200_OK, response.status_code)
response_posts = json.loads(response.content)
self.assertEqual(1, len(response_posts))
response_post = response_posts[0]
response_post_id = response_post.get('id')
self.assertEqual(response_post_id, post.pk)
def test_can_retrieve_posts_from_blocking_staff_member(self):
"""
should be able to retrieve the community posts for a blocking staff member and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_owner = make_user()
community = make_community(creator=community_owner)
post = community_owner.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_owner.block_user_with_id(user_id=user.pk)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(status.HTTP_200_OK, response.status_code)
response_posts = json.loads(response.content)
self.assertEqual(1, len(response_posts))
response_post = response_posts[0]
response_post_id = response_post.get('id')
self.assertEqual(response_post_id, post.pk)
def test_can_retrieve_posts_from_blocking_member_if_staff(self):
"""
should be able to retrieve the community posts of a blocking member if staff and return 200
"""
user = make_user()
community_owner = make_user()
community = make_community(creator=community_owner)
user.join_community_with_name(community_name=community.name)
post = user.create_community_post(community_name=community.name,
text=make_fake_post_text())
user.block_user_with_id(user_id=community_owner.pk)
headers = make_authentication_headers_for_user(community_owner)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(status.HTTP_200_OK, response.status_code)
response_posts = json.loads(response.content)
self.assertEqual(1, len(response_posts))
response_post = response_posts[0]
response_post_id = response_post.get('id')
self.assertEqual(response_post_id, post.pk)
def test_can_retrieve_posts_from_blocked_member_if_staff(self):
"""
should be able to retrieve the community posts of a blocked member if staff and return 200
"""
user = make_user()
community_owner = make_user()
community = make_community(creator=community_owner)
user.join_community_with_name(community_name=community.name)
post = user.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_owner.block_user_with_id(user_id=user.pk)
headers = make_authentication_headers_for_user(community_owner)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(status.HTTP_200_OK, response.status_code)
response_posts = json.loads(response.content)
self.assertEqual(1, len(response_posts))
response_post = response_posts[0]
response_post_id = response_post.get('id')
self.assertEqual(response_post_id, post.pk)
def test_can_create_community_text_post_part_of(self):
"""
should be able to create a post for a community part of and return 201
"""
user = make_user()
community_creator = make_user()
community = make_community(creator=community_creator, type='P')
user.join_community_with_name(community_name=community.name)
url = self._get_url(community_name=community.name)
post_text = make_fake_post_text()
headers = make_authentication_headers_for_user(user)
response = self.client.put(url, {
'text': post_text
}, **headers)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(Post.objects.filter(text=post_text).exists())
def test_can_create_community_image_post_part_of(self):
"""
should be able to create an image post for a community part of and return 201
"""
user = make_user()
community_creator = make_user()
community = make_community(creator=community_creator, type='P')
user.join_community_with_name(community_name=community.name)
url = self._get_url(community_name=community.name)
post_image = make_post_image()
headers = make_authentication_headers_for_user(user)
response = self.client.put(url, {
'image': post_image
}, **headers)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(Post.objects.filter(image__isnull=False).exists())
def test_can_create_community_post_draft(self):
"""
should be able to create a post draft for a community part of and return 201
"""
user = make_user()
community_creator = make_user()
community = make_community(creator=community_creator, type='P')
user.join_community_with_name(community_name=community.name)
url = self._get_url(community_name=community.name)
post_text = make_fake_post_text()
headers = make_authentication_headers_for_user(user)
response = self.client.put(url, {
'text': post_text,
'is_draft': True
}, **headers)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(user.posts.filter(text=post_text, status=Post.STATUS_DRAFT).count(), 1)
def test_cant_create_community_post_not_part_of(self):
"""
should not be able to create a post for a community not part of and return 400
"""
user = make_user()
community_creator = make_user()
community = make_community(creator=community_creator, type='P')
url = self._get_url(community_name=community.name)
post_text = make_fake_post_text()
headers = make_authentication_headers_for_user(user)
response = self.client.put(url, {
'text': post_text
}, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertFalse(Post.objects.filter(text=post_text).exists())
def test_create_public_community_post_detects_mention(self):
"""
should detect mentions when creating a public community post
"""
user = make_user()
headers = make_authentication_headers_for_user(user=user)
community = make_community()
mentioned_user = make_user()
user.join_community_with_name(community_name=community.name)
post_text = 'Hello @' + mentioned_user.username
data = {
'text': post_text,
}
url = self._get_url(community_name=community.name)
response = self.client.put(url, data, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
post = Post.objects.get(text=post_text, creator_id=user.pk)
self.assertTrue(PostUserMention.objects.filter(post_id=post.pk, user_id=mentioned_user.pk).exists())
def test_create_private_community_post_does_not_detects_mention_if_not_part_of(self):
"""
should not detect mentions of users who are not part of the private community when creating a post in it
"""
user = make_user()
headers = make_authentication_headers_for_user(user=user)
community_owner = make_user()
community = make_community(type=Community.COMMUNITY_TYPE_PRIVATE, creator=community_owner)
mentioned_user = make_user()
community_owner.invite_user_with_username_to_community_with_name(username=user.username,
community_name=community.name)
user.join_community_with_name(community_name=community.name)
post_text = 'Hello @' + mentioned_user.username
data = {
'text': post_text,
}
url = self._get_url(community_name=community.name)
response = self.client.put(url, data, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
post = Post.objects.get(text=post_text, creator_id=user.pk)
self.assertFalse(PostUserMention.objects.filter(post_id=post.pk, user_id=mentioned_user.pk).exists())
def test_create_private_community_post_detects_mention_if_part_of(self):
"""
should detect mentions of users who are part of the private community when creating a post in it
"""
user = make_user()
headers = make_authentication_headers_for_user(user=user)
community_owner = make_user()
community = make_community(type=Community.COMMUNITY_TYPE_PRIVATE, creator=community_owner)
mentioned_user = make_user()
community_owner.invite_user_with_username_to_community_with_name(username=user.username,
community_name=community.name)
user.join_community_with_name(community_name=community.name)
community_owner.invite_user_with_username_to_community_with_name(username=mentioned_user.username,
community_name=community.name)
mentioned_user.join_community_with_name(community_name=community.name)
post_text = 'Hello @' + mentioned_user.username
data = {
'text': post_text,
}
url = self._get_url(community_name=community.name)
response = self.client.put(url, data, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
post = Post.objects.get(text=post_text, creator_id=user.pk)
self.assertTrue(PostUserMention.objects.filter(post_id=post.pk, user_id=mentioned_user.pk).exists())
def test_create_community_post_notifies_subscribers(self):
"""
should notify subscribers when creating a community post
"""
user = make_user()
community_admin = make_user()
community = make_community(creator=community_admin, type='P')
user.join_community_with_name(community_name=community.name)
user.enable_new_post_notifications_for_community_with_name(community_name=community.name)
headers = make_authentication_headers_for_user(community_admin)
url = self._get_url(community_name=community.name)
data = {
'text': make_fake_post_text()
}
response = self.client.put(url, data, **headers, format='multipart')
community_notifications_subscription = CommunityNotificationsSubscription.objects.get(subscriber=user,
community=community)
self.assertEqual(CommunityNewPostNotification.objects.filter(
community_notifications_subscription_id=community_notifications_subscription.pk,
notification__owner_id=user.pk,
notification__notification_type=Notification.COMMUNITY_NEW_POST).count(),
1)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_create_community_post_does_not_notify_blocked_subscribers(self):
"""
should NOT notify subscribers who are blocked by creator/have blocked creator when creating a community post
"""
user = make_user()
blocking_user = make_user()
community_admin = make_user()
community = make_community(creator=community_admin, type='P')
user.join_community_with_name(community_name=community.name)
blocking_user.join_community_with_name(community_name=community.name)
user.enable_new_post_notifications_for_community_with_name(community_name=community.name)
blocking_user.enable_new_post_notifications_for_community_with_name(community_name=community.name)
blocking_user.block_user_with_id(user_id=user.pk)
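        # blocking_user has blocked the post creator, so they should not receive a new post notification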
headers = make_authentication_headers_for_user(user)
url = self._get_url(community_name=community.name)
data = {
'text': make_fake_post_text()
}
response = self.client.put(url, data, **headers, format='multipart')
community_notifications_subscription = CommunityNotificationsSubscription.objects.get(subscriber=blocking_user,
community=community)
self.assertFalse(CommunityNewPostNotification.objects.filter(
community_notifications_subscription_id=community_notifications_subscription.pk,
notification__owner_id=blocking_user.pk,
notification__notification_type=Notification.COMMUNITY_NEW_POST).exists())
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_create_community_post_does_notify_blocked_subscribers_if_admin(self):
"""
        should notify subscribed admins even when there is a block between them and the post creator
"""
user = make_user()
community_admin = make_user()
community = make_community(creator=community_admin, type='P')
user.join_community_with_name(community_name=community.name)
user.enable_new_post_notifications_for_community_with_name(community_name=community.name)
community_admin.enable_new_post_notifications_for_community_with_name(community_name=community.name)
community_admin.block_user_with_id(user_id=user.pk)
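        # the admin has blocked the post creator, but as an admin should still receive a notification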
headers = make_authentication_headers_for_user(user)
url = self._get_url(community_name=community.name)
data = {
'text': make_fake_post_text()
}
response = self.client.put(url, data, **headers, format='multipart')
community_notifications_subscription = CommunityNotificationsSubscription.objects.get(
subscriber=community_admin,
community=community)
self.assertTrue(CommunityNewPostNotification.objects.filter(
community_notifications_subscription_id=community_notifications_subscription.pk,
notification__owner_id=community_admin.pk,
notification__notification_type=Notification.COMMUNITY_NEW_POST).exists())
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_create_community_post_for_one_community_does_not_notify_admin_for_all_communities_they_are_subscribed_to(
self):
"""
        should notify admins who are subscribers only once, for the community in which the post was created
"""
user = make_user()
post_creator = make_user()
user_community = make_community(creator=user, type='P')
community_1 = make_community(creator=post_creator, type='P')
community_2 = make_community(creator=make_user(), type='P')
user.join_community_with_name(community_name=community_1.name)
user.join_community_with_name(community_name=community_2.name)
        # subscribe to all three communities
user.enable_new_post_notifications_for_community_with_name(community_name=community_1.name)
user.enable_new_post_notifications_for_community_with_name(community_name=community_2.name)
user.enable_new_post_notifications_for_community_with_name(community_name=user_community.name)
headers = make_authentication_headers_for_user(post_creator)
# post is created in community_1
url = self._get_url(community_name=community_1.name)
data = {
'text': make_fake_post_text()
}
response = self.client.put(url, data, **headers, format='multipart')
        # the notification should only be for the community in which the post was created
self.assertEqual(CommunityNewPostNotification.objects.filter(
notification__owner_id=user.pk,
notification__notification_type=Notification.COMMUNITY_NEW_POST).count(), 1)
community_notifications_subscription = CommunityNotificationsSubscription.objects.get(subscriber=user,
community=community_1)
        retrieved_notification = CommunityNewPostNotification.objects.get(
            notification__owner_id=user.pk,
            notification__notification_type=Notification.COMMUNITY_NEW_POST)
        # the notification must belong to the subscription for the community the post was created in
        self.assertEqual(retrieved_notification.community_notifications_subscription_id,
                         community_notifications_subscription.pk)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def _get_url(self, community_name):
return reverse('community-posts', kwargs={
'community_name': community_name
})
class CommunityClosedPostsAPITest(OpenbookAPITestCase):
def test_can_retrieve_closed_posts_from_community_if_administrator(self):
"""
should be able to retrieve closed posts for a community if administrator
"""
admin = make_user()
community = make_community(creator=admin, type='P')
community_name = community.name
amount_of_community_posts = 5
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_member_post.is_closed = True
community_member_post.save()
community_posts_ids.append(community_member_post.pk)
headers = make_authentication_headers_for_user(admin)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), len(community_posts_ids))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertIn(response_post_id, community_posts_ids)
def test_can_retrieve_closed_posts_from_community_if_moderator(self):
"""
should be able to retrieve closed posts for a community if moderator
"""
moderator = make_user()
admin = make_user()
community = make_community(creator=admin, type='P')
moderator.join_community_with_name(community_name=community.name)
admin.add_moderator_with_username_to_community_with_name(username=moderator.username,
community_name=community.name)
community_name = community.name
amount_of_community_posts = 5
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_member_post.is_closed = True
community_member_post.save()
community_posts_ids.append(community_member_post.pk)
headers = make_authentication_headers_for_user(moderator)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(len(response_posts), len(community_posts_ids))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertIn(response_post_id, community_posts_ids)
def test_can_retrieve_closed_posts_with_max_id_and_count(self):
"""
should be able to retrieve community closed posts with a max id and count if administrator/moderator
"""
admin = make_user()
community = make_community(creator=admin, type='P')
community_name = community.name
amount_of_community_posts = 10
count = 5
max_id = 6
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_member_post.is_closed = True
community_member_post.save()
community_posts_ids.append(community_member_post.pk)
url = self._get_url(community_name=community.name)
headers = make_authentication_headers_for_user(admin)
response = self.client.get(url, {
'count': count,
'max_id': max_id
}, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_posts = json.loads(response.content)
self.assertEqual(count, len(response_posts))
for response_post in response_posts:
response_post_id = response_post.get('id')
self.assertTrue(response_post_id < max_id)
def test_cannot_retrieve_closed_posts_from_community_if_member(self):
"""
should not be able to retrieve closed posts for a community if just a member
"""
admin = make_user()
community = make_community(creator=admin, type='P')
community_name = community.name
community_member = make_user()
community_member.join_community_with_name(community_name=community_name)
amount_of_community_posts = 5
community_posts_ids = []
for i in range(0, amount_of_community_posts):
community_member_post = community_member.create_community_post(community_name=community.name,
text=make_fake_post_text())
community_member_post.is_closed = True
community_member_post.save()
community_posts_ids.append(community_member_post.pk)
headers = make_authentication_headers_for_user(community_member)
url = self._get_url(community_name=community.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def _get_url(self, community_name):
return reverse('closed-community-posts', kwargs={
'community_name': community_name
})
class GetCommunityPostsCountAPITests(OpenbookAPITestCase):
def test_can_retrieve_posts_count(self):
"""
should be able to retrieve the posts count and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator)
community_name = community.name
amount_of_posts = 5
for i in range(0, amount_of_posts):
community_creator.create_community_post(
text=make_fake_post_text(), community_name=community_name
)
url = self._get_url(community_name=community_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertIn('posts_count', parsed_response)
response_posts_count = parsed_response['posts_count']
self.assertEqual(response_posts_count, amount_of_posts)
def _get_url(self, community_name):
return reverse('community-posts-count', kwargs={
'community_name': community_name
})
| 41.057361
| 122
| 0.67373
| 4,915
| 42,946
| 5.502136
| 0.038861
| 0.114891
| 0.101209
| 0.113449
| 0.910402
| 0.894243
| 0.877159
| 0.868617
| 0.854158
| 0.838664
| 0
| 0.006176
| 0.253528
| 42,946
| 1,045
| 123
| 41.096651
| 0.837388
| 0.061403
| 0
| 0.798485
| 0
| 0
| 0.008264
| 0.001087
| 0
| 0
| 0
| 0
| 0.113636
| 1
| 0.05303
| false
| 0
| 0.018182
| 0.004545
| 0.080303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4de81ae03d9c04fb06bb17ba707adda455e1e268
| 355
|
py
|
Python
|
torabot/mods/ehentai/spy/ehentai/test/test_rating.py
|
Answeror/torabot
|
b6260190ec1f0dc8bf3f7ba3512c0522668c59ed
|
[
"MIT"
] | 42
|
2015-01-20T10:45:08.000Z
|
2021-04-17T05:10:27.000Z
|
torabot/mods/ehentai/spy/ehentai/test/test_rating.py
|
Answeror/torabot
|
b6260190ec1f0dc8bf3f7ba3512c0522668c59ed
|
[
"MIT"
] | 4
|
2015-01-23T05:40:44.000Z
|
2016-12-19T03:52:20.000Z
|
torabot/mods/ehentai/spy/ehentai/test/test_rating.py
|
Answeror/torabot
|
b6260190ec1f0dc8bf3f7ba3512c0522668c59ed
|
[
"MIT"
] | 8
|
2015-05-07T03:51:05.000Z
|
2019-03-20T05:40:47.000Z
|
from nose.tools import assert_equal
from ..rating import parse_rating
def test_parse_rating():
assert_equal(parse_rating('background-position:-16px -21px; opacity:1'), 3.5)
assert_equal(parse_rating('background-position:-64px -21px; opacity:0.6'), 0.5)
assert_equal(parse_rating('background-position:0px -1px; opacity:0.66666666666667'), 5)
| 39.444444
| 91
| 0.76338
| 52
| 355
| 5.019231
| 0.461538
| 0.210728
| 0.183908
| 0.252874
| 0.467433
| 0.467433
| 0.314176
| 0
| 0
| 0
| 0
| 0.103448
| 0.101408
| 355
| 8
| 92
| 44.375
| 0.714734
| 0
| 0
| 0
| 0
| 0
| 0.394366
| 0.273239
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
4df1d2c1ac63539ecf3221553ca870ade6aa7908
| 6,722
|
py
|
Python
|
app/chat/message.py
|
nilinykh/slurk
|
5109527947dd231780363df12e01721f53247ca4
|
[
"BSD-3-Clause"
] | null | null | null |
app/chat/message.py
|
nilinykh/slurk
|
5109527947dd231780363df12e01721f53247ca4
|
[
"BSD-3-Clause"
] | null | null | null |
app/chat/message.py
|
nilinykh/slurk
|
5109527947dd231780363df12e01721f53247ca4
|
[
"BSD-3-Clause"
] | null | null | null |
from calendar import timegm
from datetime import datetime
from flask_socketio import emit
from flask_login import login_required, current_user
from .. import socketio
from ..models.user import User
from ..models.room import Room
from ..api.log import log_event
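# Socket.IO handlers for chat traffic: typing indicators, text messages, commands, and images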
@socketio.on('keypress')
def keypress(message):
last_typing = message.get('last_keypress', None)
    # explicit None check: a last_keypress value of 0 is falsy but still meaningful
    if last_typing is None:
return
current_user_id = current_user.get_id()
if not current_user_id:
return
for room in current_user.rooms:
user = {
'id': current_user_id,
'name': current_user.name,
}
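        # last_keypress 0 signals that typing started, 3 that it stopped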
if last_typing == 0:
emit('start_typing', {'user': user}, room=room.name)
elif last_typing == 3:
emit('stop_typing', {'user': user}, room=room.name)
@socketio.on('text')
@login_required
def message_text(payload):
current_user_id = current_user.get_id()
if not current_user_id:
return False, "invalid session id"
if not current_user.token.permissions.message_text:
return False, "insufficient rights"
if 'msg' not in payload:
return False, 'missing argument: "msg"'
if 'room' not in payload:
return False, 'missing argument: "room"'
broadcast = payload.get('broadcast', False)
if broadcast and not current_user.token.permissions.message_broadcast:
return False, "insufficient rights"
room = Room.query.get(payload['room'])
if not room:
return False, 'Room not found'
if room.read_only:
return False, 'Room "%s" is read-only' % room.label
if 'receiver_id' in payload:
if not current_user.token.permissions.message_text:
return False, 'You are not allowed to send private text messages'
receiver_id = payload['receiver_id']
user = User.query.get(receiver_id)
if not user or not user.session_id:
return False, 'User "%s" does not exist' % receiver_id
receiver = user.session_id
private = True
else:
receiver = room.name
private = False
user = {
'id': current_user_id,
'name': current_user.name,
}
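    # relay the message to the whole room, or only to the receiver's session when private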
emit('text_message', {
'msg': payload['msg'],
'user': user,
'room': room.name if room else None,
'timestamp': timegm(datetime.now().utctimetuple()),
'private': private,
'html': payload.get('html', False)
}, room=receiver, broadcast=broadcast)
log_event("text_message", current_user, room, data={'receiver': payload['receiver_id'] if private else None,
'message': payload['msg'], 'html': payload.get('html', False)})
for room in current_user.rooms:
emit('stop_typing', {'user': user}, room=room.name)
return True
@socketio.on('message_command')
@login_required
def message_command(payload):
current_user_id = current_user.get_id()
if not current_user_id:
return False, "invalid session id"
if not current_user.token.permissions.message_command:
return False, "insufficient rights"
if 'command' not in payload:
return False, 'missing argument: "command"'
if 'room' not in payload:
return False, 'missing argument: "room"'
broadcast = payload.get('broadcast', False)
if broadcast and not current_user.token.permissions.message_broadcast:
return False, "insufficient rights"
room = Room.query.get(payload['room'])
if not room:
return False, 'Room not found'
if 'receiver_id' in payload:
receiver_id = payload['receiver_id']
user = User.query.get(receiver_id)
if not user or not user.session_id:
return False, 'User "%s" does not exist' % receiver_id
receiver = user.session_id
private = True
else:
receiver = room.name
private = False
user = {
'id': current_user_id,
'name': current_user.name,
}
emit('command', {
'command': payload['command'],
'user': user,
'room': room.name if room else None,
'timestamp': timegm(datetime.now().utctimetuple()),
'private': private,
}, room=receiver, broadcast=broadcast)
log_event("command", current_user, room, data={'receiver': payload['receiver_id'] if private else None, 'command':
payload['command']})
for room in current_user.rooms:
emit('stop_typing', {'user': user}, room=room.name)
return True
@socketio.on('image')
@login_required
def message_image(payload):
current_user_id = current_user.get_id()
if not current_user_id:
return False, "invalid session id"
if not current_user.token.permissions.message_image:
return False, "insufficient rights"
if 'url' not in payload:
return False, 'missing argument: "url"'
if 'room' not in payload:
return False, 'missing argument: "room"'
broadcast = payload.get('broadcast', False)
if broadcast and not current_user.token.permissions.message_broadcast:
return False, "insufficient rights"
room = Room.query.get(payload['room'])
    if not room:
        return False, 'Room not found'
    if room.read_only:
return False, 'Room "%s" is read-only' % room.label
if 'receiver_id' in payload:
if not current_user.token.permissions.message_text:
return False, 'You are not allowed to send private image messages'
receiver_id = payload['receiver_id']
user = User.query.get(receiver_id)
if not user or not user.session_id:
return False, 'User "%s" does not exist' % receiver_id
receiver = user.session_id
private = True
else:
receiver = room.name
private = False
user = {
'id': current_user_id,
'name': current_user.name,
}
width = payload['width'] if 'width' in payload else None
height = payload['height'] if 'height' in payload else None
emit('image_message', {
'url': payload['url'],
'user': user,
'width': width,
'height': height,
'room': room.name if room else None,
'timestamp': timegm(datetime.now().utctimetuple()),
'private': private,
}, room=receiver, broadcast=broadcast)
log_event("image_message", current_user, room, data={'receiver': payload['receiver_id'] if private else None,
'url': payload['url'],
'width': width,
'height': height})
for room in current_user.rooms:
emit('stop_typing', {'user': user}, room=room.name)
return True
| 34.121827
| 119
| 0.617673
| 819
| 6,722
| 4.930403
| 0.100122
| 0.098068
| 0.038633
| 0.035661
| 0.804111
| 0.767707
| 0.745666
| 0.717434
| 0.709014
| 0.699604
| 0
| 0.000408
| 0.271348
| 6,722
| 196
| 120
| 34.295918
| 0.82401
| 0
| 0
| 0.705882
| 0
| 0
| 0.169592
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023529
| false
| 0
| 0.047059
| 0
| 0.241176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1511755dae762e902ce90ba11b463f73f35afa94
| 57,992
|
py
|
Python
|
test/test_cli.py
|
MichaelGoodale/opensauce-python
|
cafad071fa1ed675b4e7177b37ed41af94b39c5f
|
[
"Apache-2.0"
] | 38
|
2015-02-10T08:35:50.000Z
|
2022-03-15T10:56:40.000Z
|
test/test_cli.py
|
MichaelGoodale/opensauce-python
|
cafad071fa1ed675b4e7177b37ed41af94b39c5f
|
[
"Apache-2.0"
] | 37
|
2015-09-23T00:17:07.000Z
|
2022-02-24T17:52:56.000Z
|
test/test_cli.py
|
CobiELF/opensauce-python
|
03c278ca92b150188821dadfc9702ff9f939aa4e
|
[
"Apache-2.0"
] | 11
|
2018-08-28T06:41:41.000Z
|
2022-01-21T05:07:40.000Z
|
import contextlib
import os
import sys
import textwrap
import re
import unittest
import numpy as np
from sys import platform
from shutil import copytree
from subprocess import Popen, PIPE
from opensauce.__main__ import CLI
from opensauce.snack import sformant_names
from test.support import TestCase, data_file_path, sound_file_path, py2, parameterize, CLI_output
using_conda = (re.match('.*conda.*', sys.version) is not None) or (re.match('.*Continuum.*', sys.version) is not None)
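# heuristic: detect an Anaconda/conda interpreter, where calling Snack through Tkinter is not supported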
class TestCommandIO(TestCase):
def _make_file(self, lines):
lines = textwrap.dedent(lines.lstrip('\n'))
tmp = self.tmpdir()
settingsfn = os.path.join(tmp, 'settings')
with open(settingsfn, 'w') as f:
f.write(lines)
return settingsfn
def test_m(self):
here = os.path.dirname(os.path.dirname(__file__))
here = here if here else '.'
p = Popen([sys.executable, '-m', 'opensauce'], cwd=here,
stdout=PIPE,
stderr=PIPE,
universal_newlines=True,
)
out, err = p.communicate()
self.assertEqual(out, '')
if py2:
self.assertIn('too few arguments', err)
else:
self.assertIn('the following arguments are required', err)
self.assertEqual(p.returncode, 2)
def test_ignore_label(self):
lines = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--ignore-label', 'C2',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 585 - 118)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
def test_ignore_multiple_labels(self):
lines = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--ignore-label', 'C2',
'--ignore-label', 'V1',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 585 - 118 - 208)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 0)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
def test_include_empty_labels(self):
lines = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--include-empty-labels',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 2341)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
def test_no_f0_column(self):
lines = CLI_output(self, '\t', [
'--measurements', 'SHR',
'--no-f0-column',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines[0] if 'F0' in x]), 0)
def test_include_f0_column(self):
lines = CLI_output(self, '\t', [
'--measurements', 'SHR',
'--include-f0-column',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 7)
self.assertEqual(len([x for x in lines[0] if 'F0' in x]), 1)
def test_no_formant_cols(self):
lines = CLI_output(self, '\t', [
'--measurements', 'SHR',
'--no-formant-cols',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines[0] if 'pF' in x]), 0)
self.assertEqual(len([x for x in lines[0] if 'pB' in x]), 0)
def test_include_formant_cols(self):
lines = CLI_output(self, '\t', [
'--measurements', 'praatFormants',
'--include-formant-cols',
'--num-formants', '4',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
formant_col_names = ['pF1', 'pF2', 'pF3', 'pF4',
'pB1', 'pB2', 'pB3', 'pB4']
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 13)
self.assertListEqual(lines[0][-8:], formant_col_names)
def test_no_textgrid(self):
lines = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--no-textgrid',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 2341)
self.assertEqual(len(lines[1]), 3)
self.assertEqual(lines[0], ['Filename', 't_ms', 'snackF0'])
self.assertEqual(len([x for x in lines if 'C1' in x]), 0)
self.assertEqual(len([x for x in lines if 'V1' in x]), 0)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
self.assertEqual(len([x for x in lines if 'V2' in x]), 0)
def test_use_textgrid(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--use-textgrid',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
def test_use_textgrid_but_doesnt_exist(self):
lines = CLI_output(self, '\t', [
data_file_path(os.path.join('cli', 'beijing_f3_50_a.wav')),
'--measurements', 'snackF0',
'--use-textgrid',
'--no-output-settings',
])
self.assertEqual(len(lines), 2342)
self.assertEqual(len(lines[0]), 6)
self.assertIn('Found no TextGrid for', lines[1][0])
self.assertEqual(len([x for x in lines if 'C1' in x]), 0)
self.assertEqual(len([x for x in lines if 'V1' in x]), 0)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
self.assertEqual(len([x for x in lines if 'V2' in x]), 0)
def test_no_labels(self):
lines = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--no-labels',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav')
])
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 3)
self.assertEqual(lines[0], ['Filename', 't_ms', 'snackF0'])
self.assertEqual(len([x for x in lines if 'C1' in x]), 0)
self.assertEqual(len([x for x in lines if 'V1' in x]), 0)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
self.assertEqual(len([x for x in lines if 'V2' in x]), 0)
def test_include_labels(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--include-labels',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
def test_multiple_input_files(self):
lines = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--include-empty-labels',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
sound_file_path('beijing_m5_17_c.wav'),
sound_file_path('hmong_f4_24_d.wav'),
])
self.assertEqual(len(lines), 6100)
        # The first of these is one less than the number of lines in the single
# file equivalent test above because there we were counting the header
# line and here we are not.
self.assertEqual(len([x for x in lines
if 'beijing_f3_50_a.wav' in x]), 2340)
self.assertEqual(len([x for x in lines
if 'beijing_m5_17_c.wav' in x]), 1667)
self.assertEqual(len([x for x in lines
if 'hmong_f4_24_d.wav' in x]), 2092)
def test_at_least_one_input_file_required(self):
with self.assertArgparseError(['too few arguments'], ['required', 'wavfile']):
CLI([])
def test_at_least_one_measurement_required(self):
with self.assertArgparseError(['[Nn]o measurements']):
CLI([sound_file_path('beijing_f3_50_a.wav')])
def test_settings(self):
settingsfn = self._make_file("""
include-empty-labels
ignore-label C2
""")
lines = CLI_output(self, '\t', [
'--settings', settingsfn,
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341 - 118)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
def test_settings_default_file(self):
settingsfn = self._make_file("""
include-empty-labels
""")
with self.patch(CLI, 'settings_locs', [settingsfn]):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
def test_settings_option_invalid_in_settings_file(self):
settingsfn = self._make_file("""
include-empty-labels
settings somefile
ignore-label
""")
with self.assertArgparseError(['settings', settingsfn]):
CLI(['--settings', settingsfn])
def test_measurements_in_settings(self):
settingsfn = self._make_file("""
measurements snackF0
include-empty-labels
""")
lines = CLI_output(self, '\t', [
'--settings', settingsfn,
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 2341)
self.assertIn('snackF0', lines[0])
self.assertEqual(len(lines[1]), 6)
def test_measurements_cant_be_last_line_in_settings(self):
        # This is because it would eat the filenames if it were the last line and
        # no other options were specified on the command line before the filenames.
settingsfn = self._make_file("""
include-empty-labels
measurements snackF0
""")
with self.assertArgparseError(['measurements', settingsfn, 'last']):
CLI(['--settings', settingsfn])
def test_invalid_measurement_rejected(self):
settingsfn = self._make_file("""
measurements thereisnosuchmeasurement
include-empty-labels
""")
with self.assertArgparseError(['thereisnosuchmeasurement']):
CLI(['--settings', settingsfn])
def test_multiple_measurements(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'shrF0', 'snackF0', 'SHR',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-3:], ['shrF0', 'snackF0', 'SHR'])
self.assertEqual(len(lines[1]), 8)
def test_measurements_from_file(self):
measurefn = self._make_file("""
snackF0
shrF0
""")
lines = CLI_output(self, '\t', [
'--default-measurements-file', measurefn,
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-2:], ['snackF0', 'shrF0'])
self.assertEqual(len(lines[1]), 7)
def test_measurements_default_file(self):
measurefn = self._make_file("""
snackF0
shrF0
""")
with self.patch(CLI, 'measurements_locs', [measurefn]):
lines = CLI_output(self, '\t', [
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-2:], ['snackF0', 'shrF0'])
self.assertEqual(len(lines[1]), 7)
def test_invalid_measurements_from_file(self):
measurefn = self._make_file("""
nosuchmeasurement
""")
with self.assertArgparseError(['nosuchmeasurement', '0', measurefn]):
CLI(['-m', measurefn, 'NA'])
def test_output_filepath(self):
tmp = self.tmpdir()
outfile = os.path.join(tmp, 'output.txt')
CLI(['--include-f0-column',
'-o', outfile,
sound_file_path('beijing_f3_50_a.wav')]).process()
with open(outfile) as f:
lines = f.readlines()
self.assertEqual(len(lines), 585)
def test_output_delimiter_tab(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--no-textgrid',
'--output-delimiter', 'tab',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(lines[0], ['Filename', 't_ms', 'snackF0'])
def test_output_delimiter_comma(self):
lines = CLI_output(self, ',', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--no-textgrid',
'--output-delimiter', 'comma',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(lines[0], ['Filename', 't_ms', 'snackF0'])
def test_output_settings_stdout(self):
# Make sure there isn't already a settings file
# If so, remove it
if os.path.isfile('stdout.settings'):
os.remove('stdout.settings')
lines = CLI_output(self, '\t', [
'--include-f0-column',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertTrue(os.path.isfile('stdout.settings'))
# Check generated settings file
with open('stdout.settings') as f:
slines = f.readlines()
self.assertEqual(len(slines), 38)
self.assertEqual(slines[0].strip(), '--measurements snackF0')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 38)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-f0-column')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 0)
# Cleanup
os.remove('stdout.settings')
@unittest.skipIf(platform == 'win32' or platform == 'cygwin',
'No Windows support for pyreaper package')
def test_output_settings_stdout_using_pyreaper(self):
# Make sure there isn't already a settings file
# If so, remove it
if os.path.isfile('stdout.settings'):
os.remove('stdout.settings')
lines = CLI_output(self, '\t', [
'--measurements', 'reaperF0',
'--use-pyreaper',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertTrue(os.path.isfile('stdout.settings'))
# Check generated settings file
with open('stdout.settings') as f:
slines = f.readlines()
self.assertEqual(len(slines), 38)
self.assertEqual(slines[0].strip(), '--measurements reaperF0')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 38)
self.assertEqual(sum([1 for l in slines if l.startswith('--use-pyreaper')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--use-creaper')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 0)
# Cleanup
os.remove('stdout.settings')
def test_output_settings_with_output_filepath(self):
tmp = self.tmpdir()
outfile = os.path.join(tmp, 'output.txt')
lines = CLI_output(self, '\t', [
'--include-f0-column',
'-o', outfile,
sound_file_path('beijing_f3_50_a.wav'),
])
settings_path = outfile.split('.')[0] + '.settings'
self.assertTrue(os.path.isfile(settings_path))
# Check generated settings file
with open(settings_path) as f:
slines = f.readlines()
self.assertEqual(len(slines), 38)
self.assertEqual(slines[0].strip(), '--measurements snackF0')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 38)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-f0-column')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 0)
def test_no_output_settings_stdout(self):
if os.path.isfile('stdout.settings'):
os.remove('stdout.settings')
lines = CLI_output(self, '\t', [
'--include-f0-column',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertFalse(os.path.isfile('stdout.settings'))
def test_no_output_settings_with_output_filepath(self):
tmp = self.tmpdir()
outfile = os.path.join(tmp, 'output.txt')
lines = CLI_output(self, '\t', [
'--include-f0-column',
'-o', outfile,
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
settings_path = outfile.split('.')[0] + '.settings'
self.assertFalse(os.path.isfile(settings_path))
def test_output_settings_path_stdout(self):
tmp = self.tmpdir()
settings_path = os.path.join(tmp, 'output.settings')
lines = CLI_output(self, '\t', [
'--include-f0-column',
'--output-settings-path', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
# Check generated settings file
with open(settings_path) as f:
slines = f.readlines()
self.assertEqual(len(slines), 38)
self.assertEqual(slines[0].strip(), '--measurements snackF0')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 38)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-f0-column')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 0)
def test_output_settings_path_with_output_filepath(self):
tmp = self.tmpdir()
outfile = os.path.join(tmp, 'output.txt')
settings_path = outfile.split('.')[0] + '_unittest.settings'
lines = CLI_output(self, '\t', [
'--include-f0-column',
'-o', outfile,
'--output-settings-path', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertTrue(os.path.isfile(settings_path))
self.assertFalse(os.path.isfile(outfile.split('.')[0] + '.settings'))
# Check generated settings file
with open(settings_path) as f:
slines = f.readlines()
self.assertEqual(len(slines), 38)
self.assertEqual(slines[0].strip(), '--measurements snackF0')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 38)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-f0-column')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 0)
def test_output_settings_check_consistency(self):
# Output from using the generated settings file should match
# the original CLI execution
tmp = self.tmpdir()
settings_path = os.path.join(tmp, 'output.settings')
lines_stdout = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--use-textgrid',
'--no-labels',
'--output-settings-path', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
lines_sfile = CLI_output(self, '\t', [
'--settings', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines_stdout), 585)
self.assertEqual(len(lines_stdout[0]), 3)
# Check generated settings file
with open(settings_path) as f:
slines = f.readlines()
self.assertEqual(len(slines), 38)
self.assertEqual(slines[0].strip(), '--measurements snackF0')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 38)
self.assertEqual(sum([1 for l in slines if l.startswith('--use-textgrid')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--no-labels')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 0)
# Check consistency of output using generated settings file
self.assertEqual(lines_sfile, lines_stdout)
def test_output_settings_check_consistency_alternate_parameters(self):
# Output from using the generated settings file should match
# the original CLI execution
tmp = self.tmpdir()
settings_path = os.path.join(tmp, 'output.settings')
lines_stdout = CLI_output(self, '\t', [
'--measurements', 'praatFormants',
'--include-f0-column',
'--no-textgrid',
'--time-starts-at-frameshift',
'--include-interval-endpoint',
'--kill-octave-jumps',
'--interpolate',
'--smooth',
'--smooth-bandwidth', '10',
'--no-high-pass',
'--use-hilbert-transform',
'--output-settings-path', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
lines_sfile = CLI_output(self, '\t', [
'--settings', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines_stdout), 2342)
self.assertEqual(len(lines_stdout[0]), 11)
# Check generated settings file
with open(settings_path) as f:
slines = f.readlines()
self.assertEqual(len(slines), 44)
self.assertEqual(slines[0].strip(), '--measurements praatFormants snackF0')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 44)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-f0-column')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--no-textgrid')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--time-starts-at-frameshift')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-interval-endpoint')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 2)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth-bandwidth')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--no-high-pass')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--use-hilbert-transform')]), 1)
# Check consistency of output using generated settings file
self.assertEqual(lines_sfile, lines_stdout)
def test_output_settings_check_consistency_more_alternate_parameters(self):
# Output from using the generated settings file should match
# the original CLI execution
tmp = self.tmpdir()
settings_path = os.path.join(tmp, 'output.settings')
lines_stdout = CLI_output(self, '\t', [
'--measurements', 'snackF0',
'--include-formant-cols',
'--use-textgrid',
'--include-empty-labels',
'--output-settings-path', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
lines_sfile = CLI_output(self, '\t', [
'--settings', settings_path,
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines_stdout), 2341)
self.assertEqual(len(lines_stdout[0]), 14)
# Check generated settings file
with open(settings_path) as f:
slines = f.readlines()
self.assertEqual(len(slines), 39)
self.assertEqual(slines[0].strip(), '--measurements snackF0 praatFormants')
self.assertEqual(sum([1 for l in slines if l.startswith('--')]), 39)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-formant-cols')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--use-textgrid')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--include-empty-labels')]), 1)
self.assertEqual(sum([1 for l in slines if l.startswith('--kill-octave-jumps')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--interpolate')]), 0)
self.assertEqual(sum([1 for l in slines if l.startswith('--smooth')]), 0)
# Check consistency of output using generated settings file
self.assertEqual(lines_sfile, lines_stdout)
def test_time_starts_at_zero_no_textgrid(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--no-textgrid',
'--time-starts-at-zero',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(len(lines[1]), 3)
self.assertEqual(lines[0], ['Filename', 't_ms', 'snackF0'])
self.assertEqual(len([x for x in lines if 'C1' in x]), 0)
self.assertEqual(len([x for x in lines if 'V1' in x]), 0)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
self.assertEqual(len([x for x in lines if 'V2' in x]), 0)
self.assertEqual(lines[1][1], '0')
self.assertEqual(lines[-1][1], '2339')
def test_time_starts_at_zero_use_textgrid(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--use-textgrid',
'--include-empty-labels',
'--time-starts-at-zero',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(len(lines[1]), 6)
C1_lines = [x for x in lines if 'C1' in x]
V1_lines = [x for x in lines if 'V1' in x]
C2_lines = [x for x in lines if 'C2' in x]
V2_lines = [x for x in lines if 'V2' in x]
self.assertEqual(len(C1_lines), 100)
self.assertEqual(len(V1_lines), 208)
self.assertEqual(len(C2_lines), 118)
self.assertEqual(len(V2_lines), 158)
self.assertEqual(lines[1][-2], '0')
self.assertEqual(lines[-1][-2], '2339')
self.assertEqual(C1_lines[0][-2], '766')
self.assertEqual(C1_lines[-1][-2], '865')
self.assertEqual(V1_lines[0][-2], '866')
self.assertEqual(V1_lines[-1][-2], '1073')
self.assertEqual(C2_lines[0][-2], '1074')
self.assertEqual(C2_lines[-1][-2], '1191')
self.assertEqual(V2_lines[0][-2], '1192')
self.assertEqual(V2_lines[-1][-2], '1349')
def test_time_starts_at_frameshift_no_textgrid(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--no-textgrid',
'--time-starts-at-frameshift',
'--frame-shift', '1',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(len(lines[1]), 3)
self.assertEqual(lines[0], ['Filename', 't_ms', 'snackF0'])
self.assertEqual(len([x for x in lines if 'C1' in x]), 0)
self.assertEqual(len([x for x in lines if 'V1' in x]), 0)
self.assertEqual(len([x for x in lines if 'C2' in x]), 0)
self.assertEqual(len([x for x in lines if 'V2' in x]), 0)
self.assertEqual(lines[1][1], '1')
self.assertEqual(lines[-1][1], '2340')
def test_time_starts_at_frameshift_use_textgrid(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--use-textgrid',
'--include-empty-labels',
'--time-starts-at-frameshift',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(len(lines[1]), 6)
C1_lines = [x for x in lines if 'C1' in x]
V1_lines = [x for x in lines if 'V1' in x]
C2_lines = [x for x in lines if 'C2' in x]
V2_lines = [x for x in lines if 'V2' in x]
self.assertEqual(len(C1_lines), 100)
self.assertEqual(len(V1_lines), 208)
self.assertEqual(len(C2_lines), 118)
self.assertEqual(len(V2_lines), 158)
self.assertEqual(lines[1][-2], '1')
self.assertEqual(lines[-1][-2], '2340')
self.assertEqual(C1_lines[0][-2], '767')
self.assertEqual(C1_lines[-1][-2], '866')
self.assertEqual(V1_lines[0][-2], '867')
self.assertEqual(V1_lines[-1][-2], '1074')
self.assertEqual(C2_lines[0][-2], '1075')
self.assertEqual(C2_lines[-1][-2], '1192')
self.assertEqual(V2_lines[0][-2], '1193')
self.assertEqual(V2_lines[-1][-2], '1350')
def test_exclude_interval_endpoint(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--use-textgrid',
'--include-empty-labels',
'--time-starts-at-zero',
'--exclude-interval-endpoint',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(len(lines[1]), 6)
C1_lines = [x for x in lines if 'C1' in x]
V1_lines = [x for x in lines if 'V1' in x]
C2_lines = [x for x in lines if 'C2' in x]
V2_lines = [x for x in lines if 'V2' in x]
self.assertEqual(len(C1_lines), 100)
self.assertEqual(len(V1_lines), 208)
self.assertEqual(len(C2_lines), 118)
self.assertEqual(len(V2_lines), 158)
self.assertEqual(lines[1][-2], '0')
self.assertEqual(lines[-1][-2], '2339')
self.assertEqual(C1_lines[0][-2], '766')
self.assertEqual(C1_lines[-1][-2], '865')
self.assertEqual(V1_lines[0][-2], '866')
self.assertEqual(V1_lines[-1][-2], '1073')
self.assertEqual(C2_lines[0][-2], '1074')
self.assertEqual(C2_lines[-1][-2], '1191')
self.assertEqual(V2_lines[0][-2], '1192')
self.assertEqual(V2_lines[-1][-2], '1349')
def test_include_interval_endpoint(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--use-textgrid',
'--include-empty-labels',
'--time-starts-at-zero',
'--include-interval-endpoint',
'--no-output-settings',
])
self.assertEqual(len(lines), 2347)
self.assertEqual(len(lines[1]), 6)
C1_lines = [x for x in lines if 'C1' in x]
V1_lines = [x for x in lines if 'V1' in x]
C2_lines = [x for x in lines if 'C2' in x]
V2_lines = [x for x in lines if 'V2' in x]
self.assertEqual(len(C1_lines), 101)
self.assertEqual(len(V1_lines), 209)
self.assertEqual(len(C2_lines), 119)
self.assertEqual(len(V2_lines), 159)
self.assertEqual(lines[1][-2], '0')
self.assertEqual(lines[-1][-2], '2340')
self.assertEqual(C1_lines[0][-2], '766')
self.assertEqual(C1_lines[-1][-2], '866')
self.assertEqual(V1_lines[0][-2], '866')
self.assertEqual(V1_lines[-1][-2], '1074')
self.assertEqual(C2_lines[0][-2], '1074')
self.assertEqual(C2_lines[-1][-2], '1192')
self.assertEqual(V2_lines[0][-2], '1192')
self.assertEqual(V2_lines[-1][-2], '1350')
def test_default_NaN(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0', 'shrF0', 'SHR',
'--include-empty-labels',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(lines[0][-3:], ['snackF0', 'shrF0', 'SHR'])
self.assertEqual(len(lines[1]), 8)
self.assertEqual(lines[1][-2:], ['NaN', 'NaN'])
self.assertEqual(lines[-1][-3:], ['NaN', 'NaN', 'NaN'])
def test_alternate_NaN(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0', 'shrF0', 'SHR',
'--include-empty-labels',
'--NaN', 'mylabel',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(lines[0][-3:], ['snackF0', 'shrF0', 'SHR'])
self.assertEqual(len(lines[1]), 8)
self.assertEqual(lines[1][-2:], ['mylabel', 'mylabel'])
self.assertEqual(lines[-1][-3:], ['mylabel', 'mylabel', 'mylabel'])
def test_resample_negative_integer(self):
with self.assertArgparseError(['error: argument --resample-freq: -5 is an invalid positive integer value']):
lines = CLI([sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--resample-freq', '-5',
])
def test_resample_output(self):
spath = sound_file_path('beijing_f3_50_a.wav')
lines = CLI_output(self, '\t', [
spath,
'--measurements', 'snackF0',
'--include-empty-labels',
'--resample-freq', '16000',
'--no-output-settings',
])
self.assertEqual(len(lines), 2341)
self.assertEqual(lines[0][-1], 'snackF0')
self.assertEqual(len(lines[1]), 6)
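        # the temporary resampled wav file should not be left on disk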
self.assertFalse(os.path.exists(spath.split('.')[0] + '-resample-16000Hz.wav'))
@parameterize
class TestCommandF0(TestCase):
def test_alternate_F0(self):
lines = CLI_output(self, '\t', [
'--F0', 'shrF0',
'--include-F0-column',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['shrF0'])
self.assertEqual(len(lines[1]), 6)
def test_invalid_F0(self):
with self.assertArgparseError(['nosuchpitch']):
CLI(['--f0', 'nosuchpitch'])
def test_invalid_snack_method(self):
with self.assertArgparseError(['nosuchmethod']):
CLI(['--snack-method', 'nosuchmethod'])
def test_invalid_tcl_shell_cmd(self):
with self.assertRaisesRegex(OSError, 'nosuchcmd'):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--snack-method', 'tcl',
'--tcl-cmd', 'nosuchcmd',
])
def test_invalid_praat_f0_method(self):
with self.assertArgparseError(['nosuchmethod']):
CLI(['--praat-f0-method', 'nosuchmethod'])
def test_snackF0_method_tcl(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--snack-method', 'tcl',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['snackF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
@unittest.skipIf((platform == 'darwin') or using_conda,
'Method to call Snack through Tkinter not supported')
def test_snackF0_method_python(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--snack-method', 'python',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['snackF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
@unittest.skipUnless(platform == 'win32' or platform == 'cygwin',
'Requires Windows operating system')
def test_snackF0_method_exe(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackF0',
'--snack-method', 'exe',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['snackF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
def test_praatF0(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'praatF0',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['praatF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
def test_praatF0_empty_output_file(self):
err_msg = 'Praat error -- pitch calculation failed, check input parameters'
with self.assertRaisesRegex(OSError, err_msg):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'praatF0',
'--praat-min-f0', '400',
])
# XXX There is as yet no confirmation that the values being tested against
# here are accurate; these tests just prove the options have *some* effect.
def test_praatF0_alternate_method(self):
lines = CLI_output(self, '\t', [
'--measurements', 'praatF0',
'--praat-f0-method', 'ac',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['praatF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(lines[100],
['beijing_f3_50_a.wav', 'C1', '766.062', '865.632', '865',
'216.620'])
def test_reaperF0_default_parameters(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'reaperF0',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['reaperF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
@unittest.skipIf(platform == 'win32' or platform == 'cygwin',
'No Windows support for pyreaper package')
def test_reaperF0_using_pyreaper(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'reaperF0',
'--use-pyreaper',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['reaperF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
# XXX There is as yet no confirmation that the values being tested against
# here are accurate; these tests just prove the options have *some* effect.
def test_reaperF0_alternate_parameter_vals(self):
lines = CLI_output(self, '\t', [
'--measurements', 'reaperF0',
'--no-high-pass',
'--use-hilbert-transform',
'--inter-mark', '5',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-1:], ['reaperF0'])
self.assertEqual(len(lines[1]), 6)
self.assertEqual(lines[100],
['beijing_f3_50_a.wav', 'C1', '766.062', '865.632', '865',
'220.500'])
line100_prefix = ['beijing_f3_50_a.wav', 'C1', '766.062', '865.632', '865']
def _check_algos(self, algo_list):
self.assertEqual(sorted(algo_list), sorted(CLI._valid_f0), "Tests we have do not match tests we need")
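    # Parameter tables consumed by the @parameterize decorator (see test.support):
    # each pitch_algoN_params entry drives the matching pitch_algoN_as_* method once per algorithm.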
pitch_algo1_params = {
'praatF0': ('praatF0', 585, '224.726'),
'reaperF0': ('reaperF0', 585, '222.727'),
'shrF0': ('shrF0', 585, '222.251'),
'snackF0': ('snackF0', 585, '219.992'),
}
def test_have_default_settings_tests(self):
self._check_algos(self.pitch_algo1_params.keys())
def pitch_algo1_as_default_settings(self, pitch_algo, line_count, v100):
lines = CLI_output(self, '\t', [
'--f0', pitch_algo,
'--include-F0-column',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), line_count)
self.assertEqual(lines[100], self.line100_prefix + [v100])
pitch_algo2_params = CLI._valid_f0
def pitch_algo2_as_frame_shift(self, pitch_algo):
lines = CLI_output(self, '\t', [
'--f0', pitch_algo,
'--include-F0-column',
'--frame-shift', '2',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 293)
pitch_algo3_params = {
'praatF0': ('praatF0', '224.726'),
'reaperF0': ('reaperF0', '222.727'),
'shrF0': ('shrF0', '238.159'),
'snackF0': ('snackF0', '221.386'),
}
# Note that Praat F0 doesn't use window size as a parameter
def test_have_window_size_tests(self):
self._check_algos(self.pitch_algo3_params.keys())
def pitch_algo3_as_window_size(self, pitch_algo, v100):
lines = CLI_output(self, '\t', [
'--f0', pitch_algo,
'--include-F0-column',
'--window-size', '10',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(lines[100], self.line100_prefix + [v100])
pitch_algo4_params = {
'praatF0': ('praatF0', '--praat-min-f0', '229.865'),
'reaperF0': ('reaperF0', '--reaper-min-f0', '222.727'),
'shrF0': ('shrF0', '--shr-min-f0', '222.251'),
'snackF0': ('snackF0', '--snack-min-f0', '0.000'),
}
def test_have_min_f0_tests(self):
self._check_algos(self.pitch_algo4_params.keys())
def pitch_algo4_as_min_f0(self, pitch_algo, min_f0_arg, v100):
lines = CLI_output(self, '\t', [
'--f0', pitch_algo,
'--include-F0-column',
'--no-output-settings',
min_f0_arg, '200',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(lines[100], self.line100_prefix + [v100])
pitch_algo5_params = {
'praatF0': ('praatF0', '--praat-max-f0', '112.061'),
'reaperF0': ('reaperF0', '--reaper-max-f0', '111.364'),
'shrF0': ('shrF0', '--shr-max-f0', '112.172'),
'snackF0': ('snackF0', '--snack-max-f0', '108.907'),
}
def test_have_max_f0_tests(self):
self._check_algos(self.pitch_algo5_params.keys())
def pitch_algo5_as_max_f0(self, pitch_algo, max_f0_arg, v100):
lines = CLI_output(self, '\t', [
'--f0', pitch_algo,
'--include-F0-column',
'--no-output-settings',
max_f0_arg, '200',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(lines[100], self.line100_prefix + [v100])
pitch_algo6_params = {
'praatF0': ('praatF0', 585, '224.755'),
'reaperF0': ('reaperF0', 585, '222.222'),
'shrF0': ('shrF0', 585, '219.583'),
'snackF0': ('snackF0', 585, '216.709'),
}
def test_f0_resample_tests(self):
self._check_algos(self.pitch_algo6_params.keys())
def pitch_algo6_as_resample(self, pitch_algo, line_count, v100):
lines = CLI_output(self, '\t', [
'--f0', pitch_algo,
'--include-F0-column',
'--resample-freq', '16000',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), line_count)
self.assertEqual(lines[100], self.line100_prefix + [v100])
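# A note on the convention used above and below: each `<prefix>_params` collection pairs with a
# `<prefix>_as_<case>` method, and the @parameterize class decorator (defined elsewhere in this
# test suite) generates one test method per entry. A minimal sketch of the idea -- an assumption
# for illustration only, not the suite's actual implementation -- might look like:
#
#     def parameterize(cls):
#         for attr in list(vars(cls)):
#             if '_as_' not in attr or attr.endswith('_params'):
#                 continue
#             prefix, case = attr.split('_as_', 1)
#             params = getattr(cls, prefix + '_params')
#             # Dict params map a name to an argument tuple; list params supply a single argument.
#             items = params.items() if hasattr(params, 'items') else [(p, (p,)) for p in params]
#             for key, args in items:
#                 def make_test(method=getattr(cls, attr), args=tuple(args)):
#                     return lambda self: method(self, *args)
#                 setattr(cls, 'test_{}_{}'.format(case, key), make_test())
#         return cls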
@parameterize
class TestCommandFormants(TestCase):
def test_default_formants(self):
lines = CLI_output(self, '\t', [
'--include-formant-cols',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
formant_col_names = ['pF1', 'pF2', 'pF3', 'pF4',
'pB1', 'pB2', 'pB3', 'pB4']
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 13)
self.assertListEqual(lines[0][-8:], formant_col_names)
def test_alternate_formants(self):
lines = CLI_output(self, '\t', [
'--formants', 'snackFormants',
'--include-formant-cols',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 13)
self.assertEqual(lines[0][-8:], sformant_names)
def test_invalid_formants(self):
with self.assertArgparseError(['nosuchalgorithm']):
CLI(['--formants', 'nosuchalgorithm'])
def test_snackFormants_method_tcl(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackFormants',
'--snack-method', 'tcl',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-8:], sformant_names)
self.assertEqual(len(lines[1]), 13)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
@unittest.skipIf((platform == 'darwin') or using_conda,
'Method to call Snack through Tkinter not supported')
def test_snackFormants_method_python(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackFormants',
'--snack-method', 'python',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-8:], sformant_names)
self.assertEqual(len(lines[1]), 13)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
@unittest.skipUnless(platform == 'win32' or platform == 'cygwin',
'Requires Windows operating system')
def test_snackFormants_method_exe(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'snackFormants',
'--snack-method', 'exe',
'--no-output-settings',
])
self.assertEqual(len(lines), 585)
self.assertEqual(lines[0][-8:], sformant_names)
self.assertEqual(len(lines[1]), 13)
self.assertEqual(len([x for x in lines if 'C1' in x]), 100)
self.assertEqual(len([x for x in lines if 'V1' in x]), 208)
self.assertEqual(len([x for x in lines if 'C2' in x]), 118)
self.assertEqual(len([x for x in lines if 'V2' in x]), 158)
def test_praatFormants_num_formants(self):
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'praatFormants',
'--num-formants', '3',
])
formant_col_names = ['pF1', 'pF2', 'pF3',
'pB1', 'pB2', 'pB3']
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 11)
self.assertListEqual(lines[0][-6:], formant_col_names)
lines = CLI_output(self, '\t', [
sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'praatFormants',
'--num-formants', '3.5',
])
formant_col_names = ['pF1', 'pF2', 'pF3', 'pF4',
'pB1', 'pB2', 'pB3', 'pB4']
self.assertEqual(len(lines), 585)
self.assertEqual(len(lines[1]), 13)
self.assertListEqual(lines[0][-8:], formant_col_names)
with self.assertArgparseError(['error: argument --num-formants: -2 is an invalid positive half integer value']):
lines = CLI([sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'praatFormants',
'--num-formants', '-2',
])
with self.assertArgparseError(['error: argument --num-formants: 1.7 is an invalid positive half integer value']):
lines = CLI([sound_file_path('beijing_f3_50_a.wav'),
'--measurements', 'praatFormants',
'--num-formants', '1.7',
])
line100_prefix = ['beijing_f3_50_a.wav', 'C1', '766.062', '865.632', '865']
def _check_algos(self, algo_list):
self.assertEqual(sorted(algo_list), sorted(CLI._valid_formants), "Tests we have do not match tests we need")
formant_algo1_params = {
'snackFormants': ('snackFormants', 585,
['sF1', 'sF2', 'sF3', 'sF4', 'sB1', 'sB2', 'sB3', 'sB4'],
['573.595', '1658.767', '3277.449', '4422.382'],
['447.585', '139.099', '163.150', '405.460']),
'praatFormants': ('praatFormants', 585,
['pF1', 'pF2', 'pF3', 'pF4', 'pB1', 'pB2', 'pB3', 'pB4'],
['502.944', '1681.375', '3320.657', '4673.634'],
['406.819', '1058.742', '979.097', '646.462']),
}
def test_formant_default_settings_tests(self):
self._check_algos(self.formant_algo1_params.keys())
def formant_algo1_as_default_settings(self, formant_algo, line_count, formant_names, fvals, bvals):
lines = CLI_output(self, '\t', [
'--formants', formant_algo,
'--include-formant-cols',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), line_count)
self.assertEqual(len(lines[0]), 13)
self.assertEqual(lines[0][:5], ['Filename', 'Label', 'seg_Start', 'seg_End', 't_ms'])
self.assertEqual(lines[0][-8:], formant_names)
self.assertEqual(lines[100][:5], self.line100_prefix)
self.assertEqual(lines[100][-8:-4], fvals)
self.assertEqual(lines[100][-4:], bvals)
formant_algo2_params = {
'snackFormants': ('snackFormants', 585,
['sF1', 'sF2', 'sF3', 'sF4', 'sB1', 'sB2', 'sB3', 'sB4'],
['554.578', '1439.016', '3262.044', '4233.911'],
['153.172', '200.412', '426.036', '484.933']),
'praatFormants': ('praatFormants', 585,
['pF1', 'pF2', 'pF3', 'pF4', 'pB1', 'pB2', 'pB3', 'pB4'],
['502.939', '1682.293', '3320.815', '4674.554'],
['407.850', '1063.602', '982.643', '651.033']),
}
def test_formant_resample_tests(self):
self._check_algos(self.formant_algo2_params.keys())
def formant_algo2_as_resample(self, formant_algo, line_count, formant_names, fvals, bvals):
lines = CLI_output(self, '\t', [
'--formants', formant_algo,
'--include-formant-cols',
'--resample-freq', '16000',
'--no-output-settings',
sound_file_path('beijing_f3_50_a.wav'),
])
self.assertEqual(len(lines), line_count)
self.assertEqual(len(lines[0]), 13)
self.assertEqual(lines[0][:5], ['Filename', 'Label', 'seg_Start', 'seg_End', 't_ms'])
self.assertEqual(lines[0][-8:], formant_names)
self.assertEqual(lines[100][:5], self.line100_prefix)
if lines[100][-8:-4] != fvals:
f_rtol = 1e-05
f_atol = 1e-08
            print('\nAbsolute equality check for formant values using {} algorithm failed, trying equality with rtol={}, atol={}'.format(formant_algo, f_rtol, f_atol))
self.assertAllClose(np.float_(lines[100][-8:-4]), np.float_(fvals), rtol=f_rtol, atol=f_atol)
else:
self.assertEqual(lines[100][-8:-4], fvals)
if lines[100][-4:] != bvals:
b_rtol = 1e-05
b_atol = 1e-08
            print('\nAbsolute equality check for bandwidth values using {} algorithm failed, trying equality with rtol={}, atol={}'.format(formant_algo, b_rtol, b_atol))
self.assertAllClose(np.float_(lines[100][-4:]), np.float_(bvals), rtol=b_rtol, atol=b_atol)
else:
self.assertEqual(lines[100][-4:], bvals)
| 43.73454 | 164 | 0.565181 | 7,242 | 57,992 | 4.378349 | 0.070975 | 0.173615 | 0.116374 | 0.072537 | 0.840482 | 0.805191 | 0.784155 | 0.753532 | 0.737574 | 0.718021 | 0 | 0.054981 | 0.277711 | 57,992 | 1,325 | 165 | 43.767547 | 0.702008 | 0.025072 | 0 | 0.721939 | 0 | 0 | 0.197721 | 0.01991 | 0 | 0 | 0 | 0 | 0.340136 | 1 | 0.07483 | false | 0.002551 | 0.011054 | 0 | 0.097789 | 0.001701 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
12fc36830b4b42e40ce943a1b625275da8304e49 | 2,014 | py | Python | vm/objects/float.py | a-vorontsov/6ccs3prj | 366ae0e6332b6811bbe415bd5cf60d4dcfc4a70a | ["MIT"] | 3 | 2020-12-17T20:56:57.000Z | 2021-02-19T16:31:08.000Z | vm/objects/float.py | a-vorontsov/6ccs3prj | 366ae0e6332b6811bbe415bd5cf60d4dcfc4a70a | ["MIT"] | null | null | null | vm/objects/float.py | a-vorontsov/6ccs3prj | 366ae0e6332b6811bbe415bd5cf60d4dcfc4a70a | ["MIT"] | 1 | 2021-04-19T17:00:56.000Z | 2021-04-19T17:00:56.000Z |
from primitive_object import PrimitiveObject
from null import Null
class Float(PrimitiveObject):
    # Boxed float value for the VM; __slots__ and _immutable_fields_ are the usual
    # RPython-style hints that the wrapped value is fixed after construction.
    __slots__ = ("value",)
    _immutable_fields_ = ("value",)
def __init__(self, value):
self.value = value
def get_value(self):
return self.value
def get_string(self):
return str(self.value)
    def pprint(self):
        # The original uses the Python 2 print statement; the parenthesized form behaves the
        # same for a single argument and also parses under Python 3.
        print(self.get_string())
def add(self, rhs):
assert isinstance(rhs, Float)
result = self.value + rhs.value
return Float(float(result))
def sub(self, rhs):
assert isinstance(rhs, Float)
result = self.value - rhs.value
return Float(float(result))
def mul(self, rhs):
assert isinstance(rhs, Float)
result = self.value * rhs.value
return Float(float(result))
def div(self, rhs):
assert isinstance(rhs, Float)
        if rhs.value == 0.0:
            raise ValueError("float division by zero")
result = self.value / rhs.value
return Float(float(result))
def eq(self, rhs):
assert isinstance(rhs, PrimitiveObject)
if isinstance(rhs, Null):
return False
else:
assert isinstance(rhs, Float)
result = self.value == rhs.value
return result
def neq(self, rhs):
assert isinstance(rhs, PrimitiveObject)
if isinstance(rhs, Null):
return True
else:
assert isinstance(rhs, Float)
result = self.value != rhs.value
return result
def lt(self, rhs):
assert isinstance(rhs, Float)
result = self.value < rhs.value
return result
def le(self, rhs):
assert isinstance(rhs, Float)
result = self.value <= rhs.value
return result
def gt(self, rhs):
assert isinstance(rhs, Float)
result = self.value > rhs.value
return result
def ge(self, rhs):
assert isinstance(rhs, Float)
result = self.value >= rhs.value
return result
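# Illustrative usage of the boxed Float (not part of the original file):
#
#     Float(1.5).add(Float(2.25)).pprint()   # prints 3.75
#     Float(2.0).lt(Float(3.0))              # True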
| 25.175 | 47 | 0.580933 | 233 | 2,014 | 4.957082 | 0.180258 | 0.109091 | 0.197403 | 0.199134 | 0.738528 | 0.738528 | 0.711688 | 0.711688 | 0.711688 | 0.711688 | 0 | 0.001473 | 0.32572 | 2,014 | 79 | 48 | 25.493671 | 0.849043 | 0 | 0 | 0.412698 | 0 | 0 | 0.004965 | 0 | 0 | 0 | 0 | 0 | 0.190476 | 0 | null | null | 0 | 0.031746 | null | null | 0.031746 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4254a2be2e3f79f432804ce4254e63a242ed7454 | 121 | py | Python | test.py | csy1993/PythonInterview | 01667ba0e453c8a62800cdae84bbf8554da70ceb | ["Apache-2.0"] | null | null | null | test.py | csy1993/PythonInterview | 01667ba0e453c8a62800cdae84bbf8554da70ceb | ["Apache-2.0"] | null | null | null | test.py | csy1993/PythonInterview | 01667ba0e453c8a62800cdae84bbf8554da70ceb | ["Apache-2.0"] | null | null | null |
'''
@Author: CSY
@Date: 2020-01-28 09:55:04
@LastEditors : CSY
@LastEditTime : 2020-01-28 09:55:17
'''
a = 5 % 2 == 1  # '%' binds tighter than '==', so this is (5 % 2) == 1, i.e. True
print(a)
| 15.125 | 35 | 0.636364 | 24 | 121 | 3.208333 | 0.708333 | 0.155844 | 0.207792 | 0.25974 | 0.311688 | 0 | 0 | 0 | 0 | 0 | 0 | 0.295238 | 0.132231 | 121 | 8 | 36 | 15.125 | 0.438095 | 0.785124 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
428545737cfefd3316be6497470c6bc6d6c28402 | 13,541 | py | Python | infoblox_netmri/api/broker/v3_8_0/system_backup_broker.py | infobloxopen/infoblox_netmri | aa1c744df7e439dbe163bb9edd165e4e85a9771b | ["Apache-2.0"] | 12 | 2016-02-19T12:37:54.000Z | 2022-03-04T20:11:08.000Z | infoblox_netmri/api/broker/v3_8_0/system_backup_broker.py | azinfoblox/infoblox-netmri | 02372c5231e2677ab6299cb659a73c9a41b4b0f4 | ["Apache-2.0"] | 18 | 2015-11-12T18:37:00.000Z | 2021-05-19T07:59:55.000Z | infoblox_netmri/api/broker/v3_8_0/system_backup_broker.py | azinfoblox/infoblox-netmri | 02372c5231e2677ab6299cb659a73c9a41b4b0f4 | ["Apache-2.0"] | 18 | 2016-01-07T12:04:34.000Z | 2022-03-31T11:05:41.000Z |
from ..broker import Broker
class SystemBackupBroker(Broker):
controller = "system_backup"
def create_archive(self, **kwargs):
"""Creates backup of current system database.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
        :param include_date: Defines whether to include the date in the file name or not.
:type include_date: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param init: Defines whether to initially create the archive.
:type init: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
        :param async_ind: When false, the backup is created synchronously and the API call blocks until it is complete. When true, a backup creation id is returned for use in subsequent calls.
:type async_ind: Boolean
**Outputs**
"""
return self.api_request(self._get_method_fullname("create_archive"), kwargs)
def create_archive_status(self, **kwargs):
"""Backup database status.
**Inputs**
**Outputs**
"""
return self.api_request(self._get_method_fullname("create_archive_status"), kwargs)
def ssh_authentication_test(self, **kwargs):
"""Test SSH authentication.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param host: Host name or IP address of the system where archive will be copied.
:type host: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param port: Number of open SSH port on the system where archive will be delivered. Default value is 22 (used if no port number specified).
:type port: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param user_name: Name of the existing user on the system where archive will be copied.
:type user_name: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:``
:param password: User password on the system where archive will be copied.
:type password: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param use_ssh_keys: Specifies whether to use SSH keys.
:type use_ssh_keys: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param directory: Remote host directory where archive will be stored.
:type directory: String
**Outputs**
"""
return self.api_request(self._get_method_fullname("ssh_authentication_test"), kwargs)
def move_archive_to_remote_host(self, **kwargs):
"""Moves database archive to remote host via SSH. Note that archive will be removed from NetMRI.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param host: Host name or IP address of the system where archive will be copied. Required if init is set to true.
:type host: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param port: Number of open SSH port on the system where archive will be delivered. Default value is 22 (used if no port number specified).
:type port: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param user_name: Name of the existing user on the system where archive will be copied. Required if init is set to true.
:type user_name: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:``
:param password: User password on the system where archive will be copied. Required if init is set to true.
:type password: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param use_ssh_keys: Specifies whether to use SSH keys.
:type use_ssh_keys: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param directory: Specifies directory where archive will be stored on remote host. Default is user home directory.
:type directory: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param init: Set to true to initialize moving archive
:type init: Boolean
**Outputs**
"""
return self.api_request(self._get_method_fullname("move_archive_to_remote_host"), kwargs)
def download_archive(self, **kwargs):
"""Download database archive.
**Inputs**
**Outputs**
"""
return self.api_mixed_request(self._get_method_fullname("download_archive"), kwargs)
def download_archive_md5_sum(self, **kwargs):
"""Download database archive md5 checksum.
**Inputs**
**Outputs**
"""
return self.api_mixed_request(self._get_method_fullname("download_archive_md5_sum"), kwargs)
def remove_archive(self, **kwargs):
"""Database archive is stored in temporary directory on NetMRI. It's removed on schedule but you may choose to force remove it.
**Inputs**
**Outputs**
"""
return self.api_request(self._get_method_fullname("remove_archive"), kwargs)
def schedule_archiving(self, **kwargs):
"""Schedule NetMRI database archiving. Archive will be stored on up to 2 systems supporting SCP.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param enable: Specifies whether scheduled archiving should be enabled or not. If parameter is not specified then scheduled archiving is set disabled.
:type enable: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param host_1: Host name or IP address of the system where archive will be copied.
:type host_1: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param port_1: Number of open SSH port on the system where archive will be delivered. Default value is 22 (used if no port number specified).
:type port_1: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param user_name_1: Name of the existing user on the system where archive will be copied.
:type user_name_1: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:``
:param password_1: User password on the system where archive will be copied.
:type password_1: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param use_ssh_keys_1: Specifies whether to use SSH keys.
:type use_ssh_keys_1: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param directory_1: Specifies directory where archive will be stored on remote host. Default is user home directory.
:type directory_1: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param host_2: Host name or IP address of the system where archive will be copied.
:type host_2: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param port_2: Number of open SSH port on the system where archive will be delivered. Default value is 22 (used if no port number specified).
:type port_2: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param user_name_2: Name of the existing user on the system where archive will be copied.
:type user_name_2: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:``
:param password_2: User password on the system where archive will be copied.
:type password_2: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param use_ssh_keys_2: Specifies whether to use SSH keys.
:type use_ssh_keys_2: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param directory_2: Specifies directory where archive will be stored on remote host. Default is user home directory.
:type directory_2: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include_date_1: Specifies whether to put current date into archive file name or not while saving on remote host 1.
:type include_date_1: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include_date_2: Specifies whether to put current date into archive file name or not while saving on remote host 2.
:type include_date_2: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param schedule_cron: Cron schedule string.
:type schedule_cron: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param schedule_json: NetMRI internal parameters generated by 'cronscheduler' form transmitted in json format for setting cron schedule string.
:type schedule_json: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param force_save: If true, changes will be saved even if credentials test failed
:type force_save: Boolean
**Outputs**
"""
return self.api_request(self._get_method_fullname("schedule_archiving"), kwargs)
def upload_archive(self, **kwargs):
"""Upload database archive to NetMRI.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param archive: NetMRI database archive file.
:type archive: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param md5: NetMRI database archive MD5 checksum file.
:type md5: String
**Outputs**
"""
return self.api_request(self._get_method_fullname("upload_archive"), kwargs)
def restore_database(self, **kwargs):
"""Restores database from the archive which should have been uploaded to NetMRI.
**Inputs**
**Outputs**
"""
return self.api_request(self._get_method_fullname("restore_database"), kwargs)
| 33.517327 | 210 | 0.53482 | 1,454 | 13,541 | 4.882393 | 0.114856 | 0.101423 | 0.065925 | 0.086209 | 0.76391 | 0.736723 | 0.732075 | 0.732075 | 0.732075 | 0.732075 | 0 | 0.00522 | 0.363341 | 13,541 | 403 | 211 | 33.600496 | 0.818235 | 0.661325 | 0 | 0 | 0 | 0 | 0.127959 | 0.060781 | 0 | 0 | 0 | 0 | 0 | 1 | 0.434783 | false | 0 | 0.043478 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
c40bfc544aadab72d039226c3600579acb273865 | 3,223 | py | Python | RecoMuon/MuonIsolation/python/muonPFIsolationCitk_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | ["Apache-2.0"] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | RecoMuon/MuonIsolation/python/muonPFIsolationCitk_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | ["Apache-2.0"] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | RecoMuon/MuonIsolation/python/muonPFIsolationCitk_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | ["Apache-2.0"] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z |
import FWCore.ParameterSet.Config as cms
muonPFNoPileUpIsolation = cms.EDProducer(
"CITKPFIsolationSumProducer",
srcToIsolate = cms.InputTag("muons"),
srcForIsolationCone = cms.InputTag('pfNoPileUpCandidates'),
isolationConeDefinitions = cms.VPSet(
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.3),
VetoThreshold = cms.double(0.0),
VetoConeSize = cms.double(0.0001),
isolateAgainst = cms.string('h+'),
miniAODVertexCodes = cms.vuint32(2,3) ),
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.3),
VetoThreshold = cms.double(0.5),
VetoConeSize = cms.double(0.01),
isolateAgainst = cms.string('h0'),
miniAODVertexCodes = cms.vuint32(2,3) ),
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.3),
VetoThreshold = cms.double(0.5),
VetoConeSize = cms.double(0.01),
isolateAgainst = cms.string('gamma'),
miniAODVertexCodes = cms.vuint32(2,3) ),
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.4),
VetoThreshold = cms.double(0.0),
VetoConeSize = cms.double(0.0001),
isolateAgainst = cms.string('h+'),
miniAODVertexCodes = cms.vuint32(2,3) ),
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.4),
VetoThreshold = cms.double(0.5),
VetoConeSize = cms.double(0.01),
isolateAgainst = cms.string('h0'),
miniAODVertexCodes = cms.vuint32(2,3) ),
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.4),
VetoThreshold = cms.double(0.5),
VetoConeSize = cms.double(0.01),
isolateAgainst = cms.string('gamma'),
miniAODVertexCodes = cms.vuint32(2,3) ),
),
)
muonPFPileUpIsolation = cms.EDProducer(
"CITKPFIsolationSumProducer",
srcToIsolate = cms.InputTag("muons"),
srcForIsolationCone = cms.InputTag('pfPileUpAllChargedParticles'),
isolationConeDefinitions = cms.VPSet(
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.3),
VetoThreshold = cms.double(0.5),
VetoConeSize = cms.double(0.01),
isolateAgainst = cms.string('h+'),
miniAODVertexCodes = cms.vuint32(0,1) ),
cms.PSet( isolationAlgo = cms.string('MuonPFIsolationWithConeVeto'),
coneSize = cms.double(0.4),
VetoThreshold = cms.double(0.5),
VetoConeSize = cms.double(0.01),
isolateAgainst = cms.string('h+'),
miniAODVertexCodes = cms.vuint32(0,1) ),
),
)
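# Illustrative usage (an assumption, not part of the original file): in a CMSSW configuration
# these producers would be attached to the cms.Process and scheduled like any other module, e.g.
#
#     process.muonPFNoPileUpIsolation = muonPFNoPileUpIsolation
#     process.muonPFPileUpIsolation = muonPFPileUpIsolation
#     process.muonIsolationTask = cms.Task(process.muonPFNoPileUpIsolation,
#                                          process.muonPFPileUpIsolation)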
| 46.710145 | 76 | 0.56283 | 276 | 3,223 | 6.572464 | 0.144928 | 0.119074 | 0.132304 | 0.101433 | 0.93054 | 0.93054 | 0.93054 | 0.93054 | 0.93054 | 0.93054 | 0 | 0.042727 | 0.317406 | 3,223 | 68 | 77 | 47.397059 | 0.781818 | 0 | 0 | 0.888889 | 0 | 0 | 0.107697 | 0.091558 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.015873 | 0 | 0.015873 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c422219348eb3ad66029a57d2ccbc3808d67ef9a | 2,023 | py | Python | 0478 Communication Towers.py | ansabgillani/binarysearchcomproblems | 12fe8632f8cbb5058c91a55bae53afa813a3247e | ["MIT"] | 1 | 2020-12-29T21:17:26.000Z | 2020-12-29T21:17:26.000Z | 0478 Communication Towers.py | ansabgillani/binarysearchcomproblems | 12fe8632f8cbb5058c91a55bae53afa813a3247e | ["MIT"] | null | null | null | 0478 Communication Towers.py | ansabgillani/binarysearchcomproblems | 12fe8632f8cbb5058c91a55bae53afa813a3247e | ["MIT"] | 4 | 2021-09-09T17:42:43.000Z | 2022-03-18T04:54:03.000Z |
class Solution:
    def solve(self, matrix):
        # Union-find over the towers (cells equal to 1): every tower starts as its own
        # leader, and `followers` tracks all cells currently merged under each leader.
        leaders = {(r,c):(r,c) for r in range(len(matrix)) for c in range(len(matrix[0])) if matrix[r][c] == 1}
        followers = {(r,c):[(r,c)] for r in range(len(matrix)) for c in range(len(matrix[0])) if matrix[r][c] == 1}
        # First pass: merge towers that share a row.
        for r in range(len(matrix)):
latest = None
for c in range(len(matrix[0])):
if matrix[r][c] == 0:
continue
if latest is None:
latest = (r,c)
continue
new_leader = leaders[latest]
old_leader = leaders[r,c]
latest = (r,c)
if new_leader == old_leader:
continue
                # Union by size: merge the smaller group into the larger one.
                if len(followers[new_leader]) < len(followers[old_leader]):
new_leader, old_leader = old_leader, new_leader
for follower in followers[old_leader]:
leaders[follower] = new_leader
followers[new_leader].append(follower)
followers[old_leader] = []
        # Second pass: merge towers that share a column.
        for c in range(len(matrix[0])):
latest = None
for r in range(len(matrix)):
if matrix[r][c] == 0:
continue
if latest is None:
latest = (r,c)
continue
new_leader = leaders[latest]
old_leader = leaders[r,c]
latest = (r,c)
if new_leader == old_leader:
continue
if len(followers[new_leader]) < len(followers[old_leader]):
new_leader, old_leader = old_leader, new_leader
for follower in followers[old_leader]:
leaders[follower] = new_leader
followers[new_leader].append(follower)
followers[old_leader] = []
        # The number of distinct leaders is the number of connected groups.
        return len(set(leaders.values()))
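# Illustrative usage (not part of the original file): towers that share a row or a column are
# merged into one network, so this grid contains two groups.
#
#     grid = [[1, 0, 1],
#             [0, 0, 0],
#             [0, 1, 0]]
#     print(Solution().solve(grid))   # 2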
| 32.629032 | 115 | 0.461691 | 221 | 2,023 | 4.099548 | 0.131222 | 0.030905 | 0.0883 | 0.14128 | 0.898455 | 0.898455 | 0.854305 | 0.831126 | 0.831126 | 0.831126 | 0 | 0.007042 | 0.438458 | 2,023 | 61 | 116 | 33.163934 | 0.790493 | 0 | 0 | 0.883721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023256 | false | 0 | 0 | 0 | 0.069767 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c42f420cf6ab61a15f6be4458145aa7a884aa03d | 128 | py | Python | quarks2cosmos/models/__init__.py | dkn16/Quarks2CosmosDataChallenge | 7ed755b0050bebd1ab4c73b3329389a9cfc6d208 | ["MIT"] | 9 | 2021-07-12T11:46:37.000Z | 2021-09-03T13:07:56.000Z | quarks2cosmos/models/__init__.py | dkn16/Quarks2CosmosDataChallenge | 7ed755b0050bebd1ab4c73b3329389a9cfc6d208 | ["MIT"] | 5 | 2021-07-12T11:49:35.000Z | 2021-07-15T00:09:23.000Z | quarks2cosmos/models/__init__.py | dkn16/Quarks2CosmosDataChallenge | 7ed755b0050bebd1ab4c73b3329389a9cfc6d208 | ["MIT"] | 5 | 2021-07-12T18:10:14.000Z | 2021-07-18T02:53:44.000Z |
from quarks2cosmos.models.convdae import SmallUResNet
from quarks2cosmos.models.normalization import SpectralNorm, SNParamsTree
| 42.666667 | 73 | 0.890625 | 13 | 128 | 8.769231 | 0.692308 | 0.298246 | 0.403509 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016807 | 0.070313 | 128 | 2 | 74 | 64 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c44a0fc0360ab5d3817742244cea81334ae4379a | 2,776 | py | Python | problems/problem8.py | phi95/Project-Euler | 3c9f251686e91d8b72585c39fe295d8be8ca5303 | ["MIT"] | null | null | null | problems/problem8.py | phi95/Project-Euler | 3c9f251686e91d8b72585c39fe295d8be8ca5303 | ["MIT"] | null | null | null | problems/problem8.py | phi95/Project-Euler | 3c9f251686e91d8b72585c39fe295d8be8ca5303 | ["MIT"] | null | null | null |
#!/usr/bin/python
#The four adjacent digits in the 1000-digit number that have the greatest product are 9 × 9 × 8 × 9 = 5832.
#73167176531330624919225119674426574742355349194934
#96983520312774506326239578318016984801869478851843
#85861560789112949495459501737958331952853208805511
#12540698747158523863050715693290963295227443043557
#66896648950445244523161731856403098711121722383113
#62229893423380308135336276614282806444486645238749
#30358907296290491560440772390713810515859307960866
#70172427121883998797908792274921901699720888093776
#65727333001053367881220235421809751254540594752243
#52584907711670556013604839586446706324415722155397
#53697817977846174064955149290862569321978468622482
#83972241375657056057490261407972968652414535100474
#82166370484403199890008895243450658541227588666881
#16427171479924442928230863465674813919123162824586
#17866458359124566529476545682848912883142607690042
#24219022671055626321111109370544217506941658960408
#07198403850962455444362981230987879927244284909188
#84580156166097919133875499200524063689912560717606
#05886116467109405077541002256983155200055935729725
#71636269561882670428252483600823257530420752963450
#Find the thirteen adjacent digits in the 1000-digit number that have the greatest product. What is the value of this product?
stringList = """
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
"""
stringList = stringList.replace('\n', '').replace('\r', '')
def solve(adjacentNumber):
    # Slide a window of `adjacentNumber` digits across stringList and keep the largest product.
length = len(stringList)
maxProduct = 0
for i in range(0, length-(adjacentNumber-1)):
temp = int(stringList[i])
for n in range(1, adjacentNumber):
temp *= int(stringList[i+n])
if n == adjacentNumber-1:
if temp > maxProduct:
maxProduct = temp
return maxProduct
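# Illustrative usage (not part of the original file): the question above asks for thirteen
# adjacent digits, so the answer would be printed with
#
#     print(solve(13))
#
# and solve(4) reproduces the worked example 9 * 9 * 8 * 9 = 5832 from the comment block above.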
| 45.508197 | 126 | 0.882565 | 137 | 2,776 | 17.905109 | 0.481752 | 0.011415 | 0.013045 | 0.015491 | 0.864248 | 0.864248 | 0.864248 | 0.864248 | 0.864248 | 0.864248 | 0 | 0.792549 | 0.081412 | 2,776 | 60 | 127 | 46.266667 | 0.168235 | 0.449207 | 0 | 0 | 0 | 0 | 0.680611 | 0.664011 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029412 | false | 0 | 0 | 0 | 0.058824 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
c483d87ebe93621c3ed376687dde32aba63b1356 | 5,317 | py | Python | 8term/OR/lab7/Tests.py | nik-sergeson/bsuir-informatics-labs | 14805fb83b8e2324580b6253158565068595e804 | ["Apache-2.0"] | null | null | null | 8term/OR/lab7/Tests.py | nik-sergeson/bsuir-informatics-labs | 14805fb83b8e2324580b6253158565068595e804 | ["Apache-2.0"] | null | null | null | 8term/OR/lab7/Tests.py | nik-sergeson/bsuir-informatics-labs | 14805fb83b8e2324580b6253158565068595e804 | ["Apache-2.0"] | null | null | null |
import unittest
from LongestPathTree import LongestPathTree
from sympy import Matrix
class TestExamples(unittest.TestCase):
def test_example(self):
paths = Matrix([[0, 2, 0, 1, 0, 0],
[0, 0, 2, 0, 7, 0],
[0, 0, 0, 0, 0, 8],
[0, 4, 4, 0, 1, 0],
[0, 0, 1, 0, 0, 1],
[0, 0, 0, 0, 0, 0]])
true_result = 21
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task1(self):
paths = Matrix([[0, 5, 6, 4, 1, 0, 0, 0],
[0, 0, 4, 3, 2, 0, 0, 0],
[0, 0, 0, 0, 5, 0, 3, 0],
[0, 0, 0, 0, 0, 4, 7, 3],
[0, 0, 0, 0, 0, 0, 0, 4],
[0, 0, 0, 0, 0, 0, 2, 5],
[0, 0, 0, 0, 2, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0]])
true_result = 21
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task2(self):
paths = Matrix([[0, 3, 4, 5, 3, 0, 0],
[0, 0, 0, 2, 0, 0, 0],
[0, 0, 0, 6, 0, 3, 0],
[0, 0, 0, 0, 4, 1, 4],
[0, 0, 0, 0, 0, 2, 5],
[0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0]])
true_result = 19
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task3(self):
paths = Matrix([[0, 4, 1, 3, 0, 2, 7, 0],
[0, 0, 1, 5, 0, 0, 0, 0],
[0, 0, 0, 4, 3, 5, 0, 0],
[0, 0, 0, 0, 2, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 3, 1],
[0, 0, 0, 4, 0, 0, 2, 7],
[0, 0, 0, 0, 0, 0, 0, 6],
[0, 0, 0, 0, 0, 0, 0, 0]])
true_result = 25
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task4(self):
paths = Matrix([[0, 3, 4, 6, 2, 0, 0, 0],
[0, 0, 0, 5, 1, 0, 0, 0],
[0, 3, 0, 2, 0, 6, 0, 0],
[0, 0, 0, 0, 4, 2, 7, 0],
[0, 0, 0, 0, 0, 3, 7, 1],
[0, 0, 0, 0, 0, 0, 1, 4],
[0, 0, 0, 0, 0, 0, 0, 6],
[0, 0, 0, 0, 0, 0, 0, 0]])
true_result = 29
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task5(self):
paths = Matrix([[0, 7, 9, 6, 0, 3, 0],
[0, 0, 0, 0, 0, 6, 0],
[0, 4, 0, 0, 3, 1, 4],
[0, 2, 1, 0, 8, 0, 0],
[0, 0, 0, 0, 0, 5, 1],
[0, 0, 0, 0, 0, 0, 3],
[0, 0, 0, 0, 0, 0, 0]])
true_result = 22
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task6(self):
paths = Matrix([[0, 6, 5, 0, 1, 4, 0, 0, 0],
[0, 0, 2, 0, 9, 3, 0, 0, 0],
[0, 0, 0, 10, 1, 0, 2, 0, 5],
[0, 0, 0, 0, 0, 0, 1, 7, 3],
[0, 0, 0, 7, 0, 6, 3, 0, 0],
[0, 0, 0, 5, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 8, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 2],
[0, 0, 0, 0, 0, 0, 0, 0, 0]])
true_result = 37
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task7(self):
paths = Matrix([[0, 7, 0, 4, 4, 0, 0, 0, 0],
[0, 0, 2, 5, 0, 0, 0, 0, 0],
[0, 0, 0, 6, 0, 1, 0, 7, 0],
[0, 0, 0, 0, 7, 4, 0, 0, 0],
[0, 0, 0, 0, 0, 9, 3, 0, 0],
[0, 0, 0, 0, 0, 0, 10, 0, 5],
[0, 0, 0, 0, 0, 0, 0, 0, 8],
[0, 0, 0, 0, 0, 0, 0, 0, 3],
[0, 0, 0, 0, 0, 0, 0, 0, 0]])
true_result = 49
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
def test_task8(self):
paths = Matrix([[0, 7, 2, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 9, 5, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 4, 3, 0, 0],
[0, 0, 4, 0, 3, 5, 0, 7, 0],
[0, 0, 0, 0, 0, 10, 0, 4, 0],
[0, 0, 0, 0, 0, 0, 7, 0, 4],
[0, 0, 0, 0, 0, 0, 0, 0, 6],
[0, 0, 0, 0, 0, 8, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0]])
true_result = 44
lpt = LongestPathTree(paths)
result = lpt.solve()[0][-1, 0]
self.assertEquals(result, true_result)
if __name__ == "__main__":
unittest.main()
| 39.095588 | 53 | 0.326876 | 795 | 5,317 | 2.142138 | 0.061635 | 0.380505 | 0.454492 | 0.46741 | 0.826189 | 0.788608 | 0.745743 | 0.725778 | 0.665884 | 0.603053 | 0 | 0.232688 | 0.494828 | 5,317 | 135 | 54 | 39.385185 | 0.40134 | 0 | 0 | 0.319672 | 0 | 0 | 0.001505 | 0 | 0 | 0 | 0 | 0 | 0.07377 | 1 | 0.07377 | false | 0 | 0.02459 | 0 | 0.106557 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
6723ade03a2b10264ce6b5c3068e600480331e21 | 28,045 | py | Python | sensor/models.py | kwarodom/mib_ui_data_analytics | a0bc0b30ada1622e00dff41797bd07ea76d7c422 | ["Unlicense"] | null | null | null | sensor/models.py | kwarodom/mib_ui_data_analytics | a0bc0b30ada1622e00dff41797bd07ea76d7c422 | ["Unlicense"] | null | null | null | sensor/models.py | kwarodom/mib_ui_data_analytics | a0bc0b30ada1622e00dff41797bd07ea76d7c422 | ["Unlicense"] | null | null | null |
# -*- coding: utf-8 -*-
# Authors: Kruthika Rathinavel
# Version: 2.0
# Email: kruthika@vt.edu
# Created: "2014-10-13 18:45:40"
# Updated: "2015-02-13 15:06:41"
# Copyright © 2014 by Virginia Polytechnic Institute and State University
# All rights reserved
#
# Virginia Polytechnic Institute and State University (Virginia Tech) owns the copyright for the BEMOSS software and
# and its associated documentation ("Software") and retains rights to grant research rights under patents related to
# the BEMOSS software to other academic institutions or non-profit research institutions.
# You should carefully read the following terms and conditions before using this software.
# Your use of this Software indicates your acceptance of this license agreement and all terms and conditions.
#
# You are hereby licensed to use the Software for Non-Commercial Purpose only. Non-Commercial Purpose means the
# use of the Software solely for research. Non-Commercial Purpose excludes, without limitation, any use of
# the Software, as part of, or in any way in connection with a product or service which is sold, offered for sale,
# licensed, leased, loaned, or rented. Permission to use, copy, modify, and distribute this compilation
# for Non-Commercial Purpose to other academic institutions or non-profit research institutions is hereby granted
# without fee, subject to the following terms of this license.
#
# Commercial Use: If you desire to use the software for profit-making or commercial purposes,
# you agree to negotiate in good faith a license with Virginia Tech prior to such profit-making or commercial use.
# Virginia Tech shall have no obligation to grant such license to you, and may grant exclusive or non-exclusive
# licenses to others. You may contact the following by email to discuss commercial use:: vtippatents@vtip.org
#
# Limitation of Liability: IN NO EVENT WILL VIRGINIA TECH, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE
# THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
# CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
# LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE
# OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF VIRGINIA TECH OR OTHER PARTY HAS BEEN ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGES.
#
# For full terms and conditions, please visit https://bitbucket.org/bemoss/bemoss_os.
#
# Address all correspondence regarding this license to Virginia Tech's electronic mail address: vtippatents@vtip.org
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from dashboard.models import Building_Zone, DeviceMetadata
#Occupancy Sensor Data
class OccupancySensor(models.Model):
occupancy_sensor = models.ForeignKey(DeviceMetadata, max_length=50, primary_key=True)
space_occupied = models.NullBooleanField(null=True, blank=True)
ip_address = models.IPAddressField(null=True, blank=True)
nickname = models.CharField(max_length=30, null=True, blank=True)
zone = models.ForeignKey(Building_Zone, null=True, blank=True)
network_status = models.CharField(max_length=7, null=True, blank=True)
other_parameters = models.CharField(max_length=200, null=True, blank=True)
last_scanned_time = models.DateTimeField(null=True, blank=True)
last_offline_time = models.DateTimeField(null=True, blank=True)
class Meta:
db_table = "occupancy_sensor"
def __unicode__(self):
return self.occupancy_sensor_id
def data_as_json(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.occupancy_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.occupancy_sensor_id,
space_occupied=self.space_occupied,
zone=zone_req,
nickname=self.nickname.encode('utf-8').title(),
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
identifiable=metadata['identifiable'],
bemoss=metadata['bemoss'])
def data_side_nav(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.occupancy_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.occupancy_sensor_id,
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize())
def device_status(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.occupancy_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.occupancy_sensor_id,
nickname=self.nickname.encode('utf-8').title(),
device_model=metadata['device_model'],
date_added=metadata['date_added'],
zone=zone_req,
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time,
last_offline=self.last_offline_time)
def data_dashboard(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.occupancy_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.occupancy_sensor_id,
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
date_added=metadata['date_added'],
identifiable=metadata['identifiable'],
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time)
#Ambient Light Sensor Data
class AmbientLightSensor(models.Model):
ambient_light_sensor = models.ForeignKey(DeviceMetadata, max_length=50, primary_key=True)
illuminance = models.IntegerField(null=True, blank=True)
ip_address = models.IPAddressField(null=True, blank=True)
nickname = models.CharField(max_length=30, null=True, blank=True)
zone = models.ForeignKey(Building_Zone, null=True, blank=True)
network_status = models.CharField(max_length=7, null=True, blank=True)
other_parameters = models.CharField(max_length=200, null=True, blank=True)
last_scanned_time = models.DateTimeField(null=True, blank=True)
last_offline_time = models.DateTimeField(null=True, blank=True)
class Meta:
db_table = "ambient_light_sensor"
def __unicode__(self):
return self.ambient_light_sensor_id
def data_as_json(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.ambient_light_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.ambient_light_sensor_id,
illuminance=self.illuminance,
zone=zone_req,
identifiable=metadata['identifiable'],
nickname=self.nickname.encode('utf-8').title(),
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
bemoss=metadata['bemoss'],
mac_address=metadata['mac_address'].encode('utf-8'))
def device_status(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.ambient_light_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.ambient_light_sensor_id,
nickname=self.nickname.encode('utf-8').title(),
device_model=metadata['device_model'],
date_added=metadata['date_added'],
zone=zone_req,
zone_nickname=zone_req['zone_nickname'],
bemoss=metadata['bemoss'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time,
last_offline=self.last_offline_time)
def data_dashboard(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.ambient_light_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.ambient_light_sensor_id,
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
date_added=metadata['date_added'],
identifiable=metadata['identifiable'],
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time)
def data_side_nav(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.ambient_light_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.ambient_light_sensor_id,
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize())
#Motion Sensor Data
class MotionSensor(models.Model):
motion_sensor = models.ForeignKey(DeviceMetadata, max_length=50, primary_key=True)
motion = models.BooleanField()
ip_address = models.IPAddressField()
nickname = models.CharField(max_length=30)
zone = models.ForeignKey(Building_Zone)
network_status = models.CharField(max_length=7)
other_parameters = models.CharField(max_length=200, null=True, blank=True)
last_scanned_time = models.DateTimeField(null=True, blank=True)
last_offline_time = models.DateTimeField(null=True, blank=True)
class Meta:
db_table = "motion_sensor"
def __unicode__(self):
return self.motion_sensor_id
def data_as_json(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.motion_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.motion_sensor_id,
motion=self.motion,
zone=zone_req,
identifiable=metadata['identifiable'],
nickname=self.nickname.encode('utf-8').title(),
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
bemoss=metadata['bemoss'],
mac_address=metadata['mac_address'].encode('utf-8'))
def device_status(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.motion_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.motion_sensor_id,
nickname=self.nickname.encode('utf-8').title(),
device_model=metadata['device_model'],
date_added=metadata['date_added'],
zone=zone_req,
zone_nickname=zone_req['zone_nickname'],
bemoss=metadata['bemoss'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time,
last_offline=self.last_offline_time)
def data_dashboard(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.motion_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.motion_sensor_id,
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
date_added=metadata['date_added'],
identifiable=metadata['identifiable'],
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time)
def data_side_nav(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.motion_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.motion_sensor_id,
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize())
class Hub(models.Model):
hub = models.ForeignKey(DeviceMetadata, max_length=50, primary_key=True)
location = models.CharField(max_length=50, null=True, blank=True)
firmware_version = models.CharField(max_length=50, null=True, blank=True)
factory_id = models.CharField(max_length=50, null=True, blank=True)
firmware_update_available = models.NullBooleanField(null=True, blank=True)
battery = models.PositiveIntegerField(validators=[MinValueValidator(0), MaxValueValidator(100)], null=True,
blank=True)
signal_strength = models.PositiveIntegerField(validators=[MinValueValidator(0), MaxValueValidator(100)], null=True,
blank=True)
ip_address = models.IPAddressField()
nickname = models.CharField(max_length=30)
zone = models.ForeignKey(Building_Zone)
network_status = models.CharField(max_length=7)
other_parameters = models.CharField(max_length=200, null=True, blank=True)
last_scanned_time = models.DateTimeField(null=True, blank=True)
last_offline_time = models.DateTimeField(null=True, blank=True)
class Meta:
db_table = "hub"
def __unicode__(self):
return self.hub_id
def data_as_json(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.hub_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.hub_id,
location=self.location,
firmware_version=self.firmware_version,
factory_id=self.factory_id,
firmware_update_availabile=self.firmware_update_available,
battery=self.battery,
signal_strength=self.signal_strength,
zone=zone_req,
bemoss=metadata['bemoss'],
nickname=self.nickname.encode('utf-8').title(),
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'))
def device_status(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.hub_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.hub_id,
nickname=self.nickname.encode('utf-8').title(),
device_model=metadata['device_model'],
date_added=metadata['date_added'],
zone=zone_req,
zone_nickname=zone_req['zone_nickname'],
bemoss=metadata['bemoss'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time,
last_offline=self.last_offline_time)
def data_dashboard(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.hub_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.hub_id,
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
date_added=metadata['date_added'],
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time)
def data_side_nav(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.hub_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.hub_id,
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize())
class MultiSensor(models.Model):
multi_sensor = models.ForeignKey(DeviceMetadata, max_length=50, primary_key=True)
acceleration = models.CharField(max_length=10, null=True, blank=True)
contact = models.CharField(max_length=10, null=True, blank=True)
battery = models.PositiveIntegerField(validators=[MinValueValidator(0), MaxValueValidator(100)], null=True,
blank=True)
temperature = models.IntegerField(null=True, blank=True)
lqi = models.IntegerField(null=True, blank=True)
rssi = models.IntegerField(null=True, blank=True)
three_axis = models.CharField(max_length=20, null=True, blank=True)
ip_address = models.IPAddressField()
nickname = models.CharField(max_length=30)
zone = models.ForeignKey(Building_Zone)
network_status = models.CharField(max_length=7)
other_parameters = models.CharField(max_length=200, null=True, blank=True)
last_scanned_time = models.DateTimeField(null=True, blank=True)
last_offline_time = models.DateTimeField(null=True, blank=True)
class Meta:
db_table = "multi_sensor"
def __unicode__(self):
return self.multi_sensor_id
def data_as_json(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.multi_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.multi_sensor_id,
acceleration=self.acceleration,
contact=self.contact,
battery=self.battery,
temperature=self.temperature,
lqi=self.lqi,
rssi=self.rssi,
three_axis=self.three_axis,
zone=zone_req,
nickname=self.nickname.encode('utf-8').title(),
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
bemoss=metadata['bemoss'],
mac_address=metadata['factory_id'].encode('utf-8'))
def device_status(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.multi_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.multi_sensor_id,
nickname=self.nickname.encode('utf-8').title(),
device_model=metadata['device_model'],
date_added=metadata['date_added'],
zone=zone_req,
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time,
last_offline=self.last_offline_time)
def data_dashboard(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.multi_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.multi_sensor_id,
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['factory_id'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
date_added=metadata['date_added'],
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time)
def data_side_nav(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.multi_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.multi_sensor_id,
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize())
class PresenceSensor(models.Model):
presence_sensor = models.ForeignKey(DeviceMetadata, max_length=50, primary_key=True)
presence = models.CharField(max_length=10, null=True, blank=True)
battery = models.PositiveIntegerField(validators=[MinValueValidator(0), MaxValueValidator(100)], null=True,
blank=True)
lqi = models.IntegerField(null=True, blank=True)
rssi = models.IntegerField(null=True, blank=True)
ip_address = models.IPAddressField()
nickname = models.CharField(max_length=30)
zone = models.ForeignKey(Building_Zone)
network_status = models.CharField(max_length=7)
other_parameters = models.CharField(max_length=200, null=True, blank=True)
last_scanned_time = models.DateTimeField(null=True, blank=True)
last_offline_time = models.DateTimeField(null=True, blank=True)
class Meta:
db_table = "presence_sensor"
def __unicode__(self):
return self.presence_sensor_id
def data_as_json(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.presence_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.presence_sensor_id,
presence=self.presence,
battery=self.battery,
lqi=self.lqi,
rssi=self.rssi,
zone=zone_req,
nickname=self.nickname.encode('utf-8').title(),
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
bemoss=metadata['bemoss'],
mac_address=metadata['factory_id'].encode('utf-8'))
def device_status(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.presence_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
id=self.presence_sensor_id,
nickname=self.nickname.encode('utf-8').title(),
device_model=metadata['device_model'],
date_added=metadata['date_added'],
zone=zone_req,
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time,
last_offline=self.last_offline_time)
def data_dashboard(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.presence_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.presence_sensor_id,
device_type=metadata['device_type'].encode('utf-8'),
vendor_name=metadata['vendor_name'].encode('utf-8'),
device_model=metadata['device_model'].encode('utf-8'),
device_model_id=metadata['device_model_id'],
mac_address=metadata['factory_id'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
date_added=metadata['date_added'],
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize(),
last_scanned=self.last_scanned_time)
def data_side_nav(self):
zone_req = Building_Zone.as_json(self.zone)
device_info = DeviceMetadata.objects.get(device_id=self.presence_sensor_id)
metadata = DeviceMetadata.data_as_json(device_info)
return dict(
device_id=self.presence_sensor_id,
device_model_id=metadata['device_model_id'],
mac_address=metadata['mac_address'].encode('utf-8'),
nickname=self.nickname.encode('utf-8').title(),
zone_id=zone_req['id'],
bemoss=metadata['bemoss'],
zone_nickname=zone_req['zone_nickname'],
network_status=self.network_status.capitalize())
| 47.940171
| 119
| 0.675165
| 3,445
| 28,045
| 5.237155
| 0.08447
| 0.017515
| 0.043232
| 0.042401
| 0.837878
| 0.833777
| 0.81277
| 0.81277
| 0.81277
| 0.798526
| 0
| 0.008694
| 0.216616
| 28,045
| 584
| 120
| 48.02226
| 0.812472
| 0.093386
| 0
| 0.89002
| 0
| 0
| 0.079066
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0611
| false
| 0
| 0.00611
| 0.01222
| 0.291242
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
673be4ea27d8b5ca94019b00990c971ff0157af4
| 21,560
|
py
|
Python
|
tests/config/test_data.py
|
titu1994/pyshac
|
63edafb8b80a9d2dec7c27b023569df56a659894
|
[
"MIT"
] | 20
|
2018-06-29T05:32:10.000Z
|
2022-02-02T17:12:41.000Z
|
tests/config/test_data.py
|
titu1994/pyshac
|
63edafb8b80a9d2dec7c27b023569df56a659894
|
[
"MIT"
] | 9
|
2018-08-20T18:00:13.000Z
|
2019-01-09T20:36:45.000Z
|
tests/config/test_data.py
|
titu1994/pyshac
|
63edafb8b80a9d2dec7c27b023569df56a659894
|
[
"MIT"
] | 6
|
2018-08-13T15:15:14.000Z
|
2021-08-05T01:52:52.000Z
|
import os
import shutil
import six
import pytest
import numpy as np
from pyshac.config import hyperparameters as hp, data
# compatible with both Python 2 and 3
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
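# decorator that seeds NumPy's RNG so the sampled hyperparameters (and hence the
# values asserted in the tests below) are reproducible, then re-seeds from fresh
# entropy (np.random.seed(None)) once the wrapped test returns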
def deterministic_test(func):
@six.wraps(func)
def wrapper(*args, **kwargs):
np.random.seed(0)
output = func(*args, **kwargs)
np.random.seed(None)
return output
return wrapper
# wrapper function to clean up saved files
def cleanup_dirs(func):
@six.wraps(func)
def wrapper(*args, **kwargs):
output = func(*args, **kwargs)
# remove temporary files
if os.path.exists('shac/'):
shutil.rmtree('shac/')
if os.path.exists('custom/'):
shutil.rmtree('custom/')
return output
return wrapper
def get_hyperparameter_list():
h1 = hp.DiscreteHyperParameter('h1', [0, 1, 2])
h2 = hp.DiscreteHyperParameter('h2', [3, 4, 5, 6])
h3 = hp.UniformContinuousHyperParameter('h3', 7, 10)
h4 = hp.DiscreteHyperParameter('h4', ['v1', 'v2'])
return [h1, h2, h3, h4]
def get_multi_parameter_list():
h1 = hp.MultiDiscreteHyperParameter('h1', [0, 1, 2], sample_count=2)
h2 = hp.MultiDiscreteHyperParameter('h2', [3, 4, 5, 6], sample_count=3)
h3 = hp.MultiUniformContinuousHyperParameter('h3', 7, 10, sample_count=5)
h4 = hp.MultiDiscreteHyperParameter('h4', ['v1', 'v2'], sample_count=4)
return [h1, h2, h3, h4]
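# note on dimensionality: the plain parameter list yields one value per parameter
# (4 columns per sample), while the multi parameters draw sample_count values each,
# i.e. 2 + 3 + 5 + 4 = 14 columns per sample -- this is why the shape assertions
# below expect (N, 4) and (N, 14) respectively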
@cleanup_dirs
def test_dataset_param_list():
params = get_hyperparameter_list()
dataset = data.Dataset(params)
assert isinstance(dataset._parameters, hp.HyperParameterList)
dataset.set_parameters(params)
assert isinstance(dataset._parameters, hp.HyperParameterList)
h = hp.HyperParameterList(params)
dataset.set_parameters(h)
assert isinstance(dataset._parameters, hp.HyperParameterList)
@cleanup_dirs
def test_dataset_multi_param_list():
params = get_multi_parameter_list()
dataset = data.Dataset(params)
assert isinstance(dataset._parameters, hp.HyperParameterList)
dataset.set_parameters(params)
assert isinstance(dataset._parameters, hp.HyperParameterList)
h = hp.HyperParameterList(params)
dataset.set_parameters(h)
assert isinstance(dataset._parameters, hp.HyperParameterList)
@cleanup_dirs
def test_dataset_basedir():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
assert os.path.exists(dataset.basedir)
@cleanup_dirs
def test_dataset_basedir_custom():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h, basedir='custom')
assert os.path.exists(dataset.basedir)
assert not os.path.exists('shac')
@cleanup_dirs
def test_dataset_add_sample():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
x, y = dataset.get_dataset()
assert len(dataset) == 5
assert x.shape == (5, 4)
assert y.shape == (5,)
@cleanup_dirs
def test_dataset_multi_add_sample():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
x, y = dataset.get_dataset()
assert len(dataset) == 5
assert x.shape == (5, 14)
assert y.shape == (5,)
@cleanup_dirs
def test_set_dataset():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
# numpy arrays
samples = [(np.array(h.sample()), np.random.uniform()) for _ in range(5)]
x, y = zip(*samples)
x = np.array(x)
y = np.array(y)
dataset.set_dataset(x, y)
assert len(dataset) == 5
dataset.clear()
# python arrays
samples = [(h.sample(), float(np.random.uniform())) for _ in range(5)]
x, y = zip(*samples)
dataset.set_dataset(x, y)
assert len(dataset) == 5
# None data
with pytest.raises(TypeError):
dataset.set_dataset(None, int(6))
with pytest.raises(TypeError):
dataset.set_dataset([1, 2, 3], None)
with pytest.raises(TypeError):
dataset.set_dataset(None, None)
@cleanup_dirs
def test_multi_set_dataset():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
# numpy arrays
samples = [(np.array(h.sample()), np.random.uniform()) for _ in range(5)]
x, y = zip(*samples)
x = np.array(x)
y = np.array(y)
dataset.set_dataset(x, y)
assert len(dataset) == 5
dataset.clear()
# python arrays
samples = [(h.sample(), float(np.random.uniform())) for _ in range(5)]
x, y = zip(*samples)
dataset.set_dataset(x, y)
assert len(dataset) == 5
# None data
with pytest.raises(TypeError):
dataset.set_dataset(None, int(6))
with pytest.raises(TypeError):
dataset.set_dataset([1, 2, 3], None)
with pytest.raises(TypeError):
dataset.set_dataset(None, None)
@cleanup_dirs
@deterministic_test
def test_dataset_get_best_parameters():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
with pytest.raises(ValueError):
dataset.get_best_parameters(None)
# Test with empty dataset
assert dataset.get_best_parameters() is None
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
objective_values = [v for h, v in samples]
min_index = np.argmin(objective_values)
max_index = np.argmax(objective_values)
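# brute-force reference: the best parameters reported by the dataset should match
# the sample whose recorded objective value is the maximum / minimum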
max_hp = list(dataset.get_best_parameters(objective='max').values())
min_hp = list(dataset.get_best_parameters(objective='min').values())
assert max_hp == samples[max_index][0]
assert min_hp == samples[min_index][0]
@cleanup_dirs
@deterministic_test
def test_dataset_multi_get_best_parameters():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
with pytest.raises(ValueError):
dataset.get_best_parameters(None)
# Test with empty dataset
assert dataset.get_best_parameters() is None
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
objective_values = [v for h, v in samples]
min_index = np.argmin(objective_values)
max_index = np.argmax(objective_values)
max_hp = data.flatten_parameters(dataset.get_best_parameters(objective='max'))
min_hp = data.flatten_parameters(dataset.get_best_parameters(objective='min'))
assert max_hp == samples[max_index][0]
assert min_hp == samples[min_index][0]
@cleanup_dirs
def test_dataset_parameters():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
assert len(params) == len(dataset.parameters)
dataset.parameters = params
assert len(params) == len(dataset.parameters)
@cleanup_dirs
def test_dataset_serialization_deserialization():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
# serialization
dataset.save_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization
dataset.clear()
assert len(dataset) == 0
dataset.restore_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization from class
path = os.path.join('shac', 'datasets')
dataset2 = data.Dataset.load_from_directory(path)
assert dataset2.parameters is not None
assert len(dataset2.X) == 5
assert len(dataset2.Y) == 5
assert len(dataset2) == 5
dataset3 = data.Dataset.load_from_directory()
assert dataset3.parameters is not None
assert len(dataset3.X) == 5
assert len(dataset3.Y) == 5
# serialization of empty get_dataset
dataset = data.Dataset()
with pytest.raises(FileNotFoundError):
dataset.load_from_directory('null')
with pytest.raises(ValueError):
dataset.save_dataset()
@cleanup_dirs
def test_dataset_multi_serialization_deserialization():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
# serialization
dataset.save_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization
dataset.clear()
assert len(dataset) == 0
dataset.restore_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization from class
path = os.path.join('shac', 'datasets')
dataset2 = data.Dataset.load_from_directory(path)
assert dataset2.parameters is not None
assert len(dataset2.X) == 5
assert len(dataset2.Y) == 5
assert len(dataset2) == 5
dataset3 = data.Dataset.load_from_directory()
assert dataset3.parameters is not None
assert len(dataset3.X) == 5
assert len(dataset3.Y) == 5
# serialization of empty get_dataset
dataset = data.Dataset()
with pytest.raises(FileNotFoundError):
dataset.load_from_directory('null')
with pytest.raises(ValueError):
dataset.save_dataset()
@cleanup_dirs
def test_dataset_serialization_deserialization_custom_basepath():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h, basedir='custom')
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
# serialization
dataset.save_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization
dataset.clear()
assert len(dataset) == 0
dataset.restore_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization from class
path = os.path.join('custom', 'datasets')
dataset2 = data.Dataset.load_from_directory(path)
assert dataset2.parameters is not None
assert len(dataset2.X) == 5
assert len(dataset2.Y) == 5
assert len(dataset2) == 5
dataset3 = data.Dataset.load_from_directory('custom')
assert dataset3.parameters is not None
assert len(dataset3.X) == 5
assert len(dataset3.Y) == 5
# serialization of empty get_dataset
dataset = data.Dataset(basedir='custom')
with pytest.raises(FileNotFoundError):
dataset.load_from_directory('null')
with pytest.raises(ValueError):
dataset.save_dataset()
@cleanup_dirs
def test_dataset_serialization_deserialization_custom_param():
class MockDiscreteHyperParameter(hp.DiscreteHyperParameter):
def __init__(self, name, values, seed=None):
super(MockDiscreteHyperParameter, self).__init__(name, values, seed)
# register the new hyper parameters
hp.set_custom_parameter_class(MockDiscreteHyperParameter)
params = get_hyperparameter_list()
params.append(MockDiscreteHyperParameter('mock-param', ['x', 'y']))
h = hp.HyperParameterList(params, seed=0)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
# serialization
dataset.save_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization
dataset.clear()
assert len(dataset) == 0
dataset.restore_dataset()
assert len(dataset) == 5
assert os.path.exists(dataset.data_path)
assert os.path.exists(dataset.parameter_path)
# deserialization from class
path = os.path.join('shac', 'datasets')
dataset2 = data.Dataset.load_from_directory(path)
assert dataset2.parameters is not None
assert len(dataset2.X) == 5
assert len(dataset2.Y) == 5
assert len(dataset2) == 5
assert 'mock-param' in dataset2.parameters.name_map.values()
assert dataset2.parameters.num_choices == 5
dataset3 = data.Dataset.load_from_directory()
assert dataset3.parameters is not None
assert len(dataset3.X) == 5
assert len(dataset3.Y) == 5
assert 'mock-param' in dataset3.parameters.name_map.values()
assert dataset3.parameters.num_choices == 5
# serialization of empty get_dataset
dataset = data.Dataset()
with pytest.raises(FileNotFoundError):
dataset.load_from_directory('null')
with pytest.raises(ValueError):
dataset.save_dataset()
@cleanup_dirs
@deterministic_test
def test_dataset_single_encoding_decoding():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
sample = (h.sample(), np.random.uniform())
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset()
y_values = [0.]
assert encoded_x.shape == (1, 4)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (1,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
assert decoded_x.shape == (1, 4)
@cleanup_dirs
@deterministic_test
def test_dataset_single_multi_encoding_decoding():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
sample = (h.sample(), np.random.uniform())
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset()
y_values = [0.]
assert encoded_x.shape == (1, 14)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (1,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
assert decoded_x.shape == (1, 14)
@cleanup_dirs
@deterministic_test
def test_dataset_single_encoding_decoding_min():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
sample = (h.sample(), np.random.uniform())
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset(objective='min')
y_values = [0.]
assert encoded_x.shape == (1, 4)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (1,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
assert decoded_x.shape == (1, 4)
@cleanup_dirs
@deterministic_test
def test_dataset_single_multi_encoding_decoding_min():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params)
dataset = data.Dataset(h)
sample = (h.sample(), np.random.uniform())
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset(objective='min')
y_values = [0.]
assert encoded_x.shape == (1, 14)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (1,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
assert decoded_x.shape == (1, 14)
@cleanup_dirs
@deterministic_test
def test_dataset_encoding_decoding():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params, seed=0)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset(objective='min')
y_values = [0., 0., 0., 1., 1.]
assert encoded_x.shape == (5, 4)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
decoded_x2 = dataset.decode_dataset()
assert decoded_x.shape == (5, 4)
assert len(decoded_x) == len(decoded_x2)
x, y = dataset.get_dataset()
x_ = x[:, :3].astype('float')
decoded_x_ = decoded_x[:, :3].astype('float')
assert np.allclose(x_, decoded_x_, rtol=1e-3)
samples2 = [(h.sample(), np.random.uniform()) for _ in range(5)]
x, y = zip(*samples2)
encoded_x, encoded_y = dataset.encode_dataset(x, y, objective='min')
y_values = [0., 1., 0., 0., 1.]
assert encoded_x.shape == (5, 4)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
@cleanup_dirs
@deterministic_test
def test_dataset_multi_encoding_decoding():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params, seed=0)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset(objective='min')
y_values = [0., 0., 0., 1., 1.]
assert encoded_x.shape == (5, 14)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
decoded_x2 = dataset.decode_dataset()
assert decoded_x.shape == (5, 14)
assert len(decoded_x) == len(decoded_x2)
x, y = dataset.get_dataset()
x_ = x[:, :10].astype('float')
decoded_x_ = decoded_x[:, :10].astype('float')
assert np.allclose(x_, decoded_x_, rtol=1e-3)
samples2 = [(h.sample(), np.random.uniform()) for _ in range(5)]
x, y = zip(*samples2)
encoded_x, encoded_y = dataset.encode_dataset(x, y, objective='min')
y_values = [0., 1., 0., 0., 1.]
assert encoded_x.shape == (5, 14)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
@cleanup_dirs
@deterministic_test
def test_dataset_encoding_decoding_min():
params = get_hyperparameter_list()
h = hp.HyperParameterList(params, seed=0)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset(objective='min')
y_values = [0., 0., 0., 1., 1.]
assert encoded_x.shape == (5, 4)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
assert decoded_x.shape == (5, 4)
x, y = dataset.get_dataset()
x_ = x[:, :3].astype('float')
decoded_x_ = decoded_x[:, :3].astype('float')
assert np.allclose(x_, decoded_x_, rtol=1e-3)
samples2 = [(h.sample(), np.random.uniform()) for _ in range(5)]
x, y = zip(*samples2)
encoded_x, encoded_y = dataset.encode_dataset(x, y, objective='min')
y_values = [0., 1., 0., 0., 1.]
assert encoded_x.shape == (5, 4)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
@cleanup_dirs
@deterministic_test
def test_dataset_multi_encoding_decoding_min():
params = get_multi_parameter_list()
h = hp.HyperParameterList(params, seed=0)
dataset = data.Dataset(h)
samples = [(h.sample(), np.random.uniform()) for _ in range(5)]
for sample in samples:
dataset.add_sample(*sample)
encoded_x, encoded_y = dataset.encode_dataset(objective='min')
y_values = [0., 0., 0., 1., 1.]
assert encoded_x.shape == (5, 14)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
assert np.allclose(y_values, encoded_y, rtol=1e-3)
decoded_x = dataset.decode_dataset(encoded_x)
assert decoded_x.shape == (5, 14)
x, y = dataset.get_dataset()
x_ = x[:, :10].astype('float')
decoded_x_ = decoded_x[:, :10].astype('float')
assert np.allclose(x_, decoded_x_, rtol=1e-3)
samples2 = [(h.sample(), np.random.uniform()) for _ in range(5)]
x, y = zip(*samples2)
encoded_x, encoded_y = dataset.encode_dataset(x, y, objective='min')
y_values = [0., 1., 0., 0., 1.]
assert encoded_x.shape == (5, 14)
assert encoded_x.dtype == np.float64
assert encoded_y.shape == (5,)
assert encoded_y.dtype == np.float64
print(encoded_y)
assert np.allclose(y_values, encoded_y, rtol=1e-3)
if __name__ == '__main__':
pytest.main([__file__])
| 27.5
| 82
| 0.68103
| 2,876
| 21,560
| 4.910292
| 0.058414
| 0.027758
| 0.034414
| 0.043974
| 0.898102
| 0.878275
| 0.864042
| 0.852712
| 0.840674
| 0.832885
| 0
| 0.022395
| 0.194341
| 21,560
| 783
| 83
| 27.535121
| 0.790616
| 0.028896
| 0
| 0.859583
| 0
| 0
| 0.012627
| 0
| 0
| 0
| 0
| 0
| 0.305503
| 1
| 0.056926
| false
| 0
| 0.011385
| 0
| 0.081594
| 0.001898
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
676947a9d0b90e8634d1764216af93de768e425b
| 222,397
|
py
|
Python
|
pelicun/tests/test_control.py
|
dnicruz/pelicun
|
74ed52acfe8d5a47cc553586ff0c9f89c4094351
|
[
"BSD-3-Clause"
] | null | null | null |
pelicun/tests/test_control.py
|
dnicruz/pelicun
|
74ed52acfe8d5a47cc553586ff0c9f89c4094351
|
[
"BSD-3-Clause"
] | null | null | null |
pelicun/tests/test_control.py
|
dnicruz/pelicun
|
74ed52acfe8d5a47cc553586ff0c9f89c4094351
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
# This file is part of pelicun.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# You should have received a copy of the BSD 3-Clause License along with
# pelicun. If not, see <http://www.opensource.org/licenses/>.
#
# Contributors:
# Adam Zsarnóczay
"""
This subpackage performs system tests on the control module of pelicun.
"""
import pytest
import numpy as np
from numpy.testing import assert_allclose
from scipy.stats import truncnorm as tnorm
from copy import deepcopy
import os, sys, inspect
current_dir = os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0,os.path.dirname(parent_dir))
from pelicun.control import *
from pelicun.uq import mvn_orthotope_density as mvn_od
from pelicun.tests.test_pelicun import prob_allclose, prob_approx
# -----------------------------------------------------------------------------
# FEMA_P58_Assessment
# -----------------------------------------------------------------------------
def test_FEMA_P58_Assessment_central_tendencies():
"""
Perform a loss assessment with customized inputs that reduce the
dispersion of calculation parameters to negligible levels. This allows us
to test the results against pre-defined reference values in spite of the
randomness involved in the calculations.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())[0]
assert RV_EDP.theta[0] == pytest.approx(0.5 * g)
assert RV_EDP.theta[1] == pytest.approx(0.5 * g * 1e-6, abs=1e-7)
assert RV_EDP._distribution == 'lognormal'
# QNT
assert A._QNT_dict is None
#RV_QNT = A._RV_dict['QNT']
#assert RV_QNT is None
# FRG
RV_FRG = list(A._FF_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_FRG]).T
assert_allclose(thetas, np.array([0.444, 0.6, 0.984]) * g, rtol=0.01)
assert_allclose(betas, np.array([0.3, 0.4, 0.5]), rtol=0.01)
rho = RV_FRG[0].RV_set.Rho()
assert_allclose(rho, np.ones((3, 3)), rtol=0.01)
assert np.all([rv.distribution == 'lognormal' for rv in RV_FRG])
# RED
RV_RED = list(A._DV_RED_dict.values())
mus, sigmas = np.array([rv.theta for rv in RV_RED]).T
assert_allclose(mus, np.ones(2), rtol=0.01)
assert_allclose(sigmas, np.array([1e-4, 1e-4]), rtol=0.01)
rho = RV_RED[0].RV_set.Rho()
assert_allclose(rho, np.array([[1, 0], [0, 1]]), rtol=0.01)
assert np.all([rv.distribution == 'normal' for rv in RV_RED])
assert_allclose(RV_RED[0].truncation_limits, [0., 2.], rtol=0.01)
assert_allclose(RV_RED[1].truncation_limits, [0., 4.], rtol=0.01)
# INJ
RV_INJ = list(A._DV_INJ_dict.values())
mus, sigmas = np.array([rv.theta for rv in RV_INJ]).T
assert_allclose(mus, np.ones(4), rtol=0.01)
assert_allclose(sigmas, np.ones(4) * 1e-4, rtol=0.01)
rho = RV_INJ[0].RV_set.Rho()
rho_target = np.zeros((4, 4))
np.fill_diagonal(rho_target, 1.)
assert_allclose(rho, rho_target, rtol=0.01)
assert np.all([rv.distribution == 'normal' for rv in RV_INJ])
assert_allclose(RV_INJ[0].truncation_limits, [0., 10./3.], rtol=0.01)
assert_allclose(RV_INJ[1].truncation_limits, [0., 10./3.], rtol=0.01)
assert_allclose(RV_INJ[2].truncation_limits, [0., 10.], rtol=0.01)
assert_allclose(RV_INJ[3].truncation_limits, [0., 10.], rtol=0.01)
# REP
RV_REP = list(A._DV_REP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_REP]).T
assert_allclose(thetas, np.ones(6), rtol=0.01)
assert_allclose(betas, np.ones(6) * 1e-4, rtol=0.01)
rho = RV_REP[0].RV_set.Rho()
rho_target = np.zeros((6, 6))
np.fill_diagonal(rho_target, 1.)
assert_allclose(rho, rho_target, rtol=0.01)
assert np.all([rv.distribution == 'lognormal' for rv in RV_REP])
# ------------------------------------------------------------------------
A.define_loss_model()
# QNT (deterministic)
QNT = A._FG_dict['T0001.001']._performance_groups[0]._quantity
assert QNT == pytest.approx(50., rel=0.01)
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# TIME
T_check = A._TIME.describe().T.loc[['hour','month','weekday?'],:]
assert_allclose(T_check['mean'], np.array([11.5, 5.5, 5. / 7.]), rtol=0.05)
assert_allclose(T_check['min'], np.array([0., 0., 0.]), rtol=0.01)
assert_allclose(T_check['max'], np.array([23., 11., 1.]), rtol=0.01)
assert_allclose(T_check['50%'], np.array([12., 5., 1.]), atol=1.0)
assert_allclose(T_check['count'], np.array([10000., 10000., 10000.]),
rtol=0.01)
# POP
P_CDF = A._POP.describe(np.arange(1, 27) / 27.).iloc[:, 0].values[4:]
vals, counts = np.unique(P_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 2.5, 5., 10.]), rtol=0.01)
assert_allclose(counts, np.array([14, 2, 7, 5]), atol=1)
# COL
COL_check = A._COL.describe().T
assert COL_check['mean'].values[0] == pytest.approx(0.5, rel=0.05)
assert len(A._ID_dict['non-collapse']) == pytest.approx(5000, rel=0.05)
assert len(A._ID_dict['collapse']) == pytest.approx(5000, rel=0.05)
# DMG
DMG_check = A._DMG.describe().T
assert_allclose(DMG_check['mean'], np.array([17.074, 17.074, 7.9361]),
rtol=0.1, atol=1.0)
assert_allclose(DMG_check['min'], np.zeros(3), rtol=0.01)
assert_allclose(DMG_check['max'], np.ones(3) * 50.0157, rtol=0.05)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
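# The recurring reference probabilities 0.3413445 and 0.1586555 appear to be the
# standard-normal band probabilities Phi(1) - Phi(0) ~= 0.3413 and 1 - Phi(1) ~= 0.1587,
# consistent with the negligible-dispersion setup described in the docstring above.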
# RED
DV_RED = A._DV_dict['red_tag'].describe().T
assert_allclose(DV_RED['mean'], np.array([0.341344, 0.1586555]), rtol=0.1)
# INJ - collapse
DV_INJ_C = deepcopy(A._COL[['INJ-0', 'INJ-1']])
DV_INJ_C.dropna(inplace=True)
NC_count = DV_INJ_C.describe().T['count'][0]
assert_allclose(NC_count, np.ones(2) * 5000, rtol=0.05)
# lvl 1
vals, counts = np.unique(DV_INJ_C.iloc[:, 0].values, return_counts=True)
assert_allclose(vals, np.array([0., 2.5, 5., 10.]) * 0.1, rtol=0.01)
assert_allclose(counts / NC_count, np.array([14, 2, 7, 5]) / 28., atol=0.01, rtol=0.1)
# lvl 2
vals, counts = np.unique(DV_INJ_C.iloc[:, 1].values, return_counts=True)
assert_allclose(vals, np.array([0., 2.5, 5., 10.]) * 0.9, rtol=0.01)
assert_allclose(counts / NC_count, np.array([14, 2, 7, 5]) / 28., atol=0.01, rtol=0.1)
# INJ - non-collapse
DV_INJ_NC = deepcopy(A._DV_dict['injuries'])
DV_INJ_NC[0].dropna(inplace=True)
assert_allclose(DV_INJ_NC[0].describe().T['count'], np.ones(2) * 5000,
rtol=0.05)
# lvl 1 DS2
I_CDF = DV_INJ_NC[0].iloc[:, 0]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.075, 0.15, 0.3]), rtol=0.01)
target_prob = np.array(
[0.6586555, 0., 0., 0.] + 0.3413445 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# lvl 1 DS3
I_CDF = DV_INJ_NC[0].iloc[:, 1]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.075, 0.15, 0.3]), rtol=0.01)
target_prob = np.array(
[0.8413445, 0., 0., 0.] + 0.1586555 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# lvl 2 DS2
I_CDF = DV_INJ_NC[1].iloc[:, 0]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.025, 0.05, 0.1]), rtol=0.01)
target_prob = np.array(
[0.6586555, 0., 0., 0.] + 0.3413445 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# lvl2 DS3
I_CDF = DV_INJ_NC[1].iloc[:, 1]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.025, 0.05, 0.1]), rtol=0.01)
target_prob = np.array(
[0.8413445, 0., 0., 0.] + 0.1586555 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# REP
assert len(A._ID_dict['non-collapse']) == len(A._ID_dict['repairable'])
assert len(A._ID_dict['irreparable']) == 0
# cost
DV_COST = A._DV_dict['rec_cost']
# DS1
C_CDF = DV_COST.iloc[:, 0]
C_CDF = np.around(C_CDF / 10., decimals=0) * 10.
vals, counts = np.unique(C_CDF, return_counts=True)
assert_allclose(vals, [0, 2500], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS2
C_CDF = DV_COST.iloc[:, 1]
C_CDF = np.around(C_CDF / 100., decimals=0) * 100.
vals, counts = np.unique(C_CDF, return_counts=True)
assert_allclose(vals, [0, 25000], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS3
C_CDF = DV_COST.iloc[:, 2]
C_CDF = np.around(C_CDF / 1000., decimals=0) * 1000.
vals, counts = np.unique(C_CDF, return_counts=True)
assert_allclose(vals, [0, 250000], rtol=0.01)
t_prob = 0.1586555
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# time
DV_TIME = A._DV_dict['rec_time']
# DS1
T_CDF = DV_TIME.iloc[:, 0]
T_CDF = np.around(T_CDF, decimals=1)
vals, counts = np.unique(T_CDF, return_counts=True)
assert_allclose(vals, [0, 2.5], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS2
T_CDF = DV_TIME.iloc[:, 1]
T_CDF = np.around(T_CDF, decimals=0)
vals, counts = np.unique(T_CDF, return_counts=True)
assert_allclose(vals, [0, 25], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS3
T_CDF = DV_TIME.iloc[:, 2]
T_CDF = np.around(T_CDF / 10., decimals=0) * 10.
vals, counts = np.unique(T_CDF, return_counts=True)
assert_allclose(vals, [0, 250], rtol=0.01)
t_prob = 0.1586555
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
S = A._SUMMARY
SD = S.describe().T
assert_allclose(S[('event time', 'month')], A._TIME['month'] + 1)
assert_allclose(S[('event time', 'weekday?')], A._TIME['weekday?'])
assert_allclose(S[('event time', 'hour')], A._TIME['hour'])
assert_allclose(S[('inhabitants', '')], A._POP.iloc[:, 0])
assert SD.loc[('collapses', 'collapsed'), 'mean'] == pytest.approx(0.5,
rel=0.05)
assert SD.loc[('collapses', 'mode'), 'mean'] == 0.
assert SD.loc[('collapses', 'mode'), 'count'] == pytest.approx(5000,
rel=0.05)
assert SD.loc[('red tagged', ''), 'mean'] == pytest.approx(0.5, rel=0.05)
assert SD.loc[('red tagged', ''), 'count'] == pytest.approx(5000, rel=0.05)
for col in ['irreparable', 'cost impractical', 'time impractical']:
assert SD.loc[('reconstruction', col), 'mean'] == 0.
assert SD.loc[('reconstruction', col), 'count'] == pytest.approx(5000,
rel=0.05)
RC = deepcopy(S.loc[:, ('reconstruction', 'cost')])
RC_CDF = np.around(RC / 1000., decimals=0) * 1000.
vals, counts = np.unique(RC_CDF, return_counts=True)
assert_allclose(vals, np.array([0, 2., 3., 25., 250., 300.]) * 1000.)
t_prob1 = 0.3413445 / 2.
t_prob2 = 0.1586555 / 2.
assert_allclose(counts / 10000.,
[t_prob2, t_prob1 / 2., t_prob1 / 2., t_prob1, t_prob2,
0.5], atol=0.01, rtol=0.1)
RT = deepcopy(S.loc[:, ('reconstruction', 'time-parallel')])
RT_CDF = np.around(RT, decimals=0)
vals, counts = np.unique(RT_CDF, return_counts=True)
assert_allclose(vals, np.array([0, 2., 3., 25., 250., 300.]))
t_prob1 = 0.3413445 / 2.
t_prob2 = 0.1586555 / 2.
assert_allclose(counts / 10000.,
[t_prob2, t_prob1 / 2., t_prob1 / 2., t_prob1, t_prob2,
0.5], atol=0.01, rtol=0.1)
assert_allclose(S.loc[:, ('reconstruction', 'time-parallel')],
S.loc[:, ('reconstruction', 'time-sequential')])
CAS = deepcopy(S.loc[:, ('injuries', 'sev1')])
CAS_CDF = np.around(CAS, decimals=3)
vals, counts = np.unique(CAS_CDF, return_counts=True)
assert_allclose(vals, [0, 0.075, 0.15, 0.25, 0.3, 0.5, 1.])
assert_allclose(counts / 10000.,
np.array([35, 1, 3.5, 2, 2.5, 7, 5]) / 56., atol=0.01,
rtol=0.1)
CAS = deepcopy(S.loc[:, ('injuries', 'sev2')])
CAS_CDF = np.around(CAS, decimals=3)
vals, counts = np.unique(CAS_CDF, return_counts=True)
assert_allclose(vals, [0, 0.025, 0.05, 0.1, 2.25, 4.5, 9.])
assert_allclose(counts / 10000.,
np.array([35, 1, 3.5, 2.5, 2, 7, 5]) / 56., atol=0.01,
rtol=0.1)
def test_FEMA_P58_Assessment_EDP_uncertainty_basic():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_2.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_2.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
assert_allclose(thetas, [9.80665, 12.59198, 0.074081, 0.044932], rtol=0.02)
assert_allclose(betas, [0.25, 0.25, 0.3, 0.4], rtol=0.02)
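# the four EDPs appear to be two peak floor accelerations (theta ~ 1.0 g and 1.28 g,
# i.e. 9.80665 and 12.59198 m/s2) followed by two interstory drift ratios
# (0.074081 and 0.044932), which explains the 2x2 block structure of rho_target below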
rho = RV_EDP[0].RV_set.Rho()
rho_target = [
[1.0, 0.6, 0.3, 0.3],
[0.6, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.7],
[0.3, 0.3, 0.7, 1.0]]
assert_allclose(rho, rho_target, atol=0.05)
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
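# mvn_od (mvn_orthotope_density) returns the probability mass of the multivariate
# normal between the given lower/upper bounds; evaluated on the log-EDPs it gives
# the joint-lognormal box probabilities used as analytical targets here. The collapse
# target below is the complement of both drifts staying under 0.1, the upper bound
# used in the integral.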
# COL
COL_check = A._COL.describe().T
col_target = 1.0 - mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer(
[0.3, 0.4], [0.3, 0.4]),
upper=np.log([0.1, 0.1]))[0]
assert COL_check['mean'].values[0] == pytest.approx(col_target, rel=0.1)
# DMG
DMG_check = [len(np.where(A._DMG.iloc[:, i] > 0.0)[0]) / 10000. for i in
range(8)]
DMG_1_PID = mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.1]))[
0]
DMG_2_PID = mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 0.05488]), upper=np.log([0.1, 0.1]))[
0]
DMG_1_PFA = mvn_od(np.log([0.074081, 9.80665]),
np.array([[1, 0.3], [0.3, 1]]) * np.outer([0.3, 0.25],
[0.3, 0.25]),
lower=np.log([1e-6, 9.80665]),
upper=np.log([0.1, np.inf]))[0]
DMG_2_PFA = mvn_od(np.log([0.074081, 12.59198]),
np.array([[1, 0.3], [0.3, 1]]) * np.outer([0.3, 0.25],
[0.3, 0.25]),
lower=np.log([1e-6, 9.80665]),
upper=np.log([0.1, np.inf]))[0]
assert DMG_check[0] == pytest.approx(DMG_check[1], rel=0.01)
assert DMG_check[2] == pytest.approx(DMG_check[3], rel=0.01)
assert DMG_check[4] == pytest.approx(DMG_check[5], rel=0.01)
assert DMG_check[6] == pytest.approx(DMG_check[7], rel=0.01)
assert DMG_check[0] == pytest.approx(DMG_1_PID, rel=0.10)
assert DMG_check[2] == pytest.approx(DMG_2_PID, rel=0.10)
assert DMG_check[4] == pytest.approx(DMG_1_PFA, rel=0.10)
assert DMG_check[6] == pytest.approx(DMG_2_PFA, rel=0.10)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# COST
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
C_target = [0., 250., 1250.]
T_target = [0., 0.25, 1.25]
# PG 1011 and 1012
P_target = [
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 1e-6]), upper=np.log([0.05488, 0.1]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
]
for i in [0, 1]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 1021 and 1022
P_target = [
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 0.05488]), upper=np.log([0.05488, 0.1]))[0],
]
for i in [2, 3]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 2011 and 2012
P_target = [
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, 9.80665, np.inf]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 9.80665, 9.80665]),
upper=np.log([0.1, np.inf, np.inf]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 9.80665, 1e-6]),
upper=np.log([0.1, np.inf, 9.80665]))[0],
]
for i in [4, 5]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 2021 and 2022
P_target = [
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, np.inf, 9.80665]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 9.80665, 9.80665]),
upper=np.log([0.1, np.inf, np.inf]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 9.80665]),
upper=np.log([0.1, 9.80665, np.inf]))[0],
]
for i in [6, 7]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# RED TAG
RED_check = A._DV_dict['red_tag'].describe().T
RED_check = (RED_check['mean'] * RED_check['count'] / 10000.).values
assert RED_check[0] == pytest.approx(RED_check[1], rel=0.01)
assert RED_check[2] == pytest.approx(RED_check[3], rel=0.01)
assert RED_check[4] == pytest.approx(RED_check[5], rel=0.01)
assert RED_check[6] == pytest.approx(RED_check[7], rel=0.01)
assert RED_check[0] == pytest.approx(DMG_1_PID, rel=0.10)
assert RED_check[2] == pytest.approx(DMG_2_PID, rel=0.10)
assert RED_check[4] == pytest.approx(DMG_1_PFA, rel=0.10)
assert RED_check[6] == pytest.approx(DMG_2_PFA, rel=0.10)
DMG_on = np.where(A._DMG > 0.0)[0]
RED_on = np.where(A._DV_dict['red_tag'] > 0.0)[0]
assert_allclose(DMG_on, RED_on)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
P_no_RED_target = mvn_od(np.log([0.074081, 0.044932, 9.80665, 12.59198]),
np.array(
[[1.0, 0.7, 0.3, 0.3], [0.7, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.6],
[0.3, 0.3, 0.6, 1.0]]) * np.outer(
[0.3, 0.4, 0.25, 0.25],
[0.3, 0.4, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[0.05488, 0.05488, 9.80665, 9.80665]))[0]
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = (1.0 - SD.loc[('red tagged', ''), 'mean']) * SD.loc[
('red tagged', ''), 'count'] / 10000.
# compare the simulated and analytical no-red-tag probabilities (mirrors the
# corresponding check in the detection-limit test later in this file)
assert P_no_RED_target == prob_approx(P_no_RED_test, 0.04)
def test_FEMA_P58_Assessment_EDP_uncertainty_detection_limit():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
This test differs from the basic case in having unreliable EDP values above
a certain limit - a typical feature of interstory drifts in dynamic
simulations. Such cases should not be a problem if the limits can be
estimated and specified as detection limits in the input file.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_3.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_3.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
EDP_theta_target = [9.80665, 12.59198, 0.074081, 0.044932]
EDP_beta_target = [0.25, 0.25, 0.3, 0.4]
assert_allclose(EDP_theta_test, EDP_theta_target, rtol=0.025)
assert_allclose(EDP_beta_test, EDP_beta_target, rtol=0.1)
rho = RV_EDP[0].RV_set.Rho()
EDP_rho_test = rho
EDP_rho_target = [
[1.0, 0.6, 0.3, 0.3],
[0.6, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.7],
[0.3, 0.3, 0.7, 1.0]]
EDP_COV_test = EDP_rho_test * np.outer(EDP_beta_test, EDP_beta_test)
assert_allclose(EDP_rho_test, EDP_rho_target, atol=0.15)
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# COL
COL_check = A._COL.describe().T
col_target = 1.0 - mvn_od(np.log(EDP_theta_test[2:]),
EDP_COV_test[2:, 2:],
upper=np.log([0.1, 0.1]))[0]
assert COL_check['mean'].values[0] == prob_approx(col_target, 0.03)
# DMG
DMG_check = [len(np.where(A._DMG.iloc[:, i] > 0.0)[0]) / 10000.
for i in range(8)]
DMG_1_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 1e-6]),
upper=np.log([0.1, 0.1]))[0]
DMG_2_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]),
upper=np.log([0.1, 0.1]))[0]
DMG_1_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
DMG_2_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
assert DMG_check[0] == pytest.approx(DMG_check[1], rel=0.01)
assert DMG_check[2] == pytest.approx(DMG_check[3], rel=0.01)
assert DMG_check[4] == pytest.approx(DMG_check[5], rel=0.01)
assert DMG_check[6] == pytest.approx(DMG_check[7], rel=0.01)
assert DMG_check[0] == prob_approx(DMG_1_PID, 0.03)
assert DMG_check[2] == prob_approx(DMG_2_PID, 0.03)
assert DMG_check[4] == prob_approx(DMG_1_PFA, 0.03)
assert DMG_check[6] == prob_approx(DMG_2_PFA, 0.03)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# COST
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
C_target = [0., 250., 1250.]
T_target = [0., 0.25, 1.25]
# PG 1011 and 1012
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.05488, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
]
for i in [0, 1]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 1021 and 1022
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]), upper=np.log([0.05488, 0.1]))[0],
]
for i in [2, 3]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 2011 and 2012
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([9.80665, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, 9.80665, 0.1, 0.1]))[0],
]
for i in [4, 5]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 2021 and 2022
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, 9.80665, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6]),
upper=np.log([9.80665, np.inf, 0.1, 0.1]))[0],
]
for i in [6, 7]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# RED TAG
RED_check = A._DV_dict['red_tag'].describe().T
RED_check = (RED_check['mean'] * RED_check['count'] / 10000.).values
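# mean * count / 10000 recovers the share of realizations that received a
# red tag in each PG; these shares are expected to match the damage
# probabilities computed above.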
assert RED_check[0] == pytest.approx(RED_check[1], rel=0.01)
assert RED_check[2] == pytest.approx(RED_check[3], rel=0.01)
assert RED_check[4] == pytest.approx(RED_check[5], rel=0.01)
assert RED_check[6] == pytest.approx(RED_check[7], rel=0.01)
assert RED_check[0] == prob_approx(DMG_1_PID, 0.03)
assert RED_check[2] == prob_approx(DMG_2_PID, 0.03)
assert RED_check[4] == prob_approx(DMG_1_PFA, 0.03)
assert RED_check[6] == prob_approx(DMG_2_PFA, 0.03)
DMG_on = np.where(A._DMG > 0.0)[0]
RED_on = np.where(A._DV_dict['red_tag'] > 0.0)[0]
assert_allclose(DMG_on, RED_on)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
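# The share of realizations without a red tag is expected to match the
# probability that all EDPs stay below the lowest damage state threshold
# under the fitted lognormal EDP distribution.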
P_no_RED_target = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([9.80665, 9.80665, 0.05488, 0.05488]))[0]
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = ((1.0 - SD.loc[('red tagged', ''), 'mean'])
* SD.loc[('red tagged', ''), 'count'] / 10000.)
assert P_no_RED_target == prob_approx(P_no_RED_test, 0.04)
def test_FEMA_P58_Assessment_EDP_uncertainty_failed_analyses():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
Here we use EDP results with distinct placeholder values assigned to
failed analyses. In particular, PID=1.0 and PFA=100.0 are used when an
analysis fails. These values are expected to be handled by the detection
limits of 10 and 100 specified for PID and PFA, respectively.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_4.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_4.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
EDP_theta_target = [9.80665, 12.59198, 0.074081, 0.044932]
EDP_beta_target = [0.25, 0.25, 0.3, 0.4]
assert_allclose(EDP_theta_test, EDP_theta_target, rtol=0.025)
assert_allclose(EDP_beta_test, EDP_beta_target, rtol=0.1)
rho = RV_EDP[0].RV_set.Rho()
EDP_rho_test = rho
EDP_rho_target = [
[1.0, 0.6, 0.3, 0.3],
[0.6, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.7],
[0.3, 0.3, 0.7, 1.0]]
EDP_COV_test = EDP_rho_test * np.outer(EDP_beta_test, EDP_beta_test)
assert_allclose(EDP_rho_test, EDP_rho_target, atol=0.15)
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# COL
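# The target collapse probability is the complement of the probability
# that both peak interstory drifts stay below the 0.1 collapse limit
# under the fitted EDP distribution.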
COL_check = A._COL.describe().T
col_target = 1.0 - mvn_od(np.log(EDP_theta_test[2:]),
EDP_COV_test[2:,2:],
upper=np.log([0.1, 0.1]))[0]
assert COL_check['mean'].values[0] == prob_approx(col_target, 0.03)
# DMG
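# DMG_check collects the fraction of realizations with non-zero damage in
# each of the 8 performance groups; the reference values below are the
# corresponding orthant probabilities of the fitted EDP distribution
# (jointly with no collapse, i.e. both drifts below 0.1).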
DMG_check = [len(np.where(A._DMG.iloc[:, i] > 0.0)[0]) / 10000.
for i in range(8)]
DMG_1_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:,2:],
lower=np.log([0.05488, 1e-6]),
upper=np.log([0.1, 0.1]))[0]
DMG_2_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]),
upper=np.log([0.1, 0.1]))[0]
DMG_1_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
DMG_2_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
assert DMG_check[0] == pytest.approx(DMG_check[1], rel=0.01)
assert DMG_check[2] == pytest.approx(DMG_check[3], rel=0.01)
assert DMG_check[4] == pytest.approx(DMG_check[5], rel=0.01)
assert DMG_check[6] == pytest.approx(DMG_check[7], rel=0.01)
assert DMG_check[0] == prob_approx(DMG_1_PID, 0.03)
assert DMG_check[2] == prob_approx(DMG_2_PID, 0.03)
assert DMG_check[4] == prob_approx(DMG_1_PFA, 0.03)
assert DMG_check[6] == prob_approx(DMG_2_PFA, 0.03)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# COST
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
C_target = [0., 250., 1250.]
T_target = [0., 0.25, 1.25]
# PG 1011 and 1012
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.05488, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
]
for i in [0, 1]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 1021 and 1022
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]), upper=np.log([0.05488, 0.1]))[0],
]
for i in [2, 3]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 2011 and 2012
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([9.80665, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, 9.80665, 0.1, 0.1]))[0],
]
for i in [4, 5]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 2021 and 2022
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, 9.80665, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6]),
upper=np.log([9.80665, np.inf, 0.1, 0.1]))[0],
]
for i in [6, 7]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# RED TAG
RED_check = A._DV_dict['red_tag'].describe().T
RED_check = (RED_check['mean'] * RED_check['count'] / 10000.).values
assert RED_check[0] == pytest.approx(RED_check[1], rel=0.01)
assert RED_check[2] == pytest.approx(RED_check[3], rel=0.01)
assert RED_check[4] == pytest.approx(RED_check[5], rel=0.01)
assert RED_check[6] == pytest.approx(RED_check[7], rel=0.01)
assert RED_check[0] == prob_approx(DMG_1_PID, 0.03)
assert RED_check[2] == prob_approx(DMG_2_PID, 0.03)
assert RED_check[4] == prob_approx(DMG_1_PFA, 0.03)
assert RED_check[6] == prob_approx(DMG_2_PFA, 0.03)
DMG_on = np.where(A._DMG > 0.0)[0]
RED_on = np.where(A._DV_dict['red_tag'] > 0.0)[0]
assert_allclose(DMG_on, RED_on)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
P_no_RED_target = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([9.80665, 9.80665, 0.05488, 0.05488]))[0]
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = ((1.0 - SD.loc[('red tagged', ''), 'mean'])
* SD.loc[('red tagged', ''), 'count'] / 10000.)
assert P_no_RED_target == prob_approx(P_no_RED_test, 0.04)
def test_FEMA_P58_Assessment_EDP_uncertainty_3D():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
In this test we look at the propagation of EDP values provided for two
different directions. (3D refers to the numerical model used for response
estimation.)
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_5.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_5.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
EDP_theta_target = [9.80665, 8.65433, 12.59198, 11.11239,
0.074081, 0.063763, 0.044932, 0.036788]
EDP_beta_target = [0.25, 0.25, 0.25, 0.25, 0.3, 0.3, 0.4, 0.4]
assert_allclose(EDP_theta_test, EDP_theta_target, rtol=0.05)
assert_allclose(EDP_beta_test, EDP_beta_target, rtol=0.1)
rho = RV_EDP[0].RV_set.Rho()
EDP_rho_test = rho
EDP_rho_target = np.array([
[1.0, 0.8, 0.6, 0.5, 0.3, 0.3, 0.3, 0.3],
[0.8, 1.0, 0.5, 0.6, 0.3, 0.3, 0.3, 0.3],
[0.6, 0.5, 1.0, 0.8, 0.3, 0.3, 0.3, 0.3],
[0.5, 0.6, 0.8, 1.0, 0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3, 1.0, 0.8, 0.7, 0.6],
[0.3, 0.3, 0.3, 0.3, 0.8, 1.0, 0.6, 0.7],
[0.3, 0.3, 0.3, 0.3, 0.7, 0.6, 1.0, 0.8],
[0.3, 0.3, 0.3, 0.3, 0.6, 0.7, 0.8, 1.0]])
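# Stronger correlations (rho >= 0.5) are expected to be recovered more
# accurately from the sample, so they are checked with a tighter tolerance
# than the weaker ones.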
large_rho_ids = np.where(EDP_rho_target >= 0.5)
small_rho_ids = np.where(EDP_rho_target < 0.5)
assert_allclose(EDP_rho_test[large_rho_ids], EDP_rho_target[large_rho_ids],
atol=0.1)
assert_allclose(EDP_rho_test[small_rho_ids], EDP_rho_target[small_rho_ids],
atol=0.2)
EDP_COV_test = EDP_rho_test * np.outer(EDP_beta_test, EDP_beta_test)
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
theta_PID = np.log(EDP_theta_target[4:])
COV_PID = EDP_COV_test[4:, 4:]
# COL
COL_check = A._COL.describe().T
col_target = 1.0 - mvn_od(theta_PID, COV_PID,
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0]
assert COL_check['mean'].values[0] == pytest.approx(col_target, rel=0.1, abs=0.05)
# DMG
realization_count = float(A._AIM_in['general']['realizations'])
DMG_check = [len(np.where(A._DMG.iloc[:, i] > 0.0)[0]) / realization_count for i in
range(8)]
DMG_1_1_PID = mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0]
DMG_1_2_PID = mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 1e-6, 1e-6]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0]
DMG_2_1_PID = mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 1e-6, 0.05488, 1e-6]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0]
DMG_2_2_PID = mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 1e-6, 1e-6, 0.05488]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0]
DMG_1_1_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6,
1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, np.inf, np.inf,
0.1, 0.1, 0.1, 0.1]))[0]
DMG_1_2_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6,
1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, np.inf, np.inf,
0.1, 0.1, 0.1, 0.1]))[0]
DMG_2_1_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 9.80665, 1e-6,
1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, np.inf, np.inf,
0.1, 0.1, 0.1, 0.1]))[0]
DMG_2_2_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 9.80665,
1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, np.inf, np.inf,
0.1, 0.1, 0.1, 0.1]))[0]
DMG_ref = [DMG_1_1_PID, DMG_1_2_PID, DMG_2_1_PID, DMG_2_2_PID,
DMG_1_1_PFA, DMG_1_2_PFA, DMG_2_1_PFA, DMG_2_2_PFA]
assert_allclose(DMG_check, DMG_ref, rtol=0.10, atol=0.01)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# COST
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
C_target = [0., 249., 624., 1251., 1875.]
T_target = [0., 0.249, 0.624, 1.251, 1.875]
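# The discrete cost and time outcomes below depend on how many performance
# groups are damaged simultaneously, because the consequence functions are
# quantity-dependent; each P_target entry gives the probability of the
# corresponding combination of damaged PGs.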
# PG 1011
P_target = [
mvn_od(theta_PID, COV_PID, lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([0.05488, 0.1, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 0.05488, 0.05488]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 0.05488, 0.05488]),
upper=np.log([0.1, 0.05488, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 1e-6, 0.05488]),
upper=np.log([0.1, 0.1, 0.05488, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 0.05488, 1e-6]),
upper=np.log([0.1, 0.1, 0.1, 0.05488]))[0], ]),
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 1e-6, 0.05488]),
upper=np.log([0.1, 0.05488, 0.05488, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 1e-6, 1e-6]),
upper=np.log([0.1, 0.1, 0.05488, 0.05488]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 0.05488, 1e-6]),
upper=np.log([0.1, 0.05488, 0.1, 0.05488]))[0], ]),
mvn_od(theta_PID, COV_PID, lower=np.log([0.05488, 1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, 0.05488, 0.05488, 0.05488]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 0].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 0].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / realization_count
assert_allclose(P_target, P_test, atol=0.05)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 1012
P_target = [
mvn_od(theta_PID, COV_PID, lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, 0.05488, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 0.05488, 0.05488]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 0.05488, 0.05488]),
upper=np.log([0.05488, 0.1, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 1e-6, 0.05488]),
upper=np.log([0.1, 0.1, 0.05488, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 0.05488, 1e-6]),
upper=np.log([0.1, 0.1, 0.1, 0.05488]))[0], ]),
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 1e-6, 0.05488]),
upper=np.log([0.05488, 0.1, 0.05488, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 1e-6, 1e-6]),
upper=np.log([0.1, 0.1, 0.05488, 0.05488]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 0.05488, 1e-6]),
upper=np.log([0.05488, 0.1, 0.1, 0.05488]))[0], ]),
mvn_od(theta_PID, COV_PID, lower=np.log([1e-6, 0.05488, 1e-6, 1e-6]),
upper=np.log([0.05488, 0.1, 0.05488, 0.05488]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 1].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 1].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / realization_count
assert_allclose(P_target, P_test, atol=0.05)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 1021
P_target = [
mvn_od(theta_PID, COV_PID, lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, 0.1, 0.05488, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 0.05488, 0.05488]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 0.05488, 0.05488]),
upper=np.log([0.05488, 0.1, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 0.05488, 0.05488]),
upper=np.log([0.1, 0.05488, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 0.05488, 1e-6]),
upper=np.log([0.1, 0.1, 0.1, 0.05488]))[0], ]),
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 1e-6, 0.05488, 0.05488]),
upper=np.log([0.05488, 0.05488, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 0.05488, 1e-6]),
upper=np.log([0.1, 0.05488, 0.1, 0.05488]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 0.05488, 1e-6]),
upper=np.log([0.05488, 0.1, 0.1, 0.05488]))[0], ]),
mvn_od(theta_PID, COV_PID, lower=np.log([1e-6, 1e-6, 0.05488, 1e-6]),
upper=np.log([0.05488, 0.05488, 0.1, 0.05488]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 2].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 2].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / realization_count
assert_allclose(P_target, P_test, atol=0.05)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 1022
P_target = [
mvn_od(theta_PID, COV_PID, lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, 0.1, 0.1, 0.05488]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 0.05488, 0.05488]),
upper=np.log([0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 0.05488, 0.05488]),
upper=np.log([0.05488, 0.1, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 0.05488, 0.05488]),
upper=np.log([0.1, 0.05488, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 0.05488, 1e-6, 0.05488]),
upper=np.log([0.1, 0.1, 0.05488, 0.1]))[0], ]),
np.sum([
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 1e-6, 0.05488, 0.05488]),
upper=np.log([0.05488, 0.05488, 0.1, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([0.05488, 1e-6, 1e-6, 0.05488]),
upper=np.log([0.1, 0.05488, 0.05488, 0.1]))[0],
mvn_od(theta_PID, COV_PID,
lower=np.log([1e-6, 0.05488, 1e-6, 0.05488]),
upper=np.log([0.05488, 0.1, 0.05488, 0.1]))[0], ]),
mvn_od(theta_PID, COV_PID, lower=np.log([1e-6, 1e-6, 1e-6, 0.05488]),
upper=np.log([0.05488, 0.05488, 0.05488, 0.1]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 3].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 5)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 3].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 5)]
P_test = P_test[np.where(P_test > 5)]
P_test = P_test / realization_count
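# The rarest outcome for this PG may not occur in enough realizations to
# pass the count filter, so presumably only the first four cost/time
# levels are checked here and a lower count threshold (5) is used.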
assert_allclose(P_target[:-1], P_test[:4], atol=0.05)
assert_allclose(C_target[:-1], C_test[:4], rtol=0.001)
assert_allclose(T_target[:-1], T_test[:4], rtol=0.001)
# PG 2011
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, 9.80665, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, 9.80665, 9.80665, 0.1, 0.1, 0.1, 0.1]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 4].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 4].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / realization_count
assert_allclose(P_target, P_test, atol=0.05)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 2012
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, 9.80665, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, 9.80665, 9.80665, 0.1, 0.1, 0.1, 0.1]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 5].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 5].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / realization_count
assert_allclose(P_target[:4], P_test[:4], atol=0.05)
assert_allclose(C_target[:4], C_test[:4], rtol=0.001)
assert_allclose(T_target[:4], T_test[:4], rtol=0.001)
# PG 2021
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 1e-6, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, 9.80665, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, 9.80665, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 6].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 6].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / realization_count
assert_allclose(P_target, P_test, atol=0.05)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 2022
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, 9.80665, 0.1, 0.1, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 9.80665, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, np.inf, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
np.sum([
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 1e-6, 9.80665, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, 9.80665, np.inf, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[9.80665, 1e-6, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[np.inf, 9.80665, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 9.80665, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, np.inf, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[
0], ]),
mvn_od(np.log(EDP_theta_test), EDP_COV_test, lower=np.log(
[1e-6, 1e-6, 1e-6, 9.80665, 1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[9.80665, 9.80665, 9.80665, np.inf, 0.1, 0.1, 0.1, 0.1]))[0],
]
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, 7].values / 3., decimals=0) * 3.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(np.around(DV_TIME.iloc[:, 7].values * 333.33333,
decimals=0) / 333.33333,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / realization_count
assert_allclose(P_target, P_test, atol=0.05)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# RED TAG
RED_check = A._DV_dict['red_tag'].describe().T
RED_check = (RED_check['mean'] * RED_check['count'] / realization_count).values
assert_allclose(RED_check, DMG_ref, atol=0.02, rtol=0.10)
DMG_on = np.where(A._DMG > 0.0)[0]
RED_on = np.where(A._DV_dict['red_tag'] > 0.0)[0]
assert_allclose(DMG_on, RED_on)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
P_no_RED_target = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
upper=np.log(
[9.80665, 9.80665, 9.80665, 9.80665, 0.05488,
0.05488, 0.05488, 0.05488]))[0]
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = (1.0 - SD.loc[('red tagged', ''), 'mean']) * SD.loc[
('red tagged', ''), 'count'] / realization_count
assert P_no_RED_target == pytest.approx(P_no_RED_test, abs=0.03)
def test_FEMA_P58_Assessment_EDP_uncertainty_single_sample():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
In this test we provide only one structural response result and check
whether it is handled properly, either as a deterministic value or as a
random EDP once additional sources of uncertainty are introduced.
"""
print()
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_6.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_6.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
EDP_theta_target = np.array(
[7.634901, 6.85613, 11.685934, 10.565554,
0.061364, 0.048515, 0.033256, 0.020352])
EDP_beta_target = EDP_theta_target * 1e-6
assert_allclose(EDP_theta_test, EDP_theta_target, rtol=0.05)
assert_allclose(EDP_beta_test, EDP_beta_target, rtol=0.1)
assert RV_EDP[0].RV_set is None
# ------------------------------------------------- perform the calculation
A.define_loss_model()
A.calculate_damage()
A.calculate_losses()
A.aggregate_results()
# ------------------------------------------------ check result aggregation
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = (1.0 - SD.loc[('red tagged', ''), 'mean']) * SD.loc[
('red tagged', ''), 'count'] / 10000.
assert P_no_RED_test == 0.0
# -------------------------------------------------------------------------
# now do the same analysis, but consider additional uncertainty
# -------------------------------------------------------------------------
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
AU = A._AIM_in['general']['added_uncertainty']
AU['beta_m'] = 0.3
AU['beta_gm'] = 0.4
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
EDP_beta_target = np.sqrt((EDP_theta_target * 1e-6)**2. +
np.ones(8)*(0.3**2. + 0.4**2.))
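# The added modeling (beta_m = 0.3) and ground motion (beta_gm = 0.4)
# dispersions are combined with the negligible fitted dispersion through
# the square root of the sum of squares.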
assert_allclose(EDP_theta_test, EDP_theta_target, rtol=0.05)
assert_allclose(EDP_beta_test, EDP_beta_target, rtol=0.1)
assert RV_EDP[0].RV_set is None
EDP_rho_target = np.zeros((8, 8))
np.fill_diagonal(EDP_rho_target, 1.0)
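# With a single response sample no correlation structure can be estimated;
# the reference calculation below therefore assumes uncorrelated EDPs
# (identity correlation matrix) when the added dispersions are applied.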
EDP_COV_test = EDP_rho_target * np.outer(EDP_beta_test, EDP_beta_test)
# ------------------------------------------------- perform the calculation
A.define_loss_model()
A.calculate_damage()
A.calculate_losses()
A.aggregate_results()
# ------------------------------------------------ check result aggregation
P_no_RED_target = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
upper=np.log(
[9.80665, 9.80665, 9.80665, 9.80665, 0.05488,
0.05488, 0.05488, 0.05488]))[0]
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = (1.0 - SD.loc[('red tagged', ''), 'mean']) * SD.loc[
('red tagged', ''), 'count'] / 10000.
assert P_no_RED_target == pytest.approx(P_no_RED_test, abs=0.01)
def test_FEMA_P58_Assessment_EDP_uncertainty_zero_variance():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
This test simulates a scenario in which one of the EDPs is identical in
all of the available samples. This results in zero variance in that
dimension, and the purpose of the test is to ensure that such cases are
handled appropriately.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_7.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_7.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
assert EDP_theta_test[4] == pytest.approx(0.061364, rel=0.05)
assert EDP_beta_test[4] < 0.061364 * 1e-3
rho = RV_EDP[0].RV_set.Rho()
EDP_rho_test = rho
EDP_rho_target = np.zeros((8, 8))
np.fill_diagonal(EDP_rho_target, 1.0)
assert_allclose(EDP_rho_test[4], EDP_rho_target[4], atol=1e-6)
# ------------------------------------------------- perform the calculation
A.define_loss_model()
A.calculate_damage()
A.calculate_losses()
A.aggregate_results()
# ------------------------------------------------ check result aggregation
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = (1.0 - SD.loc[('red tagged', ''), 'mean']) * SD.loc[
('red tagged', ''), 'count'] / 10000.
assert P_no_RED_test == 0.0
def test_FEMA_P58_Assessment_QNT_uncertainty_independent():
"""
Perform loss assessment with customized inputs that focus on testing the
propagation of uncertainty in component quantities. Dispersions in other
calculation parameters are reduced to negligible levels. This allows us to
test the results against pre-defined reference values in spite of the
randomness involved in the calculations.
This test assumes that component quantities are independent.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_8.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_8.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# QNT
RV_QNT = list(A._QNT_dict.values())
QNT_theta_test, QNT_beta_test = np.array([rv.theta for rv in RV_QNT]).T
QNT_theta_target = np.ones(8) * 25.
QNT_beta_target = [25.0] * 4 + [0.4] * 4
assert_allclose(QNT_theta_test, QNT_theta_target, rtol=0.001)
assert_allclose(QNT_beta_test, QNT_beta_target, rtol=0.001)
for i in range(4):
assert RV_QNT[i].distribution == 'normal'
for i in range(4, 8):
assert RV_QNT[i].distribution == 'lognormal'
QNT_rho_target = [
[1, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 1],
]
QNT_rho_test = RV_QNT[0].RV_set.Rho()
assert_allclose(QNT_rho_test, QNT_rho_target, atol=0.001)
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# COL
# there shall be no collapses
assert A._COL.describe().T['mean'].values == 0
# DMG
DMG_check = A._DMG.describe().T
mu_test = DMG_check['mean']
sig_test = DMG_check['std']
rho_test = A._DMG.corr()
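# The first four PGs have normally distributed quantities with mean 25 and
# standard deviation 25 truncated at zero (standardized truncation point
# of -1), so the target moments of the damaged quantities follow the
# truncated normal formulas. The last four PGs have lognormal quantities
# with median 25 and log standard deviation 0.4, hence the lognormal
# moment formulas.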
mu_target_1 = 25.0 + 25.0 * norm.pdf(-1.0) / (1.0 - norm.cdf(-1.0))
sig_target_1 = np.sqrt(25.0 ** 2.0 * (
1 - norm.pdf(-1.0) / (1.0 - norm.cdf(-1.0)) - (
norm.pdf(-1.0) / (1.0 - norm.cdf(-1.0))) ** 2.0))
mu_target_2 = np.exp(np.log(25.0) + 0.4 ** 2. / 2.)
sig_target_2 = np.sqrt(
(np.exp(0.4 ** 2.0) - 1.0) * np.exp(2 * np.log(25.0) + 0.4 ** 2.0))
assert_allclose(mu_test[:4], mu_target_1, rtol=0.05)
assert_allclose(mu_test[4:], mu_target_2, rtol=0.05)
assert_allclose(sig_test[:4], sig_target_1, rtol=0.05)
assert_allclose(sig_test[4:], sig_target_2, rtol=0.05)
assert_allclose(rho_test, QNT_rho_target, atol=0.05)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
DV_COST = A._DV_dict['rec_cost'] / A._DMG
rho_DV_target = [
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
]
assert_allclose(DV_COST.corr(), rho_DV_target, atol=0.05)
# Uncertainty in decision variables is controlled by the correlation
# between damages
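# Reference probabilities come from direct Monte Carlo simulation: the
# total damaged quantity in the drift-sensitive PGs is the sum of four
# independent zero-truncated normals, and in the acceleration-sensitive
# PGs the sum of four independent lognormals. The probability of the
# total exceeding 90 units is compared to the share of realizations where
# the unit repair cost falls below 10.01, which apparently corresponds to
# that exceedance.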
RND = [tnorm.rvs(-1., np.inf, loc=25, scale=25, size=10000) for i in
range(4)]
RND = np.sum(RND, axis=0)
P_target_PID = np.sum(RND > 90.) / 10000.
P_test_PID = np.sum(DV_COST.iloc[:, 0] < 10.01) / 10000.
assert P_target_PID == pytest.approx(P_test_PID, rel=0.02)
RND = [np.exp(norm.rvs(loc=np.log(25.), scale=0.4, size=10000)) for i in
range(4)]
RND = np.sum(RND, axis=0)
P_target_PFA = np.sum(RND > 90.) / 10000.
P_test_PFA = np.sum(DV_COST.iloc[:, 4] < 10.01) / 10000.
assert P_target_PFA == pytest.approx(P_test_PFA, rel=0.02)
# the same checks can be performed for reconstruction time
DV_TIME = A._DV_dict['rec_time'] / A._DMG
assert_allclose(DV_TIME.corr(), rho_DV_target, atol=0.05)
P_test_PID = np.sum(DV_TIME.iloc[:, 0] < 0.0101) / 10000.
assert P_target_PID == pytest.approx(P_test_PID, rel=0.02)
P_test_PFA = np.sum(DV_TIME.iloc[:, 4] < 0.0101) / 10000.
assert P_target_PFA == pytest.approx(P_test_PFA, rel=0.02)
# injuries...
DV_INJ_dict = deepcopy(A._DV_dict['injuries'])
DV_INJ0 = (DV_INJ_dict[0] / A._DMG).describe()
DV_INJ1 = (DV_INJ_dict[1] / A._DMG).describe()
assert_allclose(DV_INJ0.loc['mean', :][:4], np.ones(4) * 0.025, rtol=0.001)
assert_allclose(DV_INJ0.loc['mean', :][4:], np.ones(4) * 0.1, rtol=0.001)
assert_allclose(DV_INJ1.loc['mean', :][:4], np.ones(4) * 0.005, rtol=0.001)
assert_allclose(DV_INJ1.loc['mean', :][4:], np.ones(4) * 0.02, rtol=0.001)
assert_allclose(DV_INJ0.loc['std', :], np.zeros(8), atol=1e-4)
assert_allclose(DV_INJ1.loc['std', :], np.zeros(8), atol=1e-4)
# and for red tag...
# Since every component is damaged in every realization, the red tag
# results should all be 1.0
assert_allclose(A._DV_dict['red_tag'], np.ones((10000, 8)))
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
S = A._SUMMARY
SD = S.describe().T
assert SD.loc[('inhabitants', ''), 'mean'] == 20.0
assert SD.loc[('inhabitants', ''), 'std'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'mean'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'std'] == 0.0
assert SD.loc[('red tagged', ''), 'mean'] == 1.0
assert SD.loc[('red tagged', ''), 'std'] == 0.0
assert np.corrcoef(S.loc[:, ('reconstruction', 'cost')],
S.loc[:, ('reconstruction', 'time-sequential')])[
0, 1] == pytest.approx(1.0)
assert_allclose(A._DV_dict['rec_cost'].sum(axis=1),
S.loc[:, ('reconstruction', 'cost')])
assert_allclose(A._DV_dict['rec_time'].sum(axis=1),
S.loc[:, ('reconstruction', 'time-sequential')])
assert_allclose(A._DV_dict['rec_time'].max(axis=1),
S.loc[:, ('reconstruction', 'time-parallel')])
assert_allclose(A._DV_dict['injuries'][0].sum(axis=1),
S.loc[:, ('injuries', 'sev1')])
assert_allclose(A._DV_dict['injuries'][1].sum(axis=1),
S.loc[:, ('injuries', 'sev2')])
def test_FEMA_P58_Assessment_QNT_uncertainty_dependencies():
"""
Perform loss assessment with customized inputs that focus on testing the
propagation of uncertainty in component quantities. Dispersions in other
calculation parameters are reduced to negligible levels. This allows us to
test the results against pre-defined reference values in spite of the
randomness involved in the calculations.
This test checks if dependencies between component quantities are handled
appropriately.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_8.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_8.out"
for dep in ['FG', 'PG', 'DIR', 'LOC']:
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A._AIM_in['dependencies']['quantities'] = dep
A.define_random_variables()
# ---------------------------------------------- check random variables
# QNT
RV_QNT = list(A._QNT_dict.values())
QNT_theta_test, QNT_beta_test = np.array([rv.theta for rv in RV_QNT]).T
QNT_theta_target = np.ones(8) * 25.
QNT_beta_target = [25.0] * 4 + [0.4] * 4
assert_allclose(QNT_theta_test, QNT_theta_target, rtol=0.001)
assert_allclose(QNT_beta_test, QNT_beta_target, rtol=0.001)
for i in range(4):
assert RV_QNT[i].distribution == 'normal'
for i in range(4, 8):
assert RV_QNT[i].distribution == 'lognormal'
if dep == 'FG':
QNT_rho_target = np.array([
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
])
elif dep == 'PG':
QNT_rho_target = np.array([
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
])
elif dep == 'DIR':
QNT_rho_target = np.array([
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1],
])
elif dep == 'LOC':
QNT_rho_target = np.array([
[1, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 0, 0],
[1, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 1, 0, 1],
[0, 0, 0, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 1, 0, 1],
])
QNT_rho_test = RV_QNT[0].RV_set.Rho()
assert_allclose(QNT_rho_test, QNT_rho_target, atol=0.001)
# ---------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# -------------------------------------------- check damage calculation
# COL
# there shall be no collapses
assert A._COL.describe().T['mean'].values == 0
# DMG
# Because the correlations are enforced after truncation, the marginals
# shall be unaffected by the correlation structure. Hence, the
# distribution of damaged quantities within a PG shall be identical in
# all dep cases.
# The specified dependencies are apparent in the correlation between
# damaged quantities in various PGs.
DMG_check = A._DMG.describe().T
mu_test = DMG_check['mean']
sig_test = DMG_check['std']
rho_test = A._DMG.corr()
mu_target_1 = 25.0 + 25.0 * norm.pdf(-1.0) / (1.0 - norm.cdf(-1.0))
sig_target_1 = np.sqrt(25.0 ** 2.0 * (
1 - norm.pdf(-1.0) / (1.0 - norm.cdf(-1.0)) - (
norm.pdf(-1.0) / (1.0 - norm.cdf(-1.0))) ** 2.0))
mu_target_2 = np.exp(np.log(25.0) + 0.4 ** 2. / 2.)
sig_target_2 = np.sqrt(
(np.exp(0.4 ** 2.0) - 1.0) * np.exp(2 * np.log(25.0) + 0.4 ** 2.0))
assert_allclose(mu_test[:4], mu_target_1, rtol=0.05)
assert_allclose(mu_test[4:], mu_target_2, rtol=0.05)
assert_allclose(sig_test[:4], sig_target_1, rtol=0.05)
assert_allclose(sig_test[4:], sig_target_2, rtol=0.05)
assert_allclose(rho_test, QNT_rho_target, atol=0.05)
# ---------------------------------------------------------------------
A.calculate_losses()
# ---------------------------------------------- check loss calculation
DV_COST = A._DV_dict['rec_cost'] / A._DMG
# After the DVs are normalized by the damaged quantities, the resulting
# samples show the correlations between the DV_measure (such as
# reconstruction cost) / 1 unit of damaged component. Because these
# consequences are perfectly correlated among the components of a
# fragility group by definition, the quadrants on the main diagonal
# will follow the matrix presented below. If there are additional
# correlations defined between component quantities in different
# fragility groups (i.e. the off-diagonal quadrants of the rho matrix),
# those will be preserved in the consequences. Therefore, the
# off-diagonal quadrants need to be updated with those from QNT_rho_target
# to get an appropriate rho_DV_target.
rho_DV_target = np.array([
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1],
])
rho_DV_target[:4, 4:] = QNT_rho_target[:4, 4:]
rho_DV_target[4:, :4] = QNT_rho_target[:4, 4:]
assert_allclose(DV_COST.corr(), rho_DV_target, atol=0.05)
# uncertainty in decision variables is controlled by the correlation
# between damages
P_test_PID = np.sum(DV_COST.iloc[:, 0] < 10.01) / 10000.
P_test_PFA = np.sum(DV_COST.iloc[:, 4] < 10.01) / 10000.
# the first component quantities follow a truncated multivariate normal
# distribution
mu_target_PID = mu_target_1 * 4.
sig_target_PID = np.sqrt(
sig_target_1 ** 2. * np.sum(QNT_rho_target[:4, :4]))
mu_target_PID_b = mu_target_PID
sig_target_PID_b = sig_target_PID
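# The total damaged quantity is approximated by a normal distribution
# truncated at zero. The fixed-point iteration below searches for the
# parameters of the underlying (non-truncated) normal whose left-truncated
# version approximately reproduces the target mean and standard deviation
# of the sum.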
alpha = 100.
i = 0
while (np.log(
np.abs(alpha / (mu_target_PID_b / sig_target_PID_b))) > 0.001) and (
i < 10):
alpha = -mu_target_PID_b / sig_target_PID_b
mu_target_PID_b = mu_target_PID - sig_target_PID_b * norm.pdf(
alpha) / (1.0 - norm.cdf(alpha))
sig_target_PID_b = sig_target_PID / np.sqrt(
(1.0 + alpha * norm.pdf(alpha) / (1.0 - norm.cdf(alpha))))
i += 1
xi = (90 - mu_target_PID_b) / sig_target_PID_b
P_target_PID = 1.0 - (norm.cdf(xi) - norm.cdf(alpha)) / (
1.0 - norm.cdf(alpha))
assert P_target_PID == pytest.approx(P_test_PID, rel=0.05)
# the second component quantities follow a multivariate lognormal
# distribution
mu_target_PFA = mu_target_2 * 4.
sig_target_PFA = np.sqrt(
sig_target_2 ** 2. * np.sum(QNT_rho_target[4:, 4:]))
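# The sum of the correlated lognormal quantities is approximated by a
# single lognormal whose parameters are obtained by matching its first two
# moments to those of the sum.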
sig_target_PFA_b = np.sqrt(
np.log(sig_target_PFA ** 2.0 / mu_target_PFA ** 2.0 + 1.0))
mu_target_PFA_b = np.log(mu_target_PFA) - sig_target_PFA_b ** 2.0 / 2.
xi = np.log(90)
P_target_PFA = 1.0 - norm.cdf(xi, loc=mu_target_PFA_b,
scale=sig_target_PFA_b)
assert P_target_PFA == pytest.approx(P_test_PFA, rel=0.05)
# the same checks can be performed for reconstruction time
DV_TIME = A._DV_dict['rec_time'] / A._DMG
assert_allclose(DV_TIME.corr(), rho_DV_target, atol=0.05)
P_test_PID = np.sum(DV_TIME.iloc[:, 0] < 0.0101) / 10000.
assert P_target_PID == pytest.approx(P_test_PID, rel=0.05)
P_test_PFA = np.sum(DV_TIME.iloc[:, 4] < 0.0101) / 10000.
assert P_target_PFA == pytest.approx(P_test_PFA, rel=0.05)
# injuries...
# Every component is damaged in every realization in this test. Once
# normalized by the quantity of components, the number of injuries
# shall be identical and unaffected by the correlation between
# component quantities.
DV_INJ_dict = deepcopy(A._DV_dict['injuries'])
DV_INJ0 = (DV_INJ_dict[0] / A._DMG).describe()
DV_INJ1 = (DV_INJ_dict[1] / A._DMG).describe()
assert_allclose(DV_INJ0.loc['mean', :][:4], np.ones(4) * 0.025,
rtol=0.001)
assert_allclose(DV_INJ0.loc['mean', :][4:], np.ones(4) * 0.1,
rtol=0.001)
assert_allclose(DV_INJ1.loc['mean', :][:4], np.ones(4) * 0.005,
rtol=0.001)
assert_allclose(DV_INJ1.loc['mean', :][4:], np.ones(4) * 0.02,
rtol=0.001)
assert_allclose(DV_INJ0.loc['std', :], np.zeros(8), atol=1e-4)
assert_allclose(DV_INJ1.loc['std', :], np.zeros(8), atol=1e-4)
# and for red tag...
# since every component is damaged in every realization, the red tag
# results should all be 1.0
assert_allclose(A._DV_dict['red_tag'], np.ones((10000, 8)))
# ---------------------------------------------------------------------
A.aggregate_results()
# -------------------------------------------- check result aggregation
S = A._SUMMARY
SD = S.describe().T
assert SD.loc[('inhabitants', ''), 'mean'] == 20.0
assert SD.loc[('inhabitants', ''), 'std'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'mean'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'std'] == 0.0
assert SD.loc[('red tagged', ''), 'mean'] == 1.0
assert SD.loc[('red tagged', ''), 'std'] == 0.0
assert np.corrcoef(S.loc[:, ('reconstruction', 'cost')],
S.loc[:, ('reconstruction', 'time-sequential')])[
0, 1] == pytest.approx(1.0)
assert_allclose(A._DV_dict['rec_cost'].sum(axis=1),
S.loc[:, ('reconstruction', 'cost')])
assert_allclose(A._DV_dict['rec_time'].sum(axis=1),
S.loc[:, ('reconstruction', 'time-sequential')])
assert_allclose(A._DV_dict['rec_time'].max(axis=1),
S.loc[:, ('reconstruction', 'time-parallel')])
assert_allclose(A._DV_dict['injuries'][0].sum(axis=1),
S.loc[:, ('injuries', 'sev1')])
assert_allclose(A._DV_dict['injuries'][1].sum(axis=1),
S.loc[:, ('injuries', 'sev2')])
def test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies(dep='IND'):
"""
Perform loss assessment with customized inputs that focus on testing the
propagation of uncertainty in component fragilities. Dispersions in other
calculation parameters are reduced to negligible levels. This allows us to
test the results against pre-defined reference values in spite of the
randomness involved in the calculations.
"""
print()
idx = pd.IndexSlice
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_9.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_9.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A._AIM_in['dependencies']['fragilities'] = dep
A.define_random_variables()
# ---------------------------------------------- check random variables
RV_FF = list(A._FF_dict.values())
fr_names = np.unique([rv.name[3:12] for rv in RV_FF])
fr_keys = {}
for fr_name in fr_names:
fr_list = [rv.name for rv in RV_FF if fr_name in rv.name]
fr_keys.update({fr_name: fr_list})
dimtag_target = [4 * 2 * 3, 20 * 2 * 3 * 3, 20 * 2 * 3 * 3,
20 * 2 * 3 * 3]
theta_target = [[0.048, 0.096], [0.048, 0.072, 0.096],
[2.9419, 5.8840, 11.7680], [2.9419, 5.8840, 11.7680]]
sig_target = [[0.5, 0.25], [1.0, 0.5, 0.25], [1.0, 0.5, 0.25],
[1.0, 0.5, 0.25]]
if dep == 'IND':
rho_target = np.zeros((24, 24))
np.fill_diagonal(rho_target, 1.0)
rho_sum = 360
elif dep == 'PG':
rho_target = np.ones((24, 24))
rho_sum = 360 ** 2.
elif dep == 'DIR':
rho_target = [
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.]]
rho_sum = (20 * 2 * 3) ** 2. * 3
elif dep == 'LOC':
rho_target = [
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 1., 1.]]
rho_sum = (20 * 3) ** 2. * (2 * 9)
elif dep in ['ATC', 'CSG']:
rho_target = [
[1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.]]
rho_sum = (20 * 3) ** 2. * (2 * 3)
elif dep == 'DS':
rho_target = [
[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.]]
rho_sum = 3 ** 2 * (20 * 2 * 3)
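# Note (added for clarity): rho_sum above is the expected sum of all entries in
# the correlation matrix (i.e., the number of unit correlations); it is used
# below in place of an element-wise comparison for the large 360x360 groups.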
for k, key in enumerate(sorted(fr_keys.keys())):
RV_FF_i = [A._FF_dict[rv_i] for rv_i in fr_keys[key]]
assert len(RV_FF_i) == dimtag_target[k]
FF_theta_test, FF_beta_test = np.array([rv.theta for rv in RV_FF_i]).T
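# Note (added for clarity): the medians and betas are reshaped by damage state
# below and we verify that every performance group in the fragility group
# shares the same values: column means match the targets and the scatter is zero.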
if k == 0:
FF_theta_test = pd.DataFrame(
np.reshape(FF_theta_test, (12, 2))).describe()
FF_beta_test = pd.DataFrame(
np.reshape(FF_beta_test, (12, 2))).describe()
else:
FF_theta_test = pd.DataFrame(
np.reshape(FF_theta_test, (120, 3))).describe()
FF_beta_test = pd.DataFrame(
np.reshape(FF_beta_test, (120, 3))).describe()
assert_allclose(FF_theta_test.loc['mean', :].values, theta_target[k],
rtol=1e-4)
assert_allclose(FF_theta_test.loc['std', :].values,
np.zeros(np.array(theta_target[k]).shape),
atol=1e-10)
assert_allclose(FF_beta_test.loc['mean', :].values, sig_target[k],
rtol=1e-4)
assert_allclose(FF_beta_test.loc['std', :].values,
np.zeros(np.array(sig_target[k]).shape), atol=1e-10)
rho_test = RV_FF_i[0].RV_set.Rho(fr_keys[fr_names[k]])
if k == 0:
# we perform the detailed verification of rho for the first case
# only (because the others are 360x360 matrices)
assert_allclose(rho_test, rho_target)
else:
# for the other cases we check the number of ones in the matrix
assert np.sum(rho_test) == rho_sum
# RV_FR = deepcopy(A._RV_dict[key])
# assert len(RV_FR._dimension_tags) == dimtag_target[k]
#
# COV_test = RV_FR.COV
# sig_test = np.sqrt(np.diagonal(COV_test))
# rho_test = COV_test / np.outer(sig_test, sig_test)
#
# if k == 0:
# theta_test = pd.DataFrame(
# np.reshape(RV_FR.theta, (12, 2))).describe()
# sig_test = pd.DataFrame(
# np.reshape(sig_test, (12, 2))).describe()
# else:
# theta_test = pd.DataFrame(
# np.reshape(RV_FR.theta, (120, 3))).describe()
# sig_test = pd.DataFrame(
# np.reshape(sig_test, (120, 3))).describe()
#
# assert_allclose(theta_test.loc['mean', :].values, theta_target[k],
# rtol=1e-4)
# assert_allclose(theta_test.loc['std', :].values,
# np.zeros(np.array(theta_target[k]).shape),
# atol=1e-10)
#
# assert_allclose(sig_test.loc['mean', :].values, sig_target[k],
# rtol=1e-4)
# assert_allclose(sig_test.loc['std', :].values,
# np.zeros(np.array(sig_target[k]).shape), atol=1e-10)
#
# if k == 0:
# # we perform the detailed verification of rho for the first case
# # only (because the others are 360x360 matrices)
# assert_allclose(rho_test, rho_target)
#
# else:
# # for the other cases we check the number of ones in the matrix
# assert np.sum(rho_test) == rho_sum
# ---------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# -------------------------------------------- check damage calculation
# COL
# there shall be no collapses
assert A._COL.describe().T['mean'].values == 0
# DMG
DMG_check = A._DMG
# start with checking the damage correlations
for k in range(4):
DMG_corr = DMG_check.loc[:, idx[k + 1, :, :]].corr()
if k == 0:
DMG_corr = DMG_corr.iloc[:8, :8]
if dep in ['IND', 'ATC', 'CSG', 'DS']:
DMG_corr_ref = np.array([
[ 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 1.0,-0.1, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0],
])
elif dep == 'PG':
DMG_corr_ref = np.array([
[ 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1],
[-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0],
[ 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1],
[-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0],
[ 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1],
[-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0],
[ 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1],
[-0.1, 1.0,-0.1, 1.0,-0.1, 1.0,-0.1, 1.0],
])
elif dep == 'DIR':
DMG_corr_ref = np.array([
[ 1.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0],
[ 1.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 1.0,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 1.0],
[ 0.0, 0.0, 0.0, 0.0, 1.0,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 1.0],
])
elif dep == 'LOC':
DMG_corr_ref = np.array([
[ 1.0,-0.1, 0.0, 0.0, 1.0,-0.1, 0.0, 0.0],
[-0.1, 1.0, 0.0, 0.0,-0.1, 1.0, 0.0, 0.0],
[ 0.0, 0.0, 1.0,-0.1, 0.0, 0.0, 1.0,-0.1],
[ 0.0, 0.0,-0.1, 1.0, 0.0, 0.0,-0.1, 1.0],
[ 1.0,-0.1, 0.0, 0.0, 1.0,-0.1, 0.0, 0.0],
[-0.1, 1.0, 0.0, 0.0,-0.1, 1.0, 0.0, 0.0],
[ 0.0, 0.0, 1.0,-0.1, 0.0, 0.0, 1.0,-0.1],
[ 0.0, 0.0,-0.1, 1.0, 0.0, 0.0,-0.1, 1.0],
])
if k == 1:
DMG_corr = DMG_corr.iloc[:12, :12]
if dep in ['IND', 'ATC', 'CSG', 'DS']:
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0],
])
elif dep == 'PG':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1],
[-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1],
[-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1],
[-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1],
[-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1],
[-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1],
[-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1],
[-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1],
[-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0],
])
elif dep == 'DIR':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1, 1.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 1.0,-0.1,-0.1, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1, 1.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 1.0,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0,-0.1,-0.1, 1.0],
])
elif dep == 'LOC':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0],
[-0.1,-0.1, 1.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1],
[ 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0],
[-0.1,-0.1, 1.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1],
[ 0.0, 0.0, 0.0,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0],
])
if k == 2:
DMG_corr = DMG_corr.iloc[:20, :20]
if dep in ['IND', 'DS']:
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1, 1.0,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep == 'PG':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 1.0, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1],
[-0.1, 0.5, 1.0, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1],
[-0.1, 0.5, 0.5, 1.0,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 1.0, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1],
[-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 1.0, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1],
[-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 1.0,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 1.0, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1],
[-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 1.0, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1],
[-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 1.0,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 1.0, 0.5, 0.5,-0.1],
[-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 1.0, 0.5,-0.1],
[-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 1.0,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep == 'DIR':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 1.0, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 0.5, 1.0,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 1.0, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 1.0, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.5, 0.5,-0.1,-0.1, 0.8, 0.5, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 1.0, 0.5,-0.1,-0.1, 0.5, 0.6, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 1.0,-0.1,-0.1, 0.5, 0.5, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.5, 0.5,-0.1,-0.1, 1.0, 0.5, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.6, 0.5,-0.1,-0.1, 0.5, 1.0, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 0.5,-0.1,-0.1, 0.5, 0.5, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep == 'LOC':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 1.0, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.6, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 0.5, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.5, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 1.0, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.6, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 0.6, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 1.0, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.5, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.6, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 1.0, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep in ['ATC', 'CSG']:
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 1.0, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.5, 0.5, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 1.0, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.5, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 1.0, 0.5,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.5, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 1.0, 0.5,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.5, 0.5, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
if k == 3:
DMG_corr = DMG_corr.iloc[:20, :20]
if dep in ['IND', 'DS']:
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.0, 0.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.0, 1.0, 0.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.0, 0.0, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.0, 0.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.0, 1.0, 0.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.0, 0.0, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.0, 0.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.0, 1.0, 0.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.0, 0.0, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.0, 0.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.0, 1.0, 0.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.0, 0.0, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep == 'PG':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 1.0, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1],
[-0.1, 0.8, 1.0, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1],
[-0.1, 0.7, 0.6, 1.0,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 1.0, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1],
[-0.1, 0.8, 0.6, 0.6,-0.1,-0.1, 0.8, 1.0, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1],
[-0.1, 0.7, 0.6, 0.5,-0.1,-0.1, 0.7, 0.6, 1.0,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 1.0, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1],
[-0.1, 0.8, 0.7, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1,-0.1, 0.8, 1.0, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1],
[-0.1, 0.7, 0.6, 0.6,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1,-0.1, 0.7, 0.6, 1.0,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 1.0, 0.8, 0.7,-0.1],
[-0.1, 0.8, 0.7, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1,-0.1, 0.8, 0.6, 0.6,-0.1,-0.1, 0.8, 1.0, 0.6,-0.1],
[-0.1, 0.7, 0.6, 0.6,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1,-0.1, 0.7, 0.6, 0.5,-0.1,-0.1, 0.7, 0.6, 1.0,-0.1],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep == 'DIR':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 1.0, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.7, 0.6, 1.0,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 1.0, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 0.6, 0.6,-0.1,-0.1, 0.8, 1.0, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.7, 0.6, 0.5,-0.1,-0.1, 0.7, 0.6, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.8, 0.7,-0.1,-0.1, 0.8, 0.8, 0.7,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 1.0, 0.6,-0.1,-0.1, 0.8, 0.7, 0.6,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 1.0,-0.1,-0.1, 0.7, 0.6, 0.6,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.8, 0.7,-0.1,-0.1, 1.0, 0.8, 0.7,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.6, 0.6,-0.1,-0.1, 0.8, 1.0, 0.6,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 0.5,-0.1,-0.1, 0.7, 0.6, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep == 'LOC':
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 1.0, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.7, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.7, 0.6, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.8, 0.7,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 1.0, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.7, 0.6,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 0.6,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 0.7, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 1.0, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.7, 0.6, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.8, 0.7,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 0.7, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 1.0, 0.6,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
elif dep in ['ATC', 'CSG']:
DMG_corr_ref = np.array([
[ 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 1.0, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.8, 1.0, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1, 0.7, 0.6, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 1.0, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.8, 0.7,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 1.0, 0.6,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 1.0,-0.1, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-0.1,-0.1,-0.1,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 1.0, 0.8, 0.7,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.8, 1.0, 0.6,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1, 0.7, 0.6, 1.0,-0.1],
[ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-0.1,-0.1,-0.1,-0.1, 1.0],
])
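# Note (added for clarity): the loop below compares sample correlations to the
# reference pattern - positive references must be at least 97% of the target,
# negative references only need the correct sign, and zero entries are allowed
# a +/-0.15 band for sampling noise (presumably 10,000 realizations, matching
# the /10000. normalizations used later in this test).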
for i in range(len(DMG_corr.index)):
for j in range(len(DMG_corr.columns)):
ref_i = DMG_corr_ref[i, j]
if ref_i != 0.0:
if ref_i > 0.0:
assert DMG_corr.iloc[i, j] > 0.97 * ref_i
else:
assert DMG_corr.iloc[i, j] < 0.0
else:
assert DMG_corr.iloc[i, j] == pytest.approx(ref_i,
abs=0.15)
# then check the distribution of damage within each performance group
EDP_list = np.array(
[[[0.080000, 0.080000], [0.080000, 0.080000], [0.040000, 0.040000]],
[[7.845320, 7.845320], [7.845320, 7.845320],
[2.942000, 2.942000]]])
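# Note (added for clarity): EDP_list appears to hold the practically
# deterministic demand values per demand type, story, and direction; they are
# used below to compute reference damage-state probabilities from the capacity RVs.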
fr_keys = []
for key in A._RV_dict.keys():
if 'FR' in key:
fr_keys.append(key)
for k, key in enumerate(sorted(fr_keys)):
# print(key)
RV_FR = A._RV_dict[key]
# only a third of the data is unique because of the 3 stories
rel_len = int(len(RV_FR._dimension_tags) / 3)
COV_test = RV_FR.COV[:rel_len, :rel_len]
theta_test = RV_FR.theta[:rel_len]
lims = np.unique(theta_test)
ndims = len(lims)
if k in [2, 3]:
ndims += 2
if (dep in ['DS', 'IND']) or k > 1:
DMG_vals = [[[0., 5., 7.5, 12.5, 17.5, 20., 25.], [0., 25.]],
[[0., 1.5, 3., 4.5, 6., 7.5, 9., 10.5, 12., 13.5,
15.,
16.5, 18., 19.5, 21., 22.5, 24., 25.5, 27., 28.5,
30.0],
[0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.,
11., 12., 13., 14., 15., 16., 17., 18., 19.,
20.]]]
else:
DMG_vals = [[[0., 25.], [0., 25.]],
[[0., 30.], [0., 20.]]]
DMG_vals = np.array(DMG_vals)
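# Note (added for clarity): DMG_vals lists the discrete damaged-quantity values
# that can occur in each direction, given how the total quantities are split
# into CSGs; the sampled damage values are checked against these admissible values.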
for story in [0, 1, 2]:
for dir_ in [0, 1]:
# print(story, dir_)
idx = pd.IndexSlice
DMG_check_FG = DMG_check.loc[:, idx[k + 1, :, :]]
DMG_check_PG = DMG_check_FG.iloc[:,
story * 2 * ndims + dir_ * ndims:story * 2 * ndims + (
dir_ + 1) * ndims]
DMG_val_test = np.unique(
np.around(DMG_check_PG.values * 10., decimals=0) / 10.,
return_counts=True)
DMG_val_test = DMG_val_test[0][DMG_val_test[1] > 10]
# check at most the first 10 values, because the
# higher values have extremely low likelihood
ddim = min(len(DMG_val_test), 10)
DMG_val_ref = DMG_vals[np.sign(k), dir_]
for v in DMG_val_test:
assert v in DMG_val_ref
# additional tests for mutually exclusive DS2 in FG3
if (k == 2) and (dep not in ['DS', 'IND']):
DMG_tot = [[0., 30.], [0., 20.]][dir_]
DMG_DS2_test = DMG_check_PG.iloc[:, [1, 2, 3]].sum(
axis=1)
# the proportion of each DS in DS2 shall follow the
# pre-assigned weights
ME_test = \
DMG_check_PG.iloc[DMG_DS2_test.values > 0].iloc[:,
[1, 2, 3]].describe().T['mean'].values / DMG_tot[-1]
assert_allclose(ME_test, [0.5, 0.3, 0.2], atol=0.01)
# the sum of DMG with correlated CSGs shall be either 0.
# or the total quantity
DMG_DS2_test = np.unique(
np.around(DMG_DS2_test * 10., decimals=0) / 10.,
return_counts=True)
DMG_DS2_test = DMG_DS2_test[0][DMG_DS2_test[1] > 10]
assert_allclose(DMG_DS2_test, DMG_tot, atol=0.01)
# additional tests for simultaneous DS2 in FG4
if (k == 3) and (dep not in ['DS', 'IND']):
DMG_tot = [30.0, 20.0][dir_]
DMG_DS2_test = DMG_check_PG.iloc[:, [1, 2, 3]].sum(
axis=1)
# the proportion of each DS in DS2 shall follow the
# pre-assigned weights considering replacement
SIM_test = \
DMG_check_PG.iloc[DMG_DS2_test.values > 0].iloc[:,
[1, 2, 3]].describe().T['mean'].values / DMG_tot
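# Note (added for clarity): P_rep below is the probability that none of the
# three simultaneous damage states is triggered, (1-0.5)*(1-0.3)*(1-0.2) = 0.28;
# dividing the weights by (1 - P_rep), i.e., multiplying by
# (1 + P_rep / (1 - P_rep)), gives the expected proportions conditioned on at
# least one of the damage states occurring.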
P_rep = 0.5 * 0.7 * 0.8
SIM_ref = np.array([0.5, 0.3, 0.2]) * (
1.0 + P_rep / (1.0 - P_rep))
assert_allclose(SIM_test, SIM_ref, atol=0.02)
# the sum of DMG with correlated CSGs shall be either
# 0. or more than the total quantity
DMG_DS2_test = DMG_DS2_test.iloc[
DMG_DS2_test.values > 0]
# Even with perfect correlation, the generated random
# samples are not identical. Hence, on rare occasions one
# of the 20 CSGs in FG4 will belong to a different DS
# than the rest. To avoid false negatives, we test the
# third smallest value.
assert DMG_DS2_test.sort_values().iloc[
2] >= DMG_tot * 0.99
assert np.max(DMG_DS2_test.values) > DMG_tot
# the first component has 3 and 1 CSGs in dir 1 and 2,
# respectively
if k == 0:
dir_len = int(rel_len * 3 / 4)
# the other components have 20 CSGs in each of dir 1 and 2
else:
dir_len = int(rel_len / 2)
if dir_ == 0:
theta_t = theta_test[:dir_len]
COV_t = COV_test[:dir_len, :dir_len]
else:
theta_t = theta_test[dir_len:]
COV_t = COV_test[dir_len:, dir_len:]
lim_ds1 = np.where(theta_t == lims[0])[0]
lim_ds2 = np.where(theta_t == lims[1])[0]
if k > 0:
lim_ds3 = np.where(theta_t == lims[2])[0]
ndim = len(theta_t)
EDP = EDP_list[int(k > 1), story, dir_]*1.2
DS_ref_all = []
DS_ref_any = []
DS_test_all = []
DS_test_any = []
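# Note (added for clarity): reference probabilities for each damage state are
# computed by integrating the multivariate normal distribution of the
# log-capacities (mvn_od) over the corresponding box; the test values are the
# matching fractions of the sampled damage realizations.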
# DS0
DS_ref_all.append(mvn_od(np.log(theta_t), COV_t,
lower=np.log(np.ones(ndim) * EDP),
upper=np.ones(ndim) * np.inf)[0])
if k == 0:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, 1] == 0.],
axis=0)) / 10000.)
elif k == 1:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, 1] == 0.,
DMG_check_PG.iloc[:, 2] == 0.],
axis=0)) / 10000.)
else:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, 1] == 0.,
DMG_check_PG.iloc[:, 2] == 0.,
DMG_check_PG.iloc[:, 3] == 0.,
DMG_check_PG.iloc[:, 4] == 0.],
axis=0)) / 10000.)
# DS1
lower_lim = -np.ones(ndim) * np.inf
upper_lim = np.ones(ndim) * np.inf
lower_lim[lim_ds2] = np.log(EDP)
upper_lim[lim_ds1] = np.log(EDP)
if k > 0:
lower_lim[lim_ds3] = np.log(EDP)
DS_ref_all.append(mvn_od(np.log(theta_t), COV_t,
lower=lower_lim, upper=upper_lim)[
0])
lower_lim = -np.ones(ndim) * np.inf
upper_lim = np.ones(ndim) * np.inf
lower_lim[lim_ds2[0]] = np.log(EDP)
upper_lim[lim_ds1[0]] = np.log(EDP)
if k > 0:
lower_lim[lim_ds3[0]] = np.log(EDP)
P_any = mvn_od(np.log(theta_t), COV_t, lower=lower_lim,
upper=upper_lim)[0]
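# Note (added for clarity): for independent CSGs, the probability that at least
# one of the n CSGs reaches the damage state is 1 - (1 - P_any)^n; in the
# correlated cases the single-CSG probability already applies to the whole group.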
if (dep in ['DS', 'IND']):
P_any = 1.0 - (1.0 - P_any) ** len(lim_ds1)
DS_ref_any.append(P_any)
if k == 0:
DS_test_all.append(np.sum(np.all(
[DMG_check_PG.iloc[:, 0] > DMG_val_ref[-1] - 0.1,
DMG_check_PG.iloc[:, 1] == 0.], axis=0)) / 10000.)
elif k == 1:
DS_test_all.append(np.sum(np.all(
[DMG_check_PG.iloc[:, 0] > DMG_val_ref[-1] - 0.1,
DMG_check_PG.iloc[:, 1] == 0.,
DMG_check_PG.iloc[:, 2] == 0.], axis=0)) / 10000.)
else:
DS_test_all.append(np.sum(np.all(
[DMG_check_PG.iloc[:, 0] > DMG_val_ref[-1] - 0.1,
DMG_check_PG.iloc[:, 1] == 0.,
DMG_check_PG.iloc[:, 2] == 0.,
DMG_check_PG.iloc[:, 3] == 0.,
DMG_check_PG.iloc[:, 4] == 0.], axis=0)) / 10000.)
DS_test_any.append(np.sum(
np.all([DMG_check_PG.iloc[:, 0] > 0.],
axis=0)) / 10000.)
# DS2
lower_lim = -np.ones(ndim) * np.inf
upper_lim = np.ones(ndim) * np.inf
upper_lim[lim_ds2] = np.log(EDP)
if k > 0:
lower_lim[lim_ds3] = np.log(EDP)
if k < 3:
DS_ref_all.append(mvn_od(np.log(theta_t), COV_t,
lower=lower_lim,
upper=upper_lim)[0])
else:
DS_ref_all.append(0.0)
lower_lim = -np.ones(ndim) * np.inf
upper_lim = np.ones(ndim) * np.inf
upper_lim[lim_ds2[0]] = np.log(EDP)
if k > 0:
lower_lim[lim_ds3[0]] = np.log(EDP)
P_any = mvn_od(np.log(theta_t), COV_t, lower=lower_lim,
upper=upper_lim)[0]
if (dep in ['DS', 'IND']):
P_any = 1.0 - (1.0 - P_any) ** len(lim_ds1)
DS_ref_any.append(P_any)
if k == 0:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, 1] >
DMG_val_ref[-1] - 0.1],
axis=0)) / 10000.)
elif k == 1:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, 1] >
DMG_val_ref[-1] - 0.1,
DMG_check_PG.iloc[:, 2] == 0.],
axis=0)) / 10000.)
elif k == 2:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, [1, 2, 3]].sum(
axis=1) > DMG_val_ref[-1] - 0.1,
DMG_check_PG.iloc[:, 4] == 0.],
axis=0)) / 10000.)
elif k == 3:
# skip this case
DS_test_all.append(0.0)
if k < 2:
DS_test_any.append(np.sum(
np.all([DMG_check_PG.iloc[:, 1] > 0.],
axis=0)) / 10000.)
else:
DS_test_any.append(np.sum(np.all(
[DMG_check_PG.iloc[:, [1, 2, 3]].sum(axis=1) > 0.],
axis=0)) / 10000.)
# DS3
if k > 0:
lower_lim = -np.ones(ndim) * np.inf
upper_lim = np.ones(ndim) * np.inf
upper_lim[lim_ds3] = np.log(EDP)
DS_ref_all.append(mvn_od(np.log(theta_t), COV_t,
lower=lower_lim,
upper=upper_lim)[0])
lower_lim = -np.ones(ndim) * np.inf
upper_lim = np.ones(ndim) * np.inf
upper_lim[lim_ds3[0]] = np.log(EDP)
P_any = mvn_od(np.log(theta_t), COV_t, lower=lower_lim,
upper=upper_lim)[0]
if (dep in ['DS', 'IND']):
P_any = 1.0 - (1.0 - P_any) ** len(lim_ds1)
DS_ref_any.append(P_any)
if k == 1:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, 1] == 0.,
DMG_check_PG.iloc[:, 2] >
DMG_val_ref[-1] - 0.1],
axis=0)) / 10000.)
else:
DS_test_all.append(
np.sum(np.all([DMG_check_PG.iloc[:, 0] == 0.,
DMG_check_PG.iloc[:, 1] == 0.,
DMG_check_PG.iloc[:, 2] == 0.,
DMG_check_PG.iloc[:, 3] == 0.,
DMG_check_PG.iloc[:, 4] >
DMG_val_ref[-1] - 0.1],
axis=0)) / 10000.)
if k == 1:
DS_test_any.append(np.sum(
np.all([DMG_check_PG.iloc[:, 2] > 0.],
axis=0)) / 10000.)
else:
DS_test_any.append(np.sum(
np.all([DMG_check_PG.iloc[:, 4] > 0.],
axis=0)) / 10000.)
assert_allclose(DS_ref_all, DS_test_all, atol=0.02)
assert_allclose(DS_ref_any, DS_test_any, atol=0.02)
# ---------------------------------------------------------------------
A.calculate_losses()
# ---------------------------------------------- check loss calculation
# No additional uncertainty is introduced when it comes to losses in
# this test. The decision variables and the damaged quantities shall
# follow the same distribution and have the same correlation structure.
# The damaged quantities have already been verified, so now we use them
# as reference values for testing the decision variables.
# COST and TIME and INJ
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
DV_INJ_dict = deepcopy(A._DV_dict['injuries'])
DV_INJ0 = DV_INJ_dict[0]
DV_INJ1 = DV_INJ_dict[1]
DMG_check = A._DMG
for k in range(4):
# Start with checking the correlations...
dmg = DMG_check.loc[:, (DMG_check != 0.0).any(axis=0)]
dmg_corr = dmg.loc[:, idx[k + 1, :, :]].corr()
for dv in [DV_COST, DV_TIME, DV_INJ0, DV_INJ1]:
dv = dv.loc[:, (dv != 0.0).any(axis=0)]
dv_corr = dv.loc[:, idx[k + 1, :, :]].corr()
assert_allclose(dmg_corr.values, dv_corr.values, atol=0.001)
# then check the distribution.
# After normalizing with the damaged quantities all decision
# variables in a given DS shall have the same value.
dv = ((dv / dmg).describe().T).fillna(0.0)
assert_allclose(dv['std'], np.zeros(len(dv.index)), atol=1.0)
# red tags require special checks
for f, fg_id in enumerate(sorted(A._FG_dict.keys())):
dims = [2, 3, 5, 5][f]
# take the total quantity of each performance group
FG = A._FG_dict[fg_id]
qnt = []
for PG in FG._performance_groups:
if isinstance(PG._quantity, RandomVariable):
qnt.append((PG._quantity.samples[:dims]).flatten())
else:
qnt.append(np.ones(dims) * PG._quantity)
qnt = np.array(qnt).flatten()
# flag the samples where the damage exceeds the pre-defined limit
# for red tagging
dmg = DMG_check.loc[:, idx[FG._ID, :, :]]
red_ref = dmg > 0.489 * qnt
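# Note (added for clarity): 0.489 is presumably chosen to sit just below the
# damage fraction that triggers a red tag in the test input, so the comparison
# avoids borderline cases.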
# collect the red tag results from the analysis
red_test = A._DV_dict['red_tag'].loc[:, idx[FG._ID, :, :]]
# compare
red_diff = (red_ref - red_test).describe().T
assert_allclose(red_diff['mean'].values, 0.)
assert_allclose(red_diff['std'].values, 0.)
# ---------------------------------------------------------------------
A.aggregate_results()
# -------------------------------------------- check result aggregation
# Aggregate results are checked in detail by other tests.
# Here we only focus on some simple checks to make sure the results
# make sense.
S = A._SUMMARY
SD = S.describe().T
assert SD.loc[('inhabitants', ''), 'mean'] == 10.0
assert SD.loc[('inhabitants', ''), 'std'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'mean'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'std'] == 0.0
assert_allclose(A._DV_dict['rec_cost'].sum(axis=1),
S.loc[:, ('reconstruction', 'cost')])
assert_allclose(A._DV_dict['rec_time'].sum(axis=1),
S.loc[:, ('reconstruction', 'time-sequential')])
assert_allclose(A._DV_dict['rec_time'].max(axis=1),
S.loc[:, ('reconstruction', 'time-parallel')])
assert_allclose(A._DV_dict['injuries'][0].sum(axis=1),
S.loc[:, ('injuries', 'sev1')])
assert_allclose(A._DV_dict['injuries'][1].sum(axis=1),
S.loc[:, ('injuries', 'sev2')])
def test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies_PG():
test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies('PG')
def test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies_DIR():
test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies('DIR')
def test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies_LOC():
test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies('LOC')
def test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies_ATC():
test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies('ATC')
def test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies_CSG():
test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies('CSG')
def test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies_DS():
test_FEMA_P58_Assessment_FRAG_uncertainty_dependencies('DS')
def test_FEMA_P58_Assessment_DV_uncertainty_dependencies():
"""
Perform loss assessment with customized inputs that focus on testing the
propagation of uncertainty in consequence functions and decision variables.
Dispersions in other calculation parameters are reduced to negligible
levels. This allows us to test the results against pre-defined reference
values in spite of the randomness involved in the calculations.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_10.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_10.out"
dep_list = ['IND', 'FG', 'PG', 'DIR', 'LOC', 'DS']
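# Note (added for clarity): seven cases are run below - the first (d == 0)
# draws the dependency settings at random, and the remaining six cycle the
# options so that every dependency type is exercised for each decision variable.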
for d in range(7):
if d > 0:
dep_COST = dep_list[[0, 1, 2, 3, 4, 5][d - 1]]
dep_TIME = dep_list[[1, 2, 3, 4, 5, 0][d - 1]]
dep_RED = dep_list[[2, 3, 4, 5, 0, 1][d - 1]]
dep_INJ = dep_list[[3, 4, 5, 0, 1, 2][d - 1]]
else:
dep_COST = np.random.choice(dep_list)
dep_TIME = np.random.choice(dep_list)
dep_RED = np.random.choice(dep_list)
dep_INJ = np.random.choice(dep_list)
dep_CT = np.random.choice([True, False])
dep_ILVL = np.random.choice([True, False])
#print([dep_COST, dep_TIME, dep_RED, dep_INJ, dep_CT, dep_ILVL], end=' ')
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
# set the dependencies
A._AIM_in['dependencies']['rec_costs'] = dep_COST
A._AIM_in['dependencies']['rec_times'] = dep_TIME
A._AIM_in['dependencies']['red_tags'] = dep_RED
A._AIM_in['dependencies']['injuries'] = dep_INJ
A._AIM_in['dependencies']['cost_and_time'] = dep_CT
A._AIM_in['dependencies']['injury_lvls'] = dep_ILVL
A.define_random_variables()
# ---------------------------------------------- check random variables
rho_ref = dict(
IND=np.zeros((16, 16)),
FG=np.ones((16, 16)),
PG=np.array([
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
]),
LOC=np.array([
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
]),
DIR=np.array([
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
]),
DS=np.array([
[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.],
])
)
np.fill_diagonal(rho_ref['IND'], 1.0)
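# Note (added for clarity): rho_ref collects the target correlation matrices
# among the 16 consequence RVs for each dependency option; the IND case starts
# from zeros and receives a unit diagonal here.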
# RV_REP = deepcopy(A._RV_dict['DV_REP'])
# RV_RED = deepcopy(A._RV_dict['DV_RED'])
# RV_INJ = deepcopy(A._RV_dict['DV_INJ'])
RV_REP = list(A._DV_REP_dict.values())
RV_RED = list(A._DV_RED_dict.values())
RV_INJ = list(A._DV_INJ_dict.values())
for r, (RV_DV, RV_tag) in enumerate(
zip([RV_REP, RV_RED, RV_INJ], ['rep', 'red', 'inj'])):
# assert len(RV_DV._dimension_tags) == [32, 16, 32][r]
assert len(RV_DV) == [32, 16, 32][r]
DV_theta_test, DV_beta_test = np.array([rv.theta for rv in RV_DV]).T
DV_rho_test = RV_DV[0].RV_set.Rho([rv.name for rv in RV_DV])
# COV_test = RV_DV.COV
# sig_test = np.sqrt(np.diagonal(COV_test))
# rho_test = COV_test / np.outer(sig_test, sig_test)
if RV_tag == 'rep':
assert_allclose(DV_theta_test, np.ones(32))
assert_allclose(DV_beta_test, np.array(
[0.31, 0.71] * 8 + [0.32, 0.72] * 8))
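# Note (added for clarity): when cost and time consequences are modeled as
# correlated (dep_CT), the joint correlation pattern is the element-wise
# maximum of the two individual patterns; combining LOC with DIR links every
# pair within a performance group, which is why that case reduces to the PG pattern.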
if dep_CT == True:
if (((dep_COST == 'LOC') and (dep_TIME == 'DIR')) or
((dep_COST == 'DIR') and (dep_TIME == 'LOC'))):
rho_ref_CT = rho_ref['PG']
else:
rho_ref_CT = np.maximum(rho_ref[dep_COST],
rho_ref[dep_TIME])
assert_allclose(DV_rho_test[:16, :16], rho_ref_CT)
assert_allclose(DV_rho_test[16:, 16:], rho_ref_CT)
assert_allclose(DV_rho_test[:16, 16:], rho_ref_CT)
assert_allclose(DV_rho_test[16:, :16], rho_ref_CT)
else:
assert_allclose(DV_rho_test[:16, :16], rho_ref[dep_COST])
assert_allclose(DV_rho_test[16:, 16:], rho_ref[dep_TIME])
assert_allclose(DV_rho_test[:16, 16:], np.zeros((16, 16)))
assert_allclose(DV_rho_test[16:, :16], np.zeros((16, 16)))
elif RV_tag == 'red':
assert_allclose(DV_theta_test, np.ones(16))
assert_allclose(DV_beta_test, np.array([0.33, 0.73] * 8))
assert_allclose(DV_rho_test, rho_ref[dep_RED])
elif RV_tag == 'inj':
assert_allclose(DV_theta_test, np.ones(32))
assert_allclose(DV_beta_test, np.array(
[0.34, 0.74] * 8 + [0.35, 0.75] * 8))
if dep_ILVL == True:
assert_allclose(DV_rho_test[:16, :16], rho_ref[dep_INJ])
assert_allclose(DV_rho_test[16:, 16:], rho_ref[dep_INJ])
assert_allclose(DV_rho_test[:16, 16:], rho_ref[dep_INJ])
assert_allclose(DV_rho_test[16:, :16], rho_ref[dep_INJ])
else:
assert_allclose(DV_rho_test[:16, :16], rho_ref[dep_INJ])
assert_allclose(DV_rho_test[16:, 16:], rho_ref[dep_INJ])
assert_allclose(DV_rho_test[:16, 16:], np.zeros((16, 16)))
assert_allclose(DV_rho_test[16:, :16], np.zeros((16, 16)))
# ---------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# -------------------------------------------- check damage calculation
# COL
# there shall be no collapses
assert A._COL.describe().T['mean'].values == 0
# DMG
DMG_check = A._DMG
# Fragilities are not tested here, so we only do a few simple checks
assert np.min(DMG_check.describe().loc['mean'].values) > 0.
assert np.min(DMG_check.describe().loc['std'].values) > 0.
# ---------------------------------------------------------------------
A.calculate_losses()
# ---------------------------------------------- check loss calculation
# COST and TIME and INJ
DV_COST = A._DV_dict['rec_cost'] / DMG_check
DV_TIME = A._DV_dict['rec_time'] / DMG_check
DV_INJ_dict = deepcopy(A._DV_dict['injuries'])
DV_INJ0 = DV_INJ_dict[0] / DMG_check
DV_INJ1 = DV_INJ_dict[1] / DMG_check
for dv_i, (DV, DV_tag) in enumerate(
zip([DV_COST, DV_TIME, DV_INJ0, DV_INJ1],
['cost', 'time', 'inj0', 'inj1'])):
DV_desc = DV.describe().T
DV_desc_log = np.log(DV).describe().T
if DV_tag == 'cost':
# cost consequences in DS1 are lognormal
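                # For a lognormal variable with median theta and log-std beta,
                # mean = theta * exp(beta**2 / 2) and
                # std = sqrt(exp(2*log(theta) + beta**2) * (exp(beta**2) - 1));
                # the reference values below apply these identities with
                # theta = 10 and beta = 0.31.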
mu_ds1_ref = np.exp(np.log(10.) + 0.31 ** 2. / 2.)
sig_ds1_ref = np.sqrt(
np.exp(2 * np.log(10.) + 0.31 ** 2.) * (
np.exp(0.31 ** 2.) - 1.))
assert_allclose(DV_desc['mean'].values[::2], mu_ds1_ref,
rtol=0.02)
assert_allclose(DV_desc['std'].values[::2], sig_ds1_ref,
rtol=0.10)
assert_allclose(DV_desc_log['mean'].values[::2],
np.log(10.), atol=0.02)
assert_allclose(DV_desc_log['std'].values[::2], 0.31,
rtol=0.10)
# cost consequences in DS2 are (truncated) normal
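                # tnorm (assumed to be scipy.stats.truncnorm) takes its
                # truncation limits in standardized units, so
                # a = (0 - loc) / scale = -1 / 0.71 truncates the cost at zero,
                # while b = 1000 acts as an effectively unbounded upper limit.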
mu_ds2_ref, var_ds2_ref = tnorm.stats(-1. / 0.71, 1000.,
loc=1000., scale=710.,
moments='mv')
sig_ds2_ref = np.sqrt(var_ds2_ref)
assert_allclose(DV_desc['mean'].values[1::2], mu_ds2_ref,
rtol=0.05)
assert_allclose(DV_desc['std'].values[1::2], sig_ds2_ref,
rtol=0.10)
# make sure that all damages correspond to positive
# reconstruction costs
assert np.all(np.min(DV) > 0.)
elif DV_tag == 'time':
                # time consequences in DS1 are (truncated) normal for FG1 and
                # lognormal for FG2
# DS1 - FG1
mu_ds1_ref, var_ds1_ref = tnorm.stats(-1. / 0.32, 1000.,
loc=0.01,
scale=0.0032,
moments='mv')
sig_ds1_ref = np.sqrt(var_ds1_ref)
assert_allclose(DV_desc['mean'].values[::2][:4], mu_ds1_ref,
rtol=0.02)
assert_allclose(DV_desc['std'].values[::2][:4], sig_ds1_ref,
rtol=0.20)
assert np.mean(
DV_desc['std'].values[::2][:4]) == pytest.approx(
sig_ds1_ref, rel=0.1)
# DS1 - FG2
mu_ds1_ref = np.exp(np.log(0.01) + 0.32 ** 2. / 2.)
sig_ds1_ref = np.sqrt(
np.exp(2 * np.log(0.01) + 0.32 ** 2.) * (
np.exp(0.32 ** 2.) - 1.))
assert_allclose(DV_desc['mean'].values[::2][4:], mu_ds1_ref,
rtol=0.02)
assert_allclose(DV_desc['std'].values[::2][4:], sig_ds1_ref,
rtol=0.20)
assert np.mean(
DV_desc['std'].values[::2][4:]) == pytest.approx(
sig_ds1_ref, rel=0.1)
assert_allclose(DV_desc_log['mean'].values[::2][4:],
np.log(0.01), atol=0.02)
assert_allclose(DV_desc_log['std'].values[::2][4:], 0.32,
rtol=0.20)
assert np.mean(
DV_desc_log['std'].values[::2][4:]) == pytest.approx(
0.32, rel=0.1)
                # time consequences in DS2 are lognormal for FG1 and
                # (truncated) normal for FG2
# DS2 - FG1
mu_ds2_ref = np.exp(np.log(1.) + 0.72 ** 2. / 2.)
sig_ds2_ref = np.sqrt(
np.exp(2 * np.log(1.) + 0.72 ** 2.) * (
np.exp(0.72 ** 2.) - 1.))
assert_allclose(DV_desc['mean'].values[1::2][:4],
mu_ds2_ref, rtol=0.05)
assert_allclose(DV_desc['std'].values[1::2][:4],
sig_ds2_ref, rtol=0.20)
assert np.mean(
DV_desc['std'].values[1::2][:4]) == pytest.approx(
sig_ds2_ref, rel=0.1)
assert_allclose(DV_desc_log['mean'].values[1::2][:4],
np.log(1.), atol=0.05)
assert_allclose(DV_desc_log['std'].values[1::2][:4], 0.72,
rtol=0.20)
assert np.mean(
DV_desc_log['std'].values[1::2][:4]) == pytest.approx(
0.72, rel=0.1)
# DS2 - FG2
mu_ds2_ref, var_ds2_ref = tnorm.stats(-1. / 0.72, 1000.,
loc=1., scale=0.72,
moments='mv')
sig_ds2_ref = np.sqrt(var_ds2_ref)
assert_allclose(DV_desc['mean'].values[1::2][4:],
mu_ds2_ref, rtol=0.05)
assert_allclose(DV_desc['std'].values[1::2][4:],
sig_ds2_ref, rtol=0.20)
assert np.mean(
DV_desc['std'].values[1::2][4:]) == pytest.approx(
sig_ds2_ref, rel=0.1)
# make sure that all damages correspond to positive
# reconstruction time
assert np.all(np.min(DV) > 0.)
elif DV_tag in ['inj0', 'inj1']:
# Injuries follow a truncated normal distribution in all cases
# The beta values provided are coefficients of variation of the
# non-truncated distribution. These provide the reference mean
# and standard deviation values for the truncated case.
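                # The (mu, beta) pairs are mapped to truncated normal
                # parameters as loc = mu and scale = mu * beta, with
                # standardized limits a = (0 - mu) / (mu * beta) = -1 / beta
                # and b = (1 - mu) / (mu * beta), i.e. the injury ratio is
                # restricted to the physically meaningful [0, 1] range.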
mu_ds1, mu_ds2 = {'inj0': [0.5, 0.6], 'inj1': [0.1, 0.2]}[
DV_tag]
beta_ds1, beta_ds2 = \
{'inj0': [0.34, 0.74], 'inj1': [0.35, 0.75]}[DV_tag]
# DS1
# The affected population in DS1 per unit quantity (identical
# for all FGs and injury levels)
p_aff = 0.05
mu_ref, var_ref = tnorm.stats(-1. / beta_ds1, (
1. - mu_ds1) / mu_ds1 / beta_ds1, loc=mu_ds1,
scale=mu_ds1 * beta_ds1,
moments='mv')
sig_ref = np.sqrt(var_ref)
assert_allclose(DV_desc['mean'].values[::2], mu_ref * p_aff,
rtol=beta_ds1 / 10.)
assert_allclose(DV_desc['std'].values[::2], sig_ref * p_aff,
rtol=0.20)
assert np.mean(
DV_desc['std'].values[::2]) == pytest.approx(
sig_ref * p_aff, rel=0.1)
# DS2
                # The affected population in DS2 per unit quantity (identical
                # for all FGs and injury levels)
p_aff = 0.1
mu_ref, var_ref = tnorm.stats(-1. / beta_ds2, (
1. - mu_ds2) / mu_ds2 / beta_ds2, loc=mu_ds2,
scale=mu_ds2 * beta_ds2,
moments='mv')
sig_ref = np.sqrt(var_ref)
assert_allclose(DV_desc['mean'].values[1::2],
mu_ref * p_aff, rtol=beta_ds2 / 10.)
assert_allclose(DV_desc['std'].values[1::2],
sig_ref * p_aff, rtol=0.20)
assert np.mean(
DV_desc['std'].values[1::2]) == pytest.approx(
sig_ref * p_aff, rel=0.1)
# red tags have to be treated separately
DV_RED = A._DV_dict['red_tag']
DMG_norm = DMG_check / 25.
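        # DMG is divided by the quantity assigned to each performance group
        # (25 units in this test input) so that DMG_norm holds damaged
        # fractions in the [0, 1] range.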
for i in range(16):
is_dam = pd.DataFrame(np.zeros((len(DMG_norm.index), 5)),
columns=range(5))
is_dam[0] = (DMG_norm.iloc[:, i] < 0.01)
is_dam[1] = (DMG_norm.iloc[:, i] > 0.01) & (
DMG_norm.iloc[:, i] < 0.275)
is_dam[2] = (DMG_norm.iloc[:, i] > 0.275) & (
DMG_norm.iloc[:, i] < 0.525)
is_dam[3] = (DMG_norm.iloc[:, i] > 0.525) & (
DMG_norm.iloc[:, i] < 0.775)
is_dam[4] = (DMG_norm.iloc[:, i] > 0.775)
mu_red = ([0.87, 0.23185] * 4 + [0.50, 0.23185] * 4)[i]
beta_red = ([0.33, 0.73] * 8)[i]
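            # The expected red tag rate in each damage bin is approximated by
            # the truncated normal CDF of the red tag capacity evaluated at a
            # representative damage ratio for that bin (0.25, 0.50, 0.75, 1.0).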
mu_ref = np.zeros(5)
mu_ref[1] = tnorm.cdf(0.25, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
mu_ref[2] = tnorm.cdf(0.50, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
mu_ref[3] = tnorm.cdf(0.75, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
mu_ref[4] = tnorm.cdf(1.00, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
sample_count = np.array(
[(DV_RED.iloc[:, i])[is_dam[c]].describe().loc['count'] for
c in range(5)])
mu_test = np.array(
[(DV_RED.iloc[:, i])[is_dam[c]].describe().loc['mean'] for c
in range(5)])
assert mu_test[0] == 0.
for step in range(1, 5):
if sample_count[step] > 0:
assert mu_test[step] == pytest.approx(
mu_ref[step],
abs=5 * 0.4 / np.sqrt(sample_count[step]))
# CORRELATIONS
# repair and injury correlations
DV_REP = pd.concat([DV_COST, DV_TIME], axis=1)
DV_INJ = pd.concat([DV_INJ0, DV_INJ1], axis=1)
for DV, RV, dv_tag in zip([DV_REP, DV_INJ, DV_RED],
[RV_REP, RV_INJ, RV_RED],
['rep', 'inj', 'red']):
if dv_tag == 'rep':
# transform the lognormal variables to log scale
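                # Pearson correlations of lognormal samples do not reproduce
                # the underlying Gaussian correlation exactly, so the lognormal
                # columns (cost DS1 for both FGs, time DS2 for FG1 and
                # time DS1 for FG2) are moved to log space before the
                # comparison against rho_ref below.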
log_flags = ([True, False] * 8 +
[False, True] * 4 +
[True, False] * 4)
for c, is_log in enumerate(log_flags):
if is_log:
DV.iloc[:, c] = np.log(DV.iloc[:, c])
elif dv_tag == 'red':
DV_RED_n = pd.DataFrame(np.ones(DV.shape) * np.nan,
index=DV.index, columns=DV.columns)
DMG_filter = pd.concat(
[(DMG_check.iloc[:, [0, 2, 4, 6]] / 25.0 > 0.525) & (
DMG_check.iloc[:, [0, 2, 4, 6]] / 25.0 < 0.775),
(DMG_check.iloc[:, [1, 3, 5, 7]] / 25.0 > 0.025) & (
DMG_check.iloc[:, [1, 3, 5, 7]] / 25.0 < 0.275),
(DMG_check.iloc[:, [8, 10, 12, 14]] / 25.0 > 0.275) & (
DMG_check.iloc[:, [8, 10, 12, 14]] / 25.0 < 0.525),
(DMG_check.iloc[:, [9, 11, 13, 15]] / 25.0 > 0.025) & (
DMG_check.iloc[:,
[9, 11, 13, 15]] / 25.0 < 0.275)], axis=1)
DV_RED_n[DMG_filter] = DV_RED[DMG_filter]
DV = DV_RED_n
DV_corr = DV.corr()
# use the correlations specified for the random variable as
# reference (that we already verified earlier)
# COV_ref = RV.COV
# sig_ref = np.sqrt(np.diagonal(COV_ref))
# rho_ref = COV_ref / np.outer(sig_ref, sig_ref)
rho_ref = RV[0].RV_set.Rho([rv.name for rv in RV])
# perform the tests
for i in range(len(DV_corr.index)):
for j in range(len(DV_corr.columns)):
ref_i = rho_ref[i, j]
if ref_i != 0.0:
if ref_i > 0.0:
assert DV_corr.iloc[i, j] > 0.97 * ref_i
else:
assert DV_corr.iloc[i, j] < 0.0
else:
assert DV_corr.iloc[i, j] == pytest.approx(ref_i,
abs=0.15)
# ---------------------------------------------------------------------
A.aggregate_results()
# -------------------------------------------- check result aggregation
# Aggregate results are checked in detail by other tests.
# Here we only focus on some simple checks to make sure the results
# make sense.
S = A._SUMMARY
SD = S.describe().T
assert SD.loc[('inhabitants', ''), 'mean'] == 20.0
assert SD.loc[('inhabitants', ''), 'std'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'mean'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'std'] == 0.0
assert_allclose(A._DV_dict['rec_cost'].sum(axis=1),
S.loc[:, ('reconstruction', 'cost')])
assert_allclose(A._DV_dict['rec_time'].sum(axis=1),
S.loc[:, ('reconstruction', 'time-sequential')])
assert_allclose(A._DV_dict['rec_time'].max(axis=1),
S.loc[:, ('reconstruction', 'time-parallel')])
assert_allclose(A._DV_dict['injuries'][0].sum(axis=1),
S.loc[:, ('injuries', 'sev1')])
assert_allclose(A._DV_dict['injuries'][1].sum(axis=1),
S.loc[:, ('injuries', 'sev2')])
#print()
def test_FEMA_P58_Assessment_DV_uncertainty_dependencies_with_partial_DV_data():
"""
Perform loss assessment with customized inputs that focus on testing the
propagation of uncertainty in consequence functions and decision variables
when not every component has injury and red tag consequences assigned to it.
Dispersions in other calculation parameters are reduced to negligible
levels. This allows us to test the results against pre-defined reference
values in spite of the randomness involved in the calculations.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_11.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_11.out"
dep_list = ['IND', 'FG', 'PG', 'DIR', 'LOC', 'DS']
for d in range(7):
if d > 0:
dep_COST = dep_list[[0, 1, 2, 3, 4, 5][d - 1]]
dep_TIME = dep_list[[1, 2, 3, 4, 5, 0][d - 1]]
dep_RED = dep_list[[2, 3, 4, 5, 0, 1][d - 1]]
dep_INJ = dep_list[[3, 4, 5, 0, 1, 2][d - 1]]
else:
dep_COST = np.random.choice(dep_list)
dep_TIME = np.random.choice(dep_list)
dep_RED = np.random.choice(dep_list)
dep_INJ = np.random.choice(dep_list)
dep_CT = np.random.choice([True, False])
dep_ILVL = np.random.choice([True, False])
# print([dep_COST, dep_TIME, dep_RED, dep_INJ, dep_CT, dep_ILVL], end=' ')
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
# set the dependencies
A._AIM_in['dependencies']['rec_costs'] = dep_COST
A._AIM_in['dependencies']['rec_times'] = dep_TIME
A._AIM_in['dependencies']['red_tags'] = dep_RED
A._AIM_in['dependencies']['injuries'] = dep_INJ
A._AIM_in['dependencies']['cost_and_time'] = dep_CT
A._AIM_in['dependencies']['injury_lvls'] = dep_ILVL
A.define_random_variables()
# ---------------------------------------------- check random variables
rho_ref = dict(
IND=np.zeros((16, 16)),
FG=np.ones((16, 16)),
PG=np.array([
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.],
]),
LOC=np.array([
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 1., 1.],
]),
DIR=np.array([
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1.],
]),
DS=np.array([
[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1.],
])
)
np.fill_diagonal(rho_ref['IND'], 1.0)
# RV_REP = deepcopy(A._RV_dict['DV_REP'])
# RV_RED = deepcopy(A._RV_dict['DV_RED'])
# RV_INJ = deepcopy(A._RV_dict['DV_INJ'])
RV_REP = list(A._DV_REP_dict.values())
RV_RED = list(A._DV_RED_dict.values())
RV_INJ = list(A._DV_INJ_dict.values())
for r, (RV_DV, RV_tag) in enumerate(
zip([RV_REP, RV_RED, RV_INJ], ['rep', 'red', 'inj'])):
# assert len(RV_DV._dimension_tags) == [32, 8, 16][r]
assert len(RV_DV) == [32, 8, 16][r]
DV_theta_test, DV_beta_test = np.array([rv.theta for rv in RV_DV]).T
DV_rho_test = RV_DV[0].RV_set.Rho([rv.name for rv in RV_DV])
# COV_test = RV_DV.COV
# sig_test = np.sqrt(np.diagonal(COV_test))
# rho_test = COV_test / np.outer(sig_test, sig_test)
if RV_tag == 'rep':
assert_allclose(DV_theta_test, np.ones(32))
assert_allclose(DV_beta_test, np.array(
[0.31, 0.71] * 8 + [0.32, 0.72] * 8))
if dep_CT == True:
if (((dep_COST == 'LOC') and (dep_TIME == 'DIR')) or
((dep_COST == 'DIR') and (dep_TIME == 'LOC'))):
rho_ref_CT = rho_ref['PG']
else:
rho_ref_CT = np.maximum(rho_ref[dep_COST],
rho_ref[dep_TIME])
assert_allclose(DV_rho_test[:16, :16], rho_ref_CT)
assert_allclose(DV_rho_test[16:, 16:], rho_ref_CT)
assert_allclose(DV_rho_test[:16, 16:], rho_ref_CT)
assert_allclose(DV_rho_test[16:, :16], rho_ref_CT)
else:
assert_allclose(DV_rho_test[:16, :16], rho_ref[dep_COST])
assert_allclose(DV_rho_test[16:, 16:], rho_ref[dep_TIME])
assert_allclose(DV_rho_test[:16, 16:], np.zeros((16, 16)))
assert_allclose(DV_rho_test[16:, :16], np.zeros((16, 16)))
elif RV_tag == 'red':
assert_allclose(DV_theta_test, np.ones(8))
assert_allclose(DV_beta_test, np.array([0.33, 0.73] * 4))
assert_allclose(DV_rho_test, rho_ref[dep_RED][:8,:8])
elif RV_tag == 'inj':
assert_allclose(DV_theta_test, np.ones(16))
assert_allclose(DV_beta_test, np.array(
[0.34, 0.74] * 4 + [0.35, 0.75] * 4))
if dep_ILVL == True:
assert_allclose(DV_rho_test[:8, :8], rho_ref[dep_INJ][:8,:8])
assert_allclose(DV_rho_test[8:, 8:], rho_ref[dep_INJ][:8,:8])
assert_allclose(DV_rho_test[:8, 8:], rho_ref[dep_INJ][:8,:8])
assert_allclose(DV_rho_test[8:, :8], rho_ref[dep_INJ][:8,:8])
else:
assert_allclose(DV_rho_test[:8, :8], rho_ref[dep_INJ][:8,:8])
assert_allclose(DV_rho_test[8:, 8:], rho_ref[dep_INJ][:8,:8])
assert_allclose(DV_rho_test[:8, 8:], np.zeros((8, 8)))
assert_allclose(DV_rho_test[8:, :8], np.zeros((8, 8)))
# ---------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# -------------------------------------------- check damage calculation
# COL
# there shall be no collapses
assert A._COL.describe().T['mean'].values == 0
# DMG
DMG_check = A._DMG
# Fragilities are not tested here, so we only do a few simple checks
assert np.min(DMG_check.describe().loc['mean'].values) > 0.
assert np.min(DMG_check.describe().loc['std'].values) > 0.
# ---------------------------------------------------------------------
A.calculate_losses()
# ---------------------------------------------- check loss calculation
# COST and TIME and INJ
DV_COST = A._DV_dict['rec_cost'] / DMG_check
DV_TIME = A._DV_dict['rec_time'] / DMG_check
DV_INJ_dict = deepcopy(A._DV_dict['injuries'])
DV_INJ0 = DV_INJ_dict[0] / DMG_check
DV_INJ1 = DV_INJ_dict[1] / DMG_check
for dv_i, (DV, DV_tag) in enumerate(
zip([DV_COST, DV_TIME, DV_INJ0, DV_INJ1],
['cost', 'time', 'inj0', 'inj1'])):
DV_desc = DV.describe().T
DV_desc_log = np.log(DV).describe().T
if DV_tag == 'cost':
# cost consequences in DS1 are lognormal
mu_ds1_ref = np.exp(np.log(10.) + 0.31 ** 2. / 2.)
sig_ds1_ref = np.sqrt(
np.exp(2 * np.log(10.) + 0.31 ** 2.) * (
np.exp(0.31 ** 2.) - 1.))
assert_allclose(DV_desc['mean'].values[::2], mu_ds1_ref,
rtol=0.02)
assert_allclose(DV_desc['std'].values[::2], sig_ds1_ref,
rtol=0.10)
assert_allclose(DV_desc_log['mean'].values[::2],
np.log(10.), atol=0.02)
assert_allclose(DV_desc_log['std'].values[::2], 0.31,
rtol=0.10)
# cost consequences in DS2 are (truncated) normal
mu_ds2_ref, var_ds2_ref = tnorm.stats(-1. / 0.71, 1000.,
loc=1000., scale=710.,
moments='mv')
sig_ds2_ref = np.sqrt(var_ds2_ref)
assert_allclose(DV_desc['mean'].values[1::2], mu_ds2_ref,
rtol=0.05)
assert_allclose(DV_desc['std'].values[1::2], sig_ds2_ref,
rtol=0.10)
# make sure that all damages correspond to positive
# reconstruction costs
assert np.all(np.min(DV) > 0.)
elif DV_tag == 'time':
                # time consequences in DS1 are (truncated) normal for FG1 and
                # lognormal for FG2
# DS1 - FG1
mu_ds1_ref, var_ds1_ref = tnorm.stats(-1. / 0.32, 1000.,
loc=0.01,
scale=0.0032,
moments='mv')
sig_ds1_ref = np.sqrt(var_ds1_ref)
assert_allclose(DV_desc['mean'].values[::2][:4], mu_ds1_ref,
rtol=0.02)
assert_allclose(DV_desc['std'].values[::2][:4], sig_ds1_ref,
rtol=0.20)
assert np.mean(
DV_desc['std'].values[::2][:4]) == pytest.approx(
sig_ds1_ref, rel=0.1)
# DS1 - FG2
mu_ds1_ref = np.exp(np.log(0.01) + 0.32 ** 2. / 2.)
sig_ds1_ref = np.sqrt(
np.exp(2 * np.log(0.01) + 0.32 ** 2.) * (
np.exp(0.32 ** 2.) - 1.))
assert_allclose(DV_desc['mean'].values[::2][4:], mu_ds1_ref,
rtol=0.02)
assert_allclose(DV_desc['std'].values[::2][4:], sig_ds1_ref,
rtol=0.20)
assert np.mean(
DV_desc['std'].values[::2][4:]) == pytest.approx(
sig_ds1_ref, rel=0.1)
assert_allclose(DV_desc_log['mean'].values[::2][4:],
np.log(0.01), atol=0.02)
assert_allclose(DV_desc_log['std'].values[::2][4:], 0.32,
rtol=0.20)
assert np.mean(
DV_desc_log['std'].values[::2][4:]) == pytest.approx(
0.32, rel=0.1)
                # time consequences in DS2 are lognormal for FG1 and
                # (truncated) normal for FG2
# DS2 - FG1
mu_ds2_ref = np.exp(np.log(1.) + 0.72 ** 2. / 2.)
sig_ds2_ref = np.sqrt(
np.exp(2 * np.log(1.) + 0.72 ** 2.) * (
np.exp(0.72 ** 2.) - 1.))
assert_allclose(DV_desc['mean'].values[1::2][:4],
mu_ds2_ref, rtol=0.05)
assert_allclose(DV_desc['std'].values[1::2][:4],
sig_ds2_ref, rtol=0.20)
assert np.mean(
DV_desc['std'].values[1::2][:4]) == pytest.approx(
sig_ds2_ref, rel=0.1)
assert_allclose(DV_desc_log['mean'].values[1::2][:4],
np.log(1.), atol=0.05)
assert_allclose(DV_desc_log['std'].values[1::2][:4], 0.72,
rtol=0.20)
assert np.mean(
DV_desc_log['std'].values[1::2][:4]) == pytest.approx(
0.72, rel=0.1)
# DS2 - FG2
mu_ds2_ref, var_ds2_ref = tnorm.stats(-1. / 0.72, 1000.,
loc=1., scale=0.72,
moments='mv')
sig_ds2_ref = np.sqrt(var_ds2_ref)
assert_allclose(DV_desc['mean'].values[1::2][4:],
mu_ds2_ref, rtol=0.05)
assert_allclose(DV_desc['std'].values[1::2][4:],
sig_ds2_ref, rtol=0.20)
assert np.mean(
DV_desc['std'].values[1::2][4:]) == pytest.approx(
sig_ds2_ref, rel=0.1)
# make sure that all damages correspond to positive
# reconstruction time
assert np.all(np.min(DV) > 0.)
elif DV_tag in ['inj0', 'inj1']:
# Injuries follow a truncated normal distribution in all cases
# The beta values provided are coefficients of variation of the
# non-truncated distribution. These provide the reference mean
# and standard deviation values for the truncated case.
mu_ds1, mu_ds2 = {'inj0': [0.5, 0.6],
'inj1': [0.1, 0.2]}[DV_tag]
beta_ds1, beta_ds2 = {'inj0': [0.34, 0.74],
'inj1': [0.35, 0.75]}[DV_tag]
# DS1
# The affected population in DS1 per unit quantity (identical
# for all FGs and injury levels)
p_aff = 0.05
mu_ref, var_ref = tnorm.stats(
-1. / beta_ds1, (1. - mu_ds1) / mu_ds1 / beta_ds1,
loc=mu_ds1,
scale=mu_ds1 * beta_ds1,
moments='mv')
sig_ref = np.sqrt(var_ref)
mu_ref = mu_ref * p_aff
sig_ref = sig_ref * p_aff
assert_allclose(DV_desc['mean'].values[::2],
[np.nan]*4 + [mu_ref]*4,
rtol=beta_ds1 / 10.)
assert_allclose(DV_desc['std'].values[::2],
[np.nan] * 4 + [sig_ref] * 4,
rtol=0.20)
assert np.mean(
DV_desc['std'].values[::2][4:]) == pytest.approx(
sig_ref, rel=0.1)
# DS2
                # The affected population in DS2 per unit quantity (identical
                # for all FGs and injury levels)
p_aff = 0.1
mu_ref, var_ref = tnorm.stats(-1. / beta_ds2, (
1. - mu_ds2) / mu_ds2 / beta_ds2, loc=mu_ds2,
scale=mu_ds2 * beta_ds2,
moments='mv')
sig_ref = np.sqrt(var_ref)
mu_ref = mu_ref * p_aff
sig_ref = sig_ref * p_aff
assert_allclose(DV_desc['mean'].values[1::2],
[np.nan] * 4 + [mu_ref] * 4,
rtol=beta_ds2 / 10.)
assert_allclose(DV_desc['std'].values[1::2],
[np.nan] * 4 + [sig_ref] * 4,
rtol=0.20)
assert np.mean(
DV_desc['std'].values[1::2][4:]) == pytest.approx(
sig_ref, rel=0.1)
# red tags have to be treated separately
DV_RED = A._DV_dict['red_tag']
DMG_norm = DMG_check / 25.
assert len(DV_RED.columns) == 8
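        # Only the second fragility group has red tag consequences in this
        # input, so DV_RED has 8 columns and they correspond to damage
        # columns 8-15 of DMG_norm (hence the i + 8 offset below).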
for i in range(8):
dmg_i = i+8
is_dam = pd.DataFrame(np.zeros((len(DMG_norm.index), 5)),
columns=range(5))
is_dam[0] = (DMG_norm.iloc[:, dmg_i] < 0.01)
is_dam[1] = (DMG_norm.iloc[:, dmg_i] > 0.01) & (
DMG_norm.iloc[:, dmg_i] < 0.275)
is_dam[2] = (DMG_norm.iloc[:, dmg_i] > 0.275) & (
DMG_norm.iloc[:, dmg_i] < 0.525)
is_dam[3] = (DMG_norm.iloc[:, dmg_i] > 0.525) & (
DMG_norm.iloc[:, dmg_i] < 0.775)
is_dam[4] = (DMG_norm.iloc[:, dmg_i] > 0.775)
mu_red = ([0.50, 0.23185] * 4)[i]
beta_red = ([0.33, 0.73] * 4)[i]
mu_ref = np.zeros(5)
mu_ref[1] = tnorm.cdf(0.25, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
mu_ref[2] = tnorm.cdf(0.50, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
mu_ref[3] = tnorm.cdf(0.75, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
mu_ref[4] = tnorm.cdf(1.00, -1. / beta_red,
(1. - mu_red) / mu_red / beta_red,
loc=mu_red, scale=mu_red * beta_red)
sample_count = np.array(
[(DV_RED.iloc[:, i])[is_dam[c]].describe().loc['count'] for
c in range(5)])
mu_test = np.array(
[(DV_RED.iloc[:, i])[is_dam[c]].describe().loc['mean'] for c
in range(5)])
assert mu_test[0] == 0.
for step in range(1, 5):
if sample_count[step] > 0:
assert mu_test[step] == pytest.approx(
mu_ref[step],
abs=5 * 0.4 / np.sqrt(sample_count[step]))
# CORRELATIONS
# repair and injury correlations
DV_REP = pd.concat([DV_COST, DV_TIME], axis=1)
DV_INJ = pd.concat([DV_INJ0, DV_INJ1], axis=1)
for DV, RV, dv_tag in zip([DV_REP, DV_INJ, DV_RED],
[RV_REP, RV_INJ, RV_RED],
['rep', 'inj', 'red']):
if dv_tag == 'rep':
# transform the lognormal variables to log scale
log_flags = ([True, False] * 8 +
[False, True] * 4 +
[True, False] * 4)
for c, is_log in enumerate(log_flags):
if is_log:
DV.iloc[:, c] = np.log(DV.iloc[:, c])
if dv_tag == 'inj':
# remove the columns with only nan values from DV
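                # columns 8-15 and 24-31 of DV_INJ hold the two injury levels
                # of the second fragility group; the first group has no injury
                # consequences here, so its columns are all NaN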
DV = pd.concat([DV.iloc[:,8:16], DV.iloc[:,24:32]], axis=1)
elif dv_tag == 'red':
DV_RED_n = pd.DataFrame(np.ones(DV.shape) * np.nan,
index=DV.index, columns=DV.columns)
DMG_filter = pd.concat(
[(DMG_check.iloc[:, [8, 10, 12, 14]] / 25.0 > 0.275) & (
DMG_check.iloc[:, [8, 10, 12, 14]] / 25.0 < 0.525),
(DMG_check.iloc[:, [9, 11, 13, 15]] / 25.0 > 0.025) & (
DMG_check.iloc[:,
[9, 11, 13, 15]] / 25.0 < 0.275)], axis=1)
DV_RED_n[DMG_filter] = DV_RED[DMG_filter]
DV = DV_RED_n
DV_corr = DV.corr()
# use the correlations specified for the random variable as
# reference (that we already verified earlier)
# COV_ref = RV.COV
# sig_ref = np.sqrt(np.diagonal(COV_ref))
# rho_ref = COV_ref / np.outer(sig_ref, sig_ref)
rho_ref = RV[0].RV_set.Rho([rv.name for rv in RV])
# perform the tests
for i in range(len(DV_corr.index)):
for j in range(len(DV_corr.columns)):
ref_i = rho_ref[i, j]
if ref_i != 0.0:
if ref_i > 0.0:
assert DV_corr.iloc[i, j] > 0.97 * ref_i
else:
assert DV_corr.iloc[i, j] < 0.0
else:
assert DV_corr.iloc[i, j] == pytest.approx(ref_i,
abs=0.15)
# ---------------------------------------------------------------------
A.aggregate_results()
# -------------------------------------------- check result aggregation
# Aggregate results are checked in detail by other tests.
# Here we only focus on some simple checks to make sure the results
# make sense.
S = A._SUMMARY
SD = S.describe().T
assert SD.loc[('inhabitants', ''), 'mean'] == 20.0
assert SD.loc[('inhabitants', ''), 'std'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'mean'] == 0.0
assert SD.loc[('collapses', 'collapsed'), 'std'] == 0.0
assert_allclose(A._DV_dict['rec_cost'].sum(axis=1),
S.loc[:, ('reconstruction', 'cost')])
assert_allclose(A._DV_dict['rec_time'].sum(axis=1),
S.loc[:, ('reconstruction', 'time-sequential')])
assert_allclose(A._DV_dict['rec_time'].max(axis=1),
S.loc[:, ('reconstruction', 'time-parallel')])
assert_allclose(A._DV_dict['injuries'][0].sum(axis=1),
S.loc[:, ('injuries', 'sev1')])
assert_allclose(A._DV_dict['injuries'][1].sum(axis=1),
S.loc[:, ('injuries', 'sev2')])
# print()
apps/oclib/client/__init__.py | leigeng2014/sango2 | Python | Apache-2.0
from apps.oclib.client.ocmongo import Mongo
from apps.oclib.client.ocredis import Redis
src/framework/visualization/offlinePlotter.py | securedataplane/mts | Python | MIT
import numpy as np
import matplotlib
# matplotlib.use('Agg')
import matplotlib.pyplot as plt
from numpy import arange
from scipy.interpolate import spline
from pylab import *
import itertools
import json
import time
import re
from datetime import datetime, tzinfo, timedelta
import glob
from matplotlib.patches import Rectangle
pcapAnalysisPathThroughput = "/home/hashkash/Documents/TUB/my_work/netVirtSec/secureDataPlane/evaluation/analysis/nsdi-submission/throughput/sharedCPU/"
pcapAnalysisPathLatency = "/home/hashkash/Documents/TUB/my_work/netVirtSec/secureDataPlane/evaluation/analysis/nsdi-submission/latency/sharedCPU/"
pcapAnalysisPathThroughputIsolated = "/home/hashkash/Documents/TUB/my_work/netVirtSec/secureDataPlane/evaluation/analysis/nsdi-submission/throughput/isolatedCPU/"
pcapAnalysisPathLatencyIsolated = "/home/hashkash/Documents/TUB/my_work/netVirtSec/secureDataPlane/evaluation/analysis/nsdi-submission/latency/isolatedCPU/"
# pcapAnalysisPathLatency = "/tmp/testing/nsdi/latency/sharedCPU/"
experiments = ["throughput", "latency"]
topology = "phy2phy"
topology = "phy2vm2vm2phy"
topologies = ["phy2phy", "phy2vm2vm2phy"]
# topology = "phy2phy"
# topology = "phy2vm2vm2phy"
labels = ["64bytes", "512bytes", "1500bytes", "2048bytes", "9000bytes"]
labels = ["64bytes", "512bytes", "1500bytes", "2048bytes"]
lat_packet_start_index = 500
lat_packet_end_index = 10500
topologies = ["phy2phy", "phy2vm2vm2phy"]
# SRIOV_*_MultiTenant is single OVSVM
vswitchModes = ["Baseline_NoDPDK", "Baseline_DPDK", "SRIOV_NoDPDK", "SRIOV_DPDK",
"Baseline_MultiTenant_NoDPDK", "Baseline_MultiTenant_DPDK",
"SRIOV_MultiTenant_NoDPDK", "SRIOV_MultiTenant_DPDK",
"SRIOV_MultiOvs_NoDPDK", "SRIOV_MultiOvs_DPDK"]
print "topologies: " + str(topologies)
print "vswitchModes: " + str(vswitchModes)
def plotThroughput(pcapAnalysisPath, topology):
baseline_noDpdk_tx, baseline_noDpdk_rx = [], []
baseline_Dpdk_tx, baseline_Dpdk_rx = [], []
sriov_dpdk_tx, sriov_dpdk_rx = [], []
sriov_noDpdk_tx, sriov_noDpdk_rx = [], []
if topology == "phy2phy":
baseline_noDpdk_tx, baseline_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_NoDPDK-planeelbe-*')
baseline_Dpdk_tx, baseline_Dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_DPDK-planeelbe-*')
sriov_dpdk_tx, sriov_dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_DPDK-planeelbe-*')
sriov_noDpdk_tx, sriov_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_NoDPDK-planeelbe-*')
elif topology == "phy2vm2vm2phy":
baseline_noDpdk_tx, baseline_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_NoDPDK-planeelbe-*')
baseline_Dpdk_tx, baseline_Dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_DPDK-planeelbe-*')
sriov_dpdk_tx, sriov_dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_DPDK-planeelbe-*')
sriov_noDpdk_tx, sriov_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_NoDPDK-planeelbe-*')
print baseline_noDpdk_tx, baseline_noDpdk_rx
print baseline_Dpdk_tx, baseline_Dpdk_rx
print sriov_dpdk_tx, sriov_dpdk_rx
print sriov_noDpdk_tx, sriov_noDpdk_rx
fig = plt.figure(1, figsize=(8.75, 4.6), frameon=True)
ax = plt.subplot(111)
plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^'))
# plt.plot(baseline_noDpdk_tx, baseline_noDpdk_rx, marker=marker.next(), color='#79c36a', linestyle='', label='baseline_nodpdk', markersize=9)
# plt.plot(baseline_Dpdk_tx, baseline_Dpdk_rx, marker=marker.next(), color='#79c36a', linestyle='', label='baseline_dpdk', markersize=9)
# plt.plot(sriov_noDpdk_tx, sriov_noDpdk_rx, marker=marker.next(), color='#599ad3', linestyle='', label='sriov_nodpdk', markersize=9)
# plt.plot(sriov_dpdk_tx, sriov_dpdk_rx, marker=marker.next(), color='#727272', linestyle='', label='sriov_dpdk', markersize=9)
plt.plot(baseline_noDpdk_tx, baseline_noDpdk_rx, label='baseline_nodpdk', marker=marker.next(), linestyle='')
plt.plot(baseline_Dpdk_tx, baseline_Dpdk_rx, label='baseline_dpdk', marker=marker.next(), linestyle='')
plt.plot(sriov_noDpdk_tx, sriov_noDpdk_rx, label='sriov_nodpdk', marker=marker.next(), linestyle='')
plt.plot(sriov_dpdk_tx, sriov_dpdk_rx, label='sriov_dpdk', marker=marker.next(), linestyle='')
# plt.ylim((300000, 700000 + 20000))
# plt.xlim((300000, 1500000 + 20000))
plt.ylabel('Packets/s Forwarded (k packets/s)')
plt.xlabel("Offered load (k packets/s)")
ax.legend(loc='lower center', ncol=2, bbox_to_anchor=(0.5, -0.45), numpoints=1)
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.25, box.width * 1.0, box.height * 0.75])
ax.set_axisbelow(True)
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'.png', dpi=(250), format='png')
plt.close()
def plotThroughputMulti(pcapAnalysisPath, topology):
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = [], []
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = [], []
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = [], []
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = [], []
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = [], []
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = [], []
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = [], []
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = [], []
if topology == "phy2phy":
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_NoDPDK-planeelbe-*')
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_NoDPDK-planeelbe-*')
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
elif topology == "phy2vm2vm2phy":
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_NoDPDK-planeelbe-*')
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_NoDPDK-planeelbe-*')
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
print Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx
print Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx
print SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx
print SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx
print SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx
print SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx
print SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx
print SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx
fig = plt.figure(1, figsize=(8.75, 4.6), frameon=True)
ax = plt.subplot(111)
plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^', 'p'))
# plt.plot(Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx, marker=marker.next(), color='#79c36a', linestyle='', label='Baseline_MultiTenant_NoDPDK', markersize=9)
# plt.plot(SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx, marker=marker.next(), color='#599ad3', linestyle='', label='SRIOV_MultiTenant_DPDK', markersize=9)
# plt.plot(SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx, marker=marker.next(), color='#727272', linestyle='', label='SRIOV_MultiTenant_NoDPDK', markersize=9)
# plt.plot(SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx, marker=marker.next(), color='#599ad3', linestyle='',
# label='SRIOV_MultiOvs_DPDK', markersize=9)
# plt.plot(SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx, marker=marker.next(), color='#727272',
# linestyle='', label='SRIOV_MultiOvs_NoDPDK', markersize=9)
plt.plot(Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx, label='Baseline_MultiTenant_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx, label='Baseline_MultiTenant_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx, label='SRIOV_MultiTenant_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx, label='SRIOV_MultiTenant_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx, label='SRIOV_MultiOvs_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx, label='SRIOV_MultiOvs_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx, label='SRIOV_MultiOvs_NoDPDK_Isolated', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx, label='SRIOV_MultiOvs_DPDK_Isolated', marker=marker.next(), linestyle='')
# plt.ylim((300000, 1400000 + 20000))
# plt.xlim((300000, 1400000 + 20000))
plt.ylabel('Packets/s Forwarded (k packets/s)')
plt.xlabel("Offered load (k packets/s)")
ax.legend(loc='lower center', ncol=2, bbox_to_anchor=(0.5, -0.45), numpoints=1)
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.25, box.width * 1.0, box.height * 0.75])
ax.set_axisbelow(True)
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Multi.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Multi.png', dpi=(320), format='png')
plt.close()
def plotThroughputSplit(pcapAnalysisPath, topology):
baseline_noDpdk_tx, baseline_noDpdk_rx = [], []
baseline_Dpdk_tx, baseline_Dpdk_rx = [], []
sriov_dpdk_tx, sriov_dpdk_rx = [], []
sriov_noDpdk_tx, sriov_noDpdk_rx = [], []
if topology == "phy2phy":
baseline_noDpdk_tx, baseline_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_NoDPDK-planeelbe-*')
baseline_Dpdk_tx, baseline_Dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_DPDK-planeelbe-*')
sriov_dpdk_tx, sriov_dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_DPDK-planeelbe-*')
sriov_noDpdk_tx, sriov_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_NoDPDK-planeelbe-*')
elif topology == "phy2vm2vm2phy":
baseline_noDpdk_tx, baseline_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_NoDPDK-planeelbe-*')
baseline_Dpdk_tx, baseline_Dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_DPDK-planeelbe-*')
sriov_dpdk_tx, sriov_dpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_DPDK-planeelbe-*')
sriov_noDpdk_tx, sriov_noDpdk_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_NoDPDK-planeelbe-*')
print baseline_noDpdk_tx, baseline_noDpdk_rx
print baseline_Dpdk_tx, baseline_Dpdk_rx
print sriov_dpdk_tx, sriov_dpdk_rx
print sriov_noDpdk_tx, sriov_noDpdk_rx
fig = plt.figure(1, figsize = (3.487, 2.15512978986403),frameon=True)
ax = plt.subplot(1, 2, 1)
plt.tight_layout()
plt.grid(True)
# marker = itertools.cycle(('+', '.', 'x', '4'))
marker = itertools.cycle(('.', '+', 'x', '_', '1', '2', '3', '4'))
plt.plot(baseline_noDpdk_tx, baseline_noDpdk_rx, label='Baseline', marker=marker.next(), linestyle='', fillstyle="none", color="black")
# plt.plot(baseline_Dpdk_tx, baseline_Dpdk_rx, label='baseline_dpdk', marker=marker.next(), linestyle='')
plt.plot(sriov_noDpdk_tx, sriov_noDpdk_rx, label='1 vswitch VM', marker=marker.next(), linestyle='', fillstyle="none")
# plt.plot(sriov_dpdk_tx, sriov_dpdk_rx, label='sriov_dpdk', marker=marker.next(), linestyle='')
if topology == "phy2vm2vm2phy":
plt.ylim((0, 1400))
else:
plt.ylim((400, 1400))
# plt.xlim((400, 1400))
plt.xticks(range(400, 1500, 400), tuple(range(400, 1500, 400)))
plt.ylabel('Received load (k packets/s)')
# plt.xlabel("Offered load (k packets/s)")
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * 0.90, box.height * 0.75])
# ax.legend(loc='lower center', ncol=2, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
ax.set_axisbelow(True)
plt.figlegend(loc='lower center', ncol=2)
### Second plot with dpdk
ax = plt.subplot(1, 2, 2)
plt.grid(True)
marker = itertools.cycle(('.', '+', 'x', '_', '1', '2', '3', '4'))
# plt.plot(baseline_noDpdk_tx, baseline_noDpdk_rx, label='B: Baseline', marker=marker.next(), linestyle='', fillstyle="none")
plt.plot(baseline_Dpdk_tx, baseline_Dpdk_rx, label='Baseline', marker=marker.next(), linestyle='', fillstyle="none", color="black")
# plt.plot(sriov_noDpdk_tx, sriov_noDpdk_rx, label='P1: Principle 1', marker=marker.next(), linestyle='', fillstyle="none")
plt.plot(sriov_dpdk_tx, sriov_dpdk_rx, label='1 vswitch VM', marker=marker.next(), linestyle='', fillstyle="none")
if topology == "phy2vm2vm2phy":
plt.ylim((0, 1400))
else:
plt.ylim((400, 1400))
# plt.ylim((400, 1400))
plt.xticks(range(400, 1500, 400), tuple(range(400, 1500, 400)))
plt.figtext(0.35, 0.2, "Offered load (k packets/s)", color="black")
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * .90, box.height * 0.75])
ax.set_axisbelow(True)
plt.figtext(0.26, 0.12, "No DPDK", color="black")
plt.figtext(0.71, 0.12, "With DPDK", color="black")
# plt.figlegend(loc='lower center', ncol=2)#, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
# ax.legend(marker, ['Baseline', 'Principle 1', 'Baselin + 3', 'Principle 1 + 3'], handletextpad=-0.18, handlelength=0, markerscale=0, loc='lower center', ncol=3, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Split.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Split.png', dpi=(250), format='png')
plt.close()
def plotThroughputMultiSplit(pcapAnalysisPath, topology):
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = [], []
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = [], []
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = [], []
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = [], []
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = [], []
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = [], []
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = [], []
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = [], []
if topology == "phy2phy":
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_NoDPDK-planeelbe-*')
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_NoDPDK-planeelbe-*')
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
elif topology == "phy2vm2vm2phy":
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_NoDPDK-planeelbe-*')
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_NoDPDK-planeelbe-*')
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = get_tput_dict(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
print Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx
print Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx
print SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx
print SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx
print SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx
print SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx
print SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx
print SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx
fig = plt.figure(1, figsize = (3.487, 2.15512978986403),frameon=True)
ax = plt.subplot(1, 2, 1)
plt.tight_layout()
plt.grid(True)
# marker = itertools.cycle(('+', '.', 'x', '_', '1', '2', '3', '4'))
marker = itertools.cycle(('.', '+', 'x', '_', '1', '2', '3', '4'))
plt.plot(Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx, label='Baseline', marker=marker.next(), linestyle='', fillstyle="none", color="black")
# plt.plot(Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx, label='Baseline_MultiTenant_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx, label='1 vswitch VM', marker=marker.next(), linestyle='', fillstyle="none")
# plt.plot(SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx, label='SRIOV_MultiTenant_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx, label='2 vswitch VMs (shared CPU)', marker=marker.next(), linestyle='', fillstyle="none")
# plt.plot(SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx, label='SRIOV_MultiOvs_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx, label='2 vswitch VMs (isolated CPU)', marker=marker.next(), linestyle='', fillstyle="none")
# plt.plot(SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx, label='SRIOV_MultiOvs_DPDK_Isolated', marker=marker.next(), linestyle='')
if topology == "phy2vm2vm2phy":
plt.ylim((0, 1400))
else:
plt.ylim((400, 1400))
# plt.xlim((400, 1400))
plt.xticks(range(400, 1500, 400), tuple(range(400, 1500, 400)))
plt.ylabel('Received load (k packets/s)')
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.29, box.width * 0.90, box.height * 0.75])
# ax.legend(loc='lower center', ncol=2, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
ax.set_axisbelow(True)
plt.figlegend(loc='lower center', ncol=2, handletextpad=-0.18)
### Second plot with dpdk
ax = plt.subplot(1, 2, 2)
plt.grid(True)
marker = itertools.cycle(('.', '+', 'x', '_', '1', '2', '3', '4'))
# plt.plot(Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx, label='Baseline_MultiTenant_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx, label='Baseline', marker=marker.next(), linestyle='', fillstyle="none", color="black")
# plt.plot(SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx, label='SRIOV_MultiTenant_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx, label='1 vswitch VM', marker=marker.next(), linestyle='', fillstyle="none")
# plt.plot(SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx, label='SRIOV_MultiOvs_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx, label='2 vswitch VM (shared CPU) + 3', marker=marker.next(), linestyle='', fillstyle="none")
# plt.plot(SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx, label='SRIOV_MultiOvs_NoDPDK_Isolated', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx, label='2 vswitch VM (isolated CPU)', marker=marker.next(), linestyle='', fillstyle="none")
if topology == "phy2vm2vm2phy":
plt.ylim((0, 1400))
else:
plt.ylim((400, 1400))
plt.xticks(range(400, 1500, 400), tuple(range(400, 1500, 400)))
plt.figtext(0.35, 0.24, "Offered load (k packets/s)", color="black")
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.29, box.width * .90, box.height * 0.75])
ax.set_axisbelow(True)
plt.figtext(0.26, 0.19, "No DPDK", color="black")
plt.figtext(0.71, 0.19, "With DPDK", color="black")
# plt.figlegend(loc='lower center', ncol=2, handletextpad=-0.18)#, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Multi-Split.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Multi-Split.png', dpi=(320), format='png')
plt.close()
def get_tput_dict(txPath, rxPath):
print "get_tput_dict()"
x1 = []
y1 = []
try:
d = glob.glob(rxPath)
d.sort()
for i in d:
# print "y1 parsedicts:"
y1.append(parse_tput_dict(i))
print parse_tput_dict(i)
d = glob.glob(txPath)
d.sort()
for i in d:
# print "x1 parsedicts:"
x1.append(parse_tput_dict(i))
print parse_tput_dict(i)
# exit()
return x1, y1
except:
x1 = []
y1 = []
return x1, y1
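# Hedged usage sketch (file naming inferred from the calls below, not independently
# verified): get_tput_dict() takes two glob patterns, one per traffic direction, and
# returns the parsed tx/rx series in k packets/s, e.g.
#
#   tx, rx = get_tput_dict(
#       pcapAnalysisPathThroughputIsolated + 'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
#       pcapAnalysisPathThroughputIsolated + 'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')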
def plotThroughputLoss(pcapAnalysisPath, topology):
baseline_noDpdk_tx, baseline_noDpdk_rx = [], []
baseline_Dpdk_tx, baseline_Dpdk_rx = [], []
sriov_dpdk_tx, sriov_dpdk_rx = [], []
sriov_noDpdk_tx, sriov_noDpdk_rx = [], []
if topology == "phy2phy":
baseline_noDpdk_tx, baseline_noDpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-Baseline_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_NoDPDK-planeelbe-*')
baseline_Dpdk_tx, baseline_Dpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-Baseline_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_DPDK-planeelbe-*')
sriov_dpdk_tx, sriov_dpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_DPDK-planeelbe-*')
sriov_noDpdk_tx, sriov_noDpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_NoDPDK-planeelbe-*')
elif topology == "phy2vm2vm2phy":
baseline_noDpdk_tx, baseline_noDpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_NoDPDK-planeelbe-*')
baseline_Dpdk_tx, baseline_Dpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_DPDK-planeelbe-*')
sriov_dpdk_tx, sriov_dpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_DPDK-planeelbe-*')
sriov_noDpdk_tx, sriov_noDpdk_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_NoDPDK-planeelbe-*')
print baseline_noDpdk_tx, baseline_noDpdk_rx
print baseline_Dpdk_tx, baseline_Dpdk_rx
print sriov_dpdk_tx, sriov_dpdk_rx
print sriov_noDpdk_tx, sriov_noDpdk_rx
fig = plt.figure(1, figsize=(8.75, 4.6), frameon=True)
ax = plt.subplot(111)
plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^'))
# plt.plot(baseline_noDpdk_tx, baseline_noDpdk_rx, marker=marker.next(), color='#79c36a', linestyle='', label='baseline_nodpdk', markersize=9)
# plt.plot(baseline_Dpdk_tx, baseline_Dpdk_rx, marker=marker.next(), color='#79c36a', linestyle='', label='baseline_dpdk', markersize=9)
# plt.plot(sriov_noDpdk_tx, sriov_noDpdk_rx, marker=marker.next(), color='#599ad3', linestyle='', label='sriov_nodpdk', markersize=9)
# plt.plot(sriov_dpdk_tx, sriov_dpdk_rx, marker=marker.next(), color='#727272', linestyle='', label='sriov_dpdk', markersize=9)
plt.plot(baseline_noDpdk_tx, baseline_noDpdk_rx, label='baseline_nodpdk', marker=marker.next(), linestyle='')
plt.plot(baseline_Dpdk_tx, baseline_Dpdk_rx, label='baseline_dpdk', marker=marker.next(), linestyle='')
plt.plot(sriov_noDpdk_tx, sriov_noDpdk_rx, label='sriov_nodpdk', marker=marker.next(), linestyle='')
plt.plot(sriov_dpdk_tx, sriov_dpdk_rx, label='sriov_dpdk', marker=marker.next(), linestyle='')
# plt.ylim((300000, 700000 + 20000))
# plt.xlim((300000, 1500000 + 20000))
plt.ylim((0.000,0.99))
# plt.xlim((10000,35000))
plt.ylabel('Packet loss (percent)')
plt.xlabel("Packets/s Sent")
ax.set_yscale('symlog')
ax.set_yticks((0.00, 0.01, 0.10, 0.20, 0.30, 0.40)) #, ("5\%", "10\%", "15\%", "20\%", "25\%", "30\%", "35\%", "40\%", "45\%", "50\%"))
ax.set_yticklabels(('0%', '1%', '10%', '20%', '30%', '40%'))
# ax.set_xticklabels(('k', '15k', '20k', '25k', '30k', '35k'))
ax.legend(loc='lower center', ncol=2, bbox_to_anchor=(0.5, -0.45), numpoints=1)
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.25, box.width * 1.0, box.height * 0.75])
ax.set_axisbelow(True)
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Loss.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Loss.png', dpi=(250), format='png')
plt.close()
def plotThroughputMultiLoss(pcapAnalysisPath, topology):
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = [], []
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = [], []
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = [], []
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = [], []
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = [], []
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = [], []
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = [], []
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = [], []
if topology == "phy2phy":
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_NoDPDK-planeelbe-*')
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-Baseline_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_NoDPDK-planeelbe-*')
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
elif topology == "phy2vm2vm2phy":
Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_NoDPDK-planeelbe-*')
Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-Baseline_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_NoDPDK-planeelbe-*')
SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiTenant_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx = get_tput_dict_loss(
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPath+'phy2vm2vm2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_NoDPDK-planeelbe-*')
SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx = get_tput_dict(
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-elbeplane-*',
pcapAnalysisPathThroughputIsolated+'phy2phy-throughput-SRIOV_MultiOvs_DPDK-planeelbe-*')
print Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx
print Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx
print SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx
print SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx
print SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx
print SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx
print SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx
print SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx
fig = plt.figure(1, figsize=(8.75, 4.6), frameon=True)
ax = plt.subplot(111)
plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^', 'p'))
# plt.plot(Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx, marker=marker.next(), color='#79c36a', linestyle='', label='Baseline_MultiTenant_NoDPDK', markersize=9)
# plt.plot(SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx, marker=marker.next(), color='#599ad3', linestyle='', label='SRIOV_MultiTenant_DPDK', markersize=9)
# plt.plot(SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx, marker=marker.next(), color='#727272', linestyle='', label='SRIOV_MultiTenant_NoDPDK', markersize=9)
# plt.plot(SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx, marker=marker.next(), color='#599ad3', linestyle='',
# label='SRIOV_MultiOvs_DPDK', markersize=9)
# plt.plot(SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx, marker=marker.next(), color='#727272',
# linestyle='', label='SRIOV_MultiOvs_NoDPDK', markersize=9)
plt.plot(Baseline_MultiTenant_NoDPDK_tx, Baseline_MultiTenant_NoDPDK_rx, label='Baseline_MultiTenant_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(Baseline_MultiTenant_DPDK_tx, Baseline_MultiTenant_DPDK_rx, label='Baseline_MultiTenant_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiTenant_NoDPDK_tx, SRIOV_MultiTenant_NoDPDK_rx, label='SRIOV_MultiTenant_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiTenant_DPDK_tx, SRIOV_MultiTenant_DPDK_rx, label='SRIOV_MultiTenant_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_NoDPDK_tx, SRIOV_MultiOvs_NoDPDK_rx, label='SRIOV_MultiOvs_NoDPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_DPDK_tx, SRIOV_MultiOvs_DPDK_rx, label='SRIOV_MultiOvs_DPDK', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_NoDPDK_Isolated_tx, SRIOV_MultiOvs_NoDPDK_Isolated_rx, label='SRIOV_MultiOvs_NoDPDK_Isolated', marker=marker.next(), linestyle='')
plt.plot(SRIOV_MultiOvs_DPDK_Isolated_tx, SRIOV_MultiOvs_DPDK_Isolated_rx, label='SRIOV_MultiOvs_DPDK_Isolated', marker=marker.next(), linestyle='')
# plt.ylim((300000, 700000 + 20000))
# plt.xlim((300000, 1500000 + 20000))
plt.ylim((0.000,0.99))
# plt.xlim((10000,35000))
plt.ylabel('Packet loss (percent)')
plt.xlabel("Packets/s Sent")
ax.set_yscale('symlog')
ax.set_yticks((0.00, 0.01, 0.10, 0.20, 0.30, 0.40)) #, ("5\%", "10\%", "15\%", "20\%", "25\%", "30\%", "35\%", "40\%", "45\%", "50\%"))
ax.set_yticklabels(('0%', '1%', '10%', '20%', '30%', '40%'))
# ax.set_xticklabels(('k', '15k', '20k', '25k', '30k', '35k'))
ax.legend(loc='lower center', ncol=2, bbox_to_anchor=(0.5, -0.45), numpoints=1)
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.25, box.width * 1.0, box.height * 0.75])
ax.set_axisbelow(True)
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Multi-Loss.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_tput_'+topology+'-Multi-Loss.png', dpi=(320), format='png')
plt.close()
def get_tput_dict_loss(txPath, rxPath):
print "get_tput_dict()"
x1 = []
x11 = []
y1 = []
try:
d = glob.glob(txPath)
d.sort()
print d
for i in d:
print i
temp = i.split('-')[5]
print "temp: " + str(temp)
nmbr = int(temp)
# nmbr = int(float(temp.split('-')[5]))
z = parse_tput_dict(i) * 1000
print z
x11.append(z)
x1.append(nmbr)
print str(parse_tput_dict(i))
d = glob.glob(rxPath)
d.sort()
c1 = 0
for i in d:
c2 = 1 - float(parse_tput_dict(i)*1000) / x11[c1]
y1.append(c2)
#y1.append(parse_dicts(i))
c1 = c1 + 1
return x1, y1
except:
x1 = []
y1 = []
return x1, y1
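# Worked example of the loss computation above (hypothetical numbers): if a tx file parses
# to 1000 (k packets/s), x11 stores 1000 * 1000 = 1,000,000 packets/s; if the matching rx
# file parses to 900 (k packets/s), the loss fraction appended to y1 is
#   1 - (900 * 1000) / 1,000,000 = 0.1, i.e. 10% of the offered load was lost.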
def parse_tput_dict(dict_data):
for l in open(dict_data):
if l.split()[0] == 'Average':
return int(float(l.split()[3])/1000)
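# parse_tput_dict() assumes each result file contains a summary line whose first token is
# 'Average' and whose fourth whitespace-separated token is the measured rate in packets/s;
# it returns that rate in k packets/s. A hypothetical matching line:
#
#   Average rate (pps): 650000.0
#
# would make parse_tput_dict() return int(650000.0 / 1000) = 650.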
def plotLatency(pcapAnalysisPath,topology):
baseline_noDpdk = {}
baseline_Dpdk = {}
sriov_dpdk = {}
sriov_noDpdk = {}
if topology == "phy2phy":
baseline_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_NoDPDK-')
baseline_Dpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_DPDK-')
sriov_dpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_DPDK-')
sriov_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_NoDPDK-')
elif topology == "phy2vm2vm2phy":
baseline_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_NoDPDK-')
baseline_Dpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_DPDK-')
sriov_dpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_DPDK-')
sriov_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_NoDPDK-')
# print baseline_noDpdk
# print sriov_dpdk
# print sriov_noDpdk
fig = plt.figure(1, figsize = (8.75,4.6),frameon=True)
fig.autofmt_xdate(bottom=0.1, rotation=90, ha='right')
ax = plt.subplot(111)
c = 0
data = []
xmark = []
data.append([])
xmark.append("")
c = 0
for l in labels:
data.append(baseline_noDpdk[l])
xmark.append('baseline-nodpdk')
data.append(baseline_Dpdk[l])
xmark.append('baseline-dpdk')
data.append(sriov_noDpdk[l])
xmark.append('sriov-nodpdk')
data.append(sriov_dpdk[l])
xmark.append('sriov-dpdk')
ax.text(3.0, 10000.05, u'64$B$')
ax.text(7.0, 10000.05, u'512$B$')
ax.text(11.0, 10000.05, u'1500$B$')
ax.text(15.0, 10000.05, u'2048$B$')
# ax.text(18.0, 10000.05, u'9000$B$')
bp_dict = plt.boxplot(data, patch_artist=False)
plt.setp(bp_dict['whiskers'], color='black', linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color='blue', linewidth=1, marker='+', markersize=2)
plt.setp(bp_dict['boxes'], linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks(range(1, 19), tuple(xmark), rotation='-45', ha='left')
# Print median values for debug
# medians=[]
# for line in bp_dict['medians']:
# # get position data for median line
# x, y = line.get_xydata()[1] # top of median line
# # overlay median value
# text(x, y, '%.4f' % y,
# horizontalalignment='center', fontsize=5) # draw above, centered
# print "%.4f" % y
# medians.append(y)
# plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^'))
plt.plot([1.0, 1.0], [-1, 10000], color='#000000')
plt.plot([5.5, 5.5], [-1, 10000], color='#000000')
plt.plot([9.5, 9.5], [-1, 10000], color='#000000')
plt.plot([13.5, 13.5], [-1, 10000], color='#000000')
plt.plot([17.5, 17.5], [-1, 10000], color='#000000')
plt.ylim((0.001,10))
plt.ylabel('Latency in millisecond')
plt.xlabel("Scenario mode")
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.25, box.width * 1.0, box.height * 0.78])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
# ax.set_xscale('log')
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'.png', dpi=(250), format='png')
plt.close()
def plotLatencySplitSingles(pcapAnalysisPath,topology):
baseline_noDpdk = {}
baseline_Dpdk = {}
sriov_dpdk = {}
sriov_noDpdk = {}
if topology == "phy2phy":
baseline_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_NoDPDK-')
baseline_Dpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_DPDK-')
sriov_dpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_DPDK-')
sriov_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_NoDPDK-')
elif topology == "phy2vm2vm2phy":
baseline_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_NoDPDK-')
baseline_Dpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_DPDK-')
sriov_dpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_DPDK-')
sriov_noDpdk = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_NoDPDK-')
# print baseline_noDpdk
# print sriov_dpdk
# print sriov_noDpdk
fig = plt.figure(1, figsize = (3.487, 2.15512978986403),frameon=True)
fig.autofmt_xdate(bottom=0.1, rotation=90, ha='right')
ax = plt.subplot(1, 2, 1)
plt.tight_layout()
c = 0
data = []
xmark = []
c = 0
labels = ["64bytes"]
for l in labels:
data.append(baseline_noDpdk[l])
xmark.append('Baseline')
# data.append(baseline_Dpdk[l])
# xmark.append('baseline-dpdk')
data.append(sriov_noDpdk[l])
xmark.append(' 1 vswitch\nVM')
# data.append(sriov_dpdk[l])
# xmark.append('sriov-dpdk')
bp_dict = plt.boxplot(data, patch_artist=False)
plt.setp(bp_dict['whiskers'], color='black', linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color='blue', linewidth=1, marker='+', markersize=1)
plt.setp(bp_dict['boxes'], linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks([1, 2], tuple(["B", "1"]))
plt.plot([1.5, 1.5], [-1, 10000], color='#000000')
# plt.axvspan(1.5, 5.0, facecolor='0.6', alpha=0.5)
plt.ylim((1,10000))
plt.ylabel('Latency (microsecond)')
# ax.add_patch(Rectangle((1.49, .9), 1, 10002, alpha=0.1, color='blue'))
# ax.add_patch(Rectangle((2.49, .9), 1, 10002, alpha=0.1, color='orange'))
# ax.add_patch(Rectangle((3.49, .9), 1, 10002, alpha=0.1, color='green'))
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * 0.91, box.height * 0.80])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
### Second plot with dpdk
ax = plt.subplot(1, 2, 2)
c = 0
data = []
xmark = []
# data.append([])
# xmark.append("")
c = 0
for l in labels:
# data.append(baseline_noDpdk[l])
# xmark.append('baseline-nodpdk')
data.append(baseline_Dpdk[l])
xmark.append('Baseline')
# data.append(sriov_noDpdk[l])
# xmark.append('sriov-nodpdk')
data.append(sriov_dpdk[l])
xmark.append(' 1 vswitch\nVM')
bp_dict = plt.boxplot(data, patch_artist=False)
plt.setp(bp_dict['whiskers'], color='black', linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color='blue', linewidth=1, marker='+', markersize=1)
plt.setp(bp_dict['boxes'], linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks([1, 2], tuple(["B", "1"]))
plt.plot([1.5, 1.5], [-1, 10000], color='#000000')
# plt.axvspan(1.5, 5.0, facecolor='0.6', alpha=0.5)
plt.ylim((1,10000))
# plt.ylabel('Latency (microsecond)')
# ax.add_patch(Rectangle((1.49, .9), 1, 10002, alpha=0.1, color='blue'))
# ax.add_patch(Rectangle((2.49, .9), 1, 10002, alpha=0.1, color='orange'))
# ax.add_patch(Rectangle((3.49, .9), 1, 10002, alpha=0.1, color='green'))
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * 0.91, box.height * 0.80])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
plt.figtext(0.26, 0.209, "No DPDK", color="black")
plt.figtext(0.72, 0.209, "With DPDK", color="black")
ax.legend(['B: Baseline', '1: 1 vswitch VM'], handletextpad=-0.1, handlelength=0, markerscale=0, loc='lower center', ncol=2, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-SplitSingles.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-SplitSingles.png', dpi=(250), format='png')
plt.close()
def plotLatencyMulti(pcapAnalysisPath,topology):
Baseline_MultiTenant_NoDPDK = {}
Baseline_MultiTenant_DPDK = {}
SRIOV_MultiTenant_NoDPDK = {}
SRIOV_MultiTenant_DPDK = {}
SRIOV_MultiOvs_DPDK = {}
SRIOV_MultiOvs_NoDPDK = {}
SRIOV_MultiOvs_NoDPDK_Isolated = {}
SRIOV_MultiOvs_DPDK_Isolated = {}
if topology == "phy2phy":
Baseline_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_MultiTenant_NoDPDK-')
Baseline_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_MultiTenant_DPDK-')
SRIOV_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_MultiTenant_DPDK-')
SRIOV_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_MultiTenant_NoDPDK-')
SRIOV_MultiOvs_DPDK = read_lat_dict(pcapAnalysisPath + 'phy2phy-latency-SRIOV_MultiOvs_DPDK-')
SRIOV_MultiOvs_NoDPDK = read_lat_dict(pcapAnalysisPath + 'phy2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_NoDPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_DPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2phy-latency-SRIOV_MultiOvs_DPDK-')
elif topology == "phy2vm2vm2phy":
Baseline_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_MultiTenant_NoDPDK-')
Baseline_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_MultiTenant_DPDK-')
SRIOV_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_MultiTenant_DPDK-')
SRIOV_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_MultiTenant_NoDPDK-')
SRIOV_MultiOvs_DPDK = read_lat_dict(pcapAnalysisPath + 'phy2vm2vm2phy-latency-SRIOV_MultiOvs_DPDK-')
SRIOV_MultiOvs_NoDPDK = read_lat_dict(pcapAnalysisPath + 'phy2vm2vm2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_NoDPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2vm2vm2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_DPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2vm2vm2phy-latency-SRIOV_MultiOvs_DPDK-')
# print Baseline_MultiTenant_NoDPDK
# print SRIOV_MultiTenant_DPDK
# print SRIOV_MultiTenant_NoDPDK
# print SRIOV_MultiOvs_DPDK
# print SRIOV_MultiOvs_NoDPDK
fig = plt.figure(1, figsize = (8.75,4.6),frameon=True)
fig.autofmt_xdate(bottom=0.1, rotation=90, ha='right')
ax = plt.subplot(111)
c = 0
data = []
xmark = []
data.append([])
xmark.append("")
c = 0
for l in labels:
data.append(Baseline_MultiTenant_NoDPDK[l])
xmark.append('Baseline_MultiTenant_NoDPDK')
data.append(Baseline_MultiTenant_DPDK[l])
xmark.append('Baseline_MultiTenant_DPDK')
data.append(SRIOV_MultiTenant_NoDPDK[l])
xmark.append('SRIOV_MultiTenant_NoDPDK')
data.append(SRIOV_MultiTenant_DPDK[l])
xmark.append('SRIOV_MultiTenant_DPDK')
data.append(SRIOV_MultiOvs_NoDPDK[l])
xmark.append('SRIOV_MultiOvs_NoDPDK')
data.append(SRIOV_MultiOvs_DPDK[l])
xmark.append('SRIOV_MultiOvs_DPDK')
data.append(SRIOV_MultiOvs_NoDPDK_Isolated[l])
xmark.append('SRIOV_MultiOvs_NoDPDK_Isolated')
data.append(SRIOV_MultiOvs_DPDK_Isolated[l])
xmark.append('SRIOV_MultiOvs_DPDK_Isolated')
ax.text(6.0, 10000.05, u'64$B$')
ax.text(12.0, 10000.05, u'512$B$')
ax.text(18.0, 10000.05, u'1500$B$')
ax.text(23.0, 10000.05, u'2048$B$')
bp_dict = plt.boxplot(data, patch_artist=False)
plt.setp(bp_dict['whiskers'], color='black', linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color='blue', linewidth=1, marker='+', markersize=2)
plt.setp(bp_dict['boxes'], linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks(range(1, 35), tuple(xmark), rotation='-45', ha='left')
# Print median values for debug
# medians=[]
# for line in bp_dict['medians']:
# # get position data for median line
# x, y = line.get_xydata()[1] # top of median line
# # overlay median value
# text(x, y, '%.4f' % y,
# horizontalalignment='center', fontsize=5) # draw above, centered
# print "%.4f" % y
# medians.append(y)
# plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^'))
plt.plot([1.0, 1.0], [-1, 10000], color='#000000')
plt.plot([9.5, 9.5], [-1, 10000], color='#000000')
plt.plot([17.5, 17.5], [-1, 10000], color='#000000')
plt.plot([25.5, 25.5], [-1, 10000], color='#000000')
plt.plot([33.5, 33.5], [-1, 10000], color='#000000')
plt.ylim((0.001,10))
plt.ylabel('Latency in millisecond')
plt.xlabel("Scenario mode")
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.25, box.width * 1.0, box.height * 0.78])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
# ax.set_xscale('log')
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-Multi.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-Multi.png', dpi=(250), format='png')
plt.close()
def plotLatencyMultiSplit(pcapAnalysisPath,topology):
Baseline_MultiTenant_NoDPDK = {}
Baseline_MultiTenant_DPDK = {}
SRIOV_MultiTenant_NoDPDK = {}
SRIOV_MultiTenant_DPDK = {}
SRIOV_MultiOvs_DPDK = {}
SRIOV_MultiOvs_NoDPDK = {}
SRIOV_MultiOvs_NoDPDK_Isolated = {}
SRIOV_MultiOvs_DPDK_Isolated = {}
if topology == "phy2phy":
Baseline_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_MultiTenant_NoDPDK-')
Baseline_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_MultiTenant_DPDK-')
SRIOV_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_MultiTenant_DPDK-')
SRIOV_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_MultiTenant_NoDPDK-')
SRIOV_MultiOvs_DPDK = read_lat_dict(pcapAnalysisPath + 'phy2phy-latency-SRIOV_MultiOvs_DPDK-')
SRIOV_MultiOvs_NoDPDK = read_lat_dict(pcapAnalysisPath + 'phy2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_NoDPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_DPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2phy-latency-SRIOV_MultiOvs_DPDK-')
elif topology == "phy2vm2vm2phy":
Baseline_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_MultiTenant_NoDPDK-')
Baseline_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_MultiTenant_DPDK-')
SRIOV_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_MultiTenant_DPDK-')
SRIOV_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_MultiTenant_NoDPDK-')
SRIOV_MultiOvs_DPDK = read_lat_dict(pcapAnalysisPath + 'phy2vm2vm2phy-latency-SRIOV_MultiOvs_DPDK-')
SRIOV_MultiOvs_NoDPDK = read_lat_dict(pcapAnalysisPath + 'phy2vm2vm2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_NoDPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2vm2vm2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_DPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2vm2vm2phy-latency-SRIOV_MultiOvs_DPDK-')
# print Baseline_MultiTenant_NoDPDK
# print SRIOV_MultiTenant_DPDK
# print SRIOV_MultiTenant_NoDPDK
# print SRIOV_MultiOvs_DPDK
# print SRIOV_MultiOvs_NoDPDK
fig = plt.figure(1, figsize = (3.487, 2.15512978986403),frameon=True)
fig.autofmt_xdate(bottom=0.1, rotation=90, ha='right')
ax = plt.subplot(1, 2, 1)
plt.tight_layout()
c = 0
data = []
xmark = []
# data.append([])
# xmark.append("")
c = 0
labels = ["64bytes"]
for l in labels:
data.append(Baseline_MultiTenant_NoDPDK[l])
xmark.append('B')
# data.append(Baseline_MultiTenant_DPDK[l])
# xmark.append('Baseline_MultiTenant_DPDK')
data.append(SRIOV_MultiTenant_NoDPDK[l])
xmark.append('P1')
# data.append(SRIOV_MultiTenant_DPDK[l])
# xmark.append('SRIOV_MultiTenant_DPDK')
data.append(SRIOV_MultiOvs_NoDPDK[l])
xmark.append('P2.1')
# data.append(SRIOV_MultiOvs_DPDK[l])
# xmark.append('SRIOV_MultiOvs_DPDK')
data.append(SRIOV_MultiOvs_NoDPDK_Isolated[l])
xmark.append('P2.2')
# data.append(SRIOV_MultiOvs_DPDK_Isolated[l])
# xmark.append('SRIOV_MultiOvs_DPDK_Isolated')
# ax.text(6.0, 10000.05, u'64$B$')
# ax.text(12.0, 10000.05, u'512$B$')
# ax.text(18.0, 10000.05, u'1500$B$')
# ax.text(23.0, 10000.05, u'2048$B$')
bp_dict = plt.boxplot(data, patch_artist=False)
plt.setp(bp_dict['whiskers'], color='black', linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color='blue', linewidth=1, marker='+', markersize=1)
plt.setp(bp_dict['boxes'], linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks(range(1, 5), tuple(xmark))
# Print median values for debug
# medians=[]
# for line in bp_dict['medians']:
# # get position data for median line
# x, y = line.get_xydata()[1] # top of median line
# # overlay median value
# text(x, y, '%.4f' % y,
# horizontalalignment='center', fontsize=5) # draw above, centered
# print "%.4f" % y
# medians.append(y)
# plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^'))
# plt.plot([1.0, 1.0], [-1, 10000], color='#000000')
plt.plot([1.5, 1.5], [-1, 10000], color='#000000')
# plt.plot([9.5, 9.5], [-1, 10000], color='#000000')
# plt.plot([17.5, 17.5], [-1, 10000], color='#000000')
# plt.plot([25.5, 25.5], [-1, 10000], color='#000000')
# plt.plot([33.5, 33.5], [-1, 10000], color='#000000')
plt.axvspan(1.5, 5.0, facecolor='0.6', alpha=0.5)
plt.ylim((1,10000))
plt.ylabel('Latency (microsecond)')
# plt.xlabel("No DPDK")
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * 0.91, box.height * 0.80])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
# ax.set_xscale('log')
### Second plot with dpdk
ax = plt.subplot(1, 2, 2)
c = 0
data = []
xmark = []
# data.append([])
# xmark.append("")
c = 0
labels = ["64bytes"]
for l in labels:
# data.append(Baseline_MultiTenant_NoDPDK[l])
# xmark.append('Baseline_MultiTenant_NoDPDK')
data.append(Baseline_MultiTenant_DPDK[l])
xmark.append('B')
# data.append(SRIOV_MultiTenant_NoDPDK[l])
# xmark.append('SRIOV_MultiTenant_NoDPDK')
data.append(SRIOV_MultiTenant_DPDK[l])
xmark.append('P1+\nP3')
# data.append(SRIOV_MultiOvs_NoDPDK[l])
# xmark.append('SRIOV_MultiOvs_NoDPDK')
data.append(SRIOV_MultiOvs_DPDK[l])
xmark.append('P2.1+\nP3')
# data.append(SRIOV_MultiOvs_NoDPDK_Isolated[l])
# xmark.append('SRIOV_MultiOvs_NoDPDK_Isolated')
data.append(SRIOV_MultiOvs_DPDK_Isolated[l])
xmark.append('P2.2+\nP3')
# ax.text(6.0, 10000.05, u'64$B$')
# ax.text(12.0, 10000.05, u'512$B$')
# ax.text(18.0, 10000.05, u'1500$B$')
# ax.text(23.0, 10000.05, u'2048$B$')
bp_dict = plt.boxplot(data, patch_artist=False)
plt.setp(bp_dict['whiskers'], color='black', linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color='blue', linewidth=1, marker='+', markersize=1)
plt.setp(bp_dict['boxes'], linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks(range(1, 5), tuple(xmark))
# Print median values for debug
# medians=[]
# for line in bp_dict['medians']:
# # get position data for median line
# x, y = line.get_xydata()[1] # top of median line
# # overlay median value
# text(x, y, '%.4f' % y,
# horizontalalignment='center', fontsize=5) # draw above, centered
# print "%.4f" % y
# medians.append(y)
# plt.grid(True)
marker = itertools.cycle(('d', '*', 'o', '^'))
# plt.plot([1.0, 1.0], [-1, 10000], color='#000000')
plt.plot([1.5, 1.5], [-1, 10000], color='#000000')
# plt.plot([9.5, 9.5], [-1, 10000], color='#000000')
# plt.plot([17.5, 17.5], [-1, 10000], color='#000000')
# plt.plot([25.5, 25.5], [-1, 10000], color='#000000')
# plt.plot([33.5, 33.5], [-1, 10000], color='#000000')
plt.axvspan(1.5, 5.0, facecolor='0.6', alpha=0.5)
plt.ylim((1,10000))
# plt.ylabel('Latency in millisecond')
# plt.xlabel("DPDK")
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * 0.91, box.height * 0.80])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
# plt.figtext(0.15, 0.15, 'B: Baseline', color='black')
# plt.figtext(0.45, 0.15, 'P2.1: Principle 2 (shared cores)', color='black')
# plt.figtext(0.15, 0.035, 'P1: Principle 1', color='black')
# plt.figtext(0.45, 0.035, 'P2.2: Principle 2 (isolated cores)', color='black')
ax.legend(['B: Baseline', 'P1: Principle 1', 'P2.1: Principle 2 (shared CPU)', 'P2.2: Principle 2 (isolated CPU)', 'P3: Principle 3'], handletextpad=-0.18, handlelength=0, markerscale=0, loc='lower center', ncol=3, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
# plt.add_patch(Rectangle((0, 0), 10, 10))
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-Multi-Split.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-Multi-Split.png', dpi=(250), format='png')
plt.close()
def plotLatencyMultiSplitSingles(pcapAnalysisPath,topology):
Baseline_MultiTenant_NoDPDK = {}
Baseline_MultiTenant_DPDK = {}
SRIOV_MultiTenant_NoDPDK = {}
SRIOV_MultiTenant_DPDK = {}
SRIOV_MultiOvs_DPDK = {}
SRIOV_MultiOvs_NoDPDK = {}
SRIOV_MultiOvs_NoDPDK_Isolated = {}
SRIOV_MultiOvs_DPDK_Isolated = {}
if topology == "phy2phy":
Baseline_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_MultiTenant_NoDPDK-')
Baseline_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-Baseline_MultiTenant_DPDK-')
SRIOV_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_MultiTenant_DPDK-')
SRIOV_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2phy-latency-SRIOV_MultiTenant_NoDPDK-')
SRIOV_MultiOvs_DPDK = read_lat_dict(pcapAnalysisPath + 'phy2phy-latency-SRIOV_MultiOvs_DPDK-')
SRIOV_MultiOvs_NoDPDK = read_lat_dict(pcapAnalysisPath + 'phy2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_NoDPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_DPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2phy-latency-SRIOV_MultiOvs_DPDK-')
elif topology == "phy2vm2vm2phy":
Baseline_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_MultiTenant_NoDPDK-')
Baseline_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-Baseline_MultiTenant_DPDK-')
SRIOV_MultiTenant_DPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_MultiTenant_DPDK-')
SRIOV_MultiTenant_NoDPDK = read_lat_dict(pcapAnalysisPath+'phy2vm2vm2phy-latency-SRIOV_MultiTenant_NoDPDK-')
SRIOV_MultiOvs_DPDK = read_lat_dict(pcapAnalysisPath + 'phy2vm2vm2phy-latency-SRIOV_MultiOvs_DPDK-')
SRIOV_MultiOvs_NoDPDK = read_lat_dict(pcapAnalysisPath + 'phy2vm2vm2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_NoDPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2vm2vm2phy-latency-SRIOV_MultiOvs_NoDPDK-')
SRIOV_MultiOvs_DPDK_Isolated = read_lat_dict(pcapAnalysisPathLatencyIsolated+'phy2vm2vm2phy-latency-SRIOV_MultiOvs_DPDK-')
# print Baseline_MultiTenant_NoDPDK
# print SRIOV_MultiTenant_DPDK
# print SRIOV_MultiTenant_NoDPDK
# print SRIOV_MultiOvs_DPDK
# print SRIOV_MultiOvs_NoDPDK
fig = plt.figure(1, figsize = (3.487, 2.15512978986403),frameon=True)
fig.autofmt_xdate(bottom=0.1, rotation=90, ha='right')
ax = plt.subplot(1, 2, 1)
plt.tight_layout()
c = 0
data = []
xmark = []
# data.append([])
# xmark.append("")
c = 0
labels = ["64bytes"]
for l in labels:
data.append(Baseline_MultiTenant_NoDPDK[l])
xmark.append('Baseline')
# data.append(Baseline_MultiTenant_DPDK[l])
# xmark.append('Baseline_MultiTenant_DPDK')
data.append(SRIOV_MultiTenant_NoDPDK[l])
xmark.append('1\nvswitch\nVM')
# data.append(SRIOV_MultiTenant_DPDK[l])
# xmark.append('SRIOV_MultiTenant_DPDK')
data.append(SRIOV_MultiOvs_NoDPDK[l])
xmark.append('2\nvswitch\nVM\n(shared)')
# data.append(SRIOV_MultiOvs_DPDK[l])
# xmark.append('SRIOV_MultiOvs_DPDK')
data.append(SRIOV_MultiOvs_NoDPDK_Isolated[l])
xmark.append('2\nvswitch\nVM\n(isolated)')
# data.append(SRIOV_MultiOvs_DPDK_Isolated[l])
# xmark.append('SRIOV_MultiOvs_DPDK_Isolated')
# ax.text(6.0, 10000.05, u'64$B$')
# ax.text(12.0, 10000.05, u'512$B$')
# ax.text(18.0, 10000.05, u'1500$B$')
# ax.text(23.0, 10000.05, u'2048$B$')
bp_dict = plt.boxplot(data, patch_artist=False)
colors = ['black', '#1F77B4', '#FF7F0E', '#2CA02C']
colors = ['black']
for color in colors:
plt.setp(bp_dict['whiskers'], color=color, linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color=color, linewidth=1, marker='+', markersize=1)
plt.setp(bp_dict['boxes'], color=color, linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks([1, 2, 3, 4], tuple(["B", "1", "2.1", "2.2"]))
# plt.xticks(range(1, 5), tuple(xmark))
plt.plot([1.5, 1.5], [-1, 10000], color='#000000')
plt.plot([2.5, 2.5], [-1, 10000], color='#000000', alpha=0.1, linewidth=0.5)
plt.plot([3.5, 3.5], [-1, 10000], color='#000000', alpha=0.1, linewidth=0.5)
# plt.axvspan(1.5, 5.0, facecolor='0.6', alpha=0.5)
plt.ylim((1,10000))
plt.ylabel('Latency (microsecond)')
# ax.add_patch(Rectangle((1.49, .9), 1, 10002, alpha=0.2, color='#1F77B4'))
# ax.add_patch(Rectangle((2.49, .9), 1, 10002, alpha=0.2, color='#FF7F0E'))
# ax.add_patch(Rectangle((3.49, .9), 1, 10002, alpha=0.2, color='#2CA02C'))
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * 0.91, box.height * 0.80])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
### Second plot with dpdk
ax = plt.subplot(1, 2, 2)
c = 0
data = []
xmark = []
# data.append([])
# xmark.append("")
c = 0
labels = ["64bytes"]
for l in labels:
# data.append(Baseline_MultiTenant_NoDPDK[l])
# xmark.append('Baseline_MultiTenant_NoDPDK')
data.append(Baseline_MultiTenant_DPDK[l])
xmark.append('Baseline')
# data.append(SRIOV_MultiTenant_NoDPDK[l])
# xmark.append('SRIOV_MultiTenant_NoDPDK')
data.append(SRIOV_MultiTenant_DPDK[l])
xmark.append('1\nvswitch\nVM')
# data.append(SRIOV_MultiOvs_NoDPDK[l])
# xmark.append('SRIOV_MultiOvs_NoDPDK')
data.append(SRIOV_MultiOvs_DPDK[l])
xmark.append('2\nvswitch\nVM\n(shared CPU)')
# data.append(SRIOV_MultiOvs_NoDPDK_Isolated[l])
# xmark.append('SRIOV_MultiOvs_NoDPDK_Isolated')
data.append(SRIOV_MultiOvs_DPDK_Isolated[l])
xmark.append('2\nvswitch\nVM\n(isolated CPU)')
bp_dict = plt.boxplot(data, patch_artist=False)
plt.setp(bp_dict['whiskers'], color='black', linewidth=1, linestyle='-')
plt.setp(bp_dict['fliers'], color='blue', linewidth=1, marker='+', markersize=1)
plt.setp(bp_dict['boxes'], linewidth=1)
plt.setp(bp_dict['medians'], linewidth=1, color='red')
plt.xticks([1, 2, 3, 4], tuple(["B", "1", "2.1", "2.2"]))
# plt.xticks(range(1, 5), tuple(xmark))
plt.plot([1.5, 1.5], [-1, 10000], color='#000000')
plt.plot([2.5, 2.5], [-1, 10000], color='#000000', alpha=0.1, linewidth=0.5)
plt.plot([3.5, 3.5], [-1, 10000], color='#000000', alpha=0.1, linewidth=0.5)
# plt.axvspan(1.5, 5.0, facecolor='0.6', alpha=0.5)
plt.ylim((1,10000))
# ax.add_patch(Rectangle((1.49, .9), 1, 10002, alpha=0.01, color='#1F77B4'))
# ax.add_patch(Rectangle((2.49, .9), 1, 10002, alpha=0.01, color='#FF7F0E'))
# ax.add_patch(Rectangle((3.49, .9), 1, 10002, alpha=0.01, color='#2CA02C'))
box = ax.get_position()
ax.set_position([box.x0 + 0.05, box.y0 + box.height * 0.25, box.width * 0.91, box.height * 0.80])
ax.yaxis.grid(True, linestyle='-', which='major', color='grey', alpha=0.8)
ax.set_axisbelow(True)
ax.set_yscale('log')
plt.figtext(0.26, 0.209, "No DPDK", color="black")
plt.figtext(0.72, 0.209, "With DPDK", color="black")
ax.legend(['B: Baseline', '1: 1 vswitch VM', '2.1: 2 vswitch VM (shared)', '2.2: 2 vswitch VM (isolated)'], handletextpad=-0.1, handlelength=0, markerscale=0, loc='lower center', ncol=2, bbox_to_anchor=(-0.315, -0.5), numpoints=1)
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-Multi-SplitSingles.pdf', dpi=(2500), format='pdf')
plt.savefig(pcapAnalysisPath+'plot_box_latency_'+topology+'-Multi-SplitSingles.png', dpi=(250), format='png')
plt.close()
def read_lat_dict(path):
# print "read_lat_dict()"
# import ast
ret = {}
for i in labels:
# print "i: " + str(i)
ret[i] = []
try:
# print "printing the combo: "
# print (str(path+i+'.res'))
# data = ast.literal_eval(open(path+i+'.res').read())
data = json.loads(open(path+i+'.res').read())
# print type(data)
# print len(data.keys())
# continue
for j in range(lat_packet_start_index, lat_packet_end_index):
ret[i].append(data[unicode(str(j))] * 1000000.0) # seconds -> microseconds (matches the 'Latency (microsecond)' axis labels)
# if data[unicode(str(j))] * 1000.0 < 1:
# ret[i].append(data[unicode(str(j))] * 1000.0)
print "len of ret is:" + str(len(ret[i]))
except:
pass
# print ret
return ret
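# read_lat_dict() assumes one JSON file per packet-size label, named '<prefix><label>.res'
# (e.g. pcapAnalysisPathLatency + 'phy2phy-latency-Baseline_NoDPDK-64bytes.res'), mapping
# stringified packet indices to per-packet latencies; only indices in
# [lat_packet_start_index, lat_packet_end_index) are kept and scaled by 1e6. A minimal
# hypothetical file body:
#
#   {"0": 0.000021, "1": 0.000019, "2": 0.000025}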
# #### VISUALIZATION STUFF ####
# plotThroughputLoss(pcapAnalysisPathThroughput, topology)
# plotThroughputMultiLoss(pcapAnalysisPathThroughput, topology)
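# Note: the plotting calls below rely on names defined earlier in this script and assumed
# here (not restated): topologies, labels, lat_packet_start_index, lat_packet_end_index,
# and the pcapAnalysisPath* directories (Throughput, Latency and their *Isolated variants).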
for topology in topologies:
print "Plot the throughput"
plotThroughputSplit(pcapAnalysisPathThroughput, topology)
plotThroughputMultiSplit(pcapAnalysisPathThroughput, topology)
print "Plot the latency"
plotLatencySplitSingles(pcapAnalysisPathLatency, topology)
plotLatencyMultiSplitSingles(pcapAnalysisPathLatency, topology)
# break
| 53.836299
| 262
| 0.70197
| 9,418
| 75,640
| 5.362816
| 0.040985
| 0.077217
| 0.056428
| 0.018532
| 0.952304
| 0.945295
| 0.941295
| 0.935276
| 0.929832
| 0.92015
| 0
| 0.048781
| 0.158488
| 75,640
| 1,404
| 263
| 53.874644
| 0.744706
| 0.169276
| 0
| 0.823951
| 0
| 0.004094
| 0.238921
| 0.182894
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.001024
| 0.013306
| null | null | 0.051177
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
67c72716235820b31bc180e53a2f5792284acdfb
| 19,351
|
py
|
Python
|
tests/st/scipy_st/sparse/test_linalg.py
|
zhz44/mindspore
|
6044d34074c8505dd4b02c0a05419cbc32a43f86
|
[
"Apache-2.0"
] | null | null | null |
tests/st/scipy_st/sparse/test_linalg.py
|
zhz44/mindspore
|
6044d34074c8505dd4b02c0a05419cbc32a43f86
|
[
"Apache-2.0"
] | null | null | null |
tests/st/scipy_st/sparse/test_linalg.py
|
zhz44/mindspore
|
6044d34074c8505dd4b02c0a05419cbc32a43f86
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""st for scipy.sparse.linalg."""
import pytest
import numpy as onp
import scipy as osp
import scipy.sparse.linalg
import mindspore.ops as ops
import mindspore.nn as nn
import mindspore.scipy as msp
from mindspore import context
from mindspore.common import Tensor
from tests.st.scipy_st.utils import create_sym_pos_matrix, create_full_rank_matrix, to_tensor
def _fetch_preconditioner(preconditioner, A):
"""
Returns one of several preconditioning matrices, selected by the identifier
`preconditioner`, for the input matrix A whose inverse the preconditioner
is meant to approximate.
"""
if preconditioner == 'identity':
M = onp.eye(A.shape[0], dtype=A.dtype)
elif preconditioner == 'random':
random_matrix = create_sym_pos_matrix(A.shape, A.dtype)
M = onp.linalg.inv(random_matrix)
elif preconditioner == 'exact':
M = onp.linalg.inv(A)
else:
M = None
return M
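# Hypothetical usage sketch (not part of the tests themselves): the matrix returned here is
# passed to the iterative solvers through their `M` argument, e.g.
#
#   A = create_sym_pos_matrix((7, 7), onp.float64)
#   M = _fetch_preconditioner('exact', A)   # exact inverse of A
#   x, info = scipy.sparse.linalg.cg(A, onp.ones(7), M=M)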
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('tensor_type, dtype, tol', [('Tensor', onp.float32, 1e-5), ('Tensor', onp.float64, 1e-12),
('CSRTensor', onp.float32, 1e-5)])
@pytest.mark.parametrize('shape', [(7, 7)])
@pytest.mark.parametrize('preconditioner', [None, 'identity', 'exact', 'random'])
@pytest.mark.parametrize('maxiter', [3, None])
def test_cg_against_scipy(tensor_type, dtype, tol, shape, preconditioner, maxiter):
"""
Feature: ALL TO ALL
Description: test cases for cg called as a plain function (not wrapped in a Cell) in pynative/graph mode
Expectation: the result matches scipy
"""
onp.random.seed(0)
a = create_sym_pos_matrix(shape, dtype)
b = onp.random.random(shape[:1]).astype(dtype)
m = _fetch_preconditioner(preconditioner, a)
osp_res = scipy.sparse.linalg.cg(a, b, M=m, maxiter=maxiter, atol=tol, tol=tol)
a = to_tensor((a, tensor_type))
b = Tensor(b)
m = to_tensor((m, tensor_type)) if m is not None else m
# using PYNATIVE MODE
context.set_context(mode=context.PYNATIVE_MODE)
msp_res_dyn = msp.sparse.linalg.cg(a, b, M=m, maxiter=maxiter, atol=tol, tol=tol)
# using GRAPH MODE
context.set_context(mode=context.GRAPH_MODE)
msp_res_sta = msp.sparse.linalg.cg(a, b, M=m, maxiter=maxiter, atol=tol, tol=tol)
kw = {"atol": tol, "rtol": tol}
onp.testing.assert_allclose(osp_res[0], msp_res_dyn[0].asnumpy(), **kw)
onp.testing.assert_allclose(osp_res[0], msp_res_sta[0].asnumpy(), **kw)
assert osp_res[1] == msp_res_dyn[1].asnumpy().item()
assert osp_res[1] == msp_res_sta[1].asnumpy().item()
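# Note: both cg implementations return an (x, info) pair; the asserts above compare the
# solution numerically and require the convergence flag to match scipy's exactly
# (in scipy's convention, info == 0 means the solver converged within maxiter).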
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('dtype', [onp.float32, onp.float64])
@pytest.mark.parametrize('shape', [(2, 2)])
def test_cg_against_numpy(dtype, shape):
"""
Feature: ALL TO ALL
Description: test cases for cg
Expectation: the result matches numpy
"""
onp.random.seed(0)
a = create_sym_pos_matrix(shape, dtype)
b = onp.random.random(shape[:1]).astype(dtype)
expected = onp.linalg.solve(a, b)
# using PYNATIVE MODE
context.set_context(mode=context.PYNATIVE_MODE)
actual_dyn, _ = msp.sparse.linalg.cg(Tensor(a), Tensor(b))
# using GRAPH MODE
context.set_context(mode=context.GRAPH_MODE)
actual_sta, _ = msp.sparse.linalg.cg(Tensor(a), Tensor(b))
kw = {"atol": 1e-5, "rtol": 1e-5}
onp.testing.assert_allclose(expected, actual_dyn.asnumpy(), **kw)
onp.testing.assert_allclose(expected, actual_sta.asnumpy(), **kw)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('tensor_type, dtype, tol', [('Tensor', onp.float32, 1e-5), ('Tensor', onp.float64, 1e-12),
('CSRTensor', onp.float32, 1e-5)])
@pytest.mark.parametrize('shape', [(7, 7)])
@pytest.mark.parametrize('preconditioner', [None, 'identity', 'exact', 'random'])
@pytest.mark.parametrize('maxiter', [3, None])
def test_cg_against_scipy_graph(tensor_type, dtype, tol, shape, preconditioner, maxiter):
"""
Feature: ALL TO ALL
Description: test cases for cg wrapped in a Cell object in pynative/graph mode
Expectation: the result matches scipy
"""
class Net(nn.Cell):
def construct(self, a, b, m, maxiter, tol):
return msp.sparse.linalg.cg(a, b, M=m, maxiter=maxiter, atol=tol, tol=tol)
onp.random.seed(0)
a = create_sym_pos_matrix(shape, dtype)
b = onp.random.random(shape[:1]).astype(dtype)
m = _fetch_preconditioner(preconditioner, a)
osp_res = scipy.sparse.linalg.cg(a, b, M=m, maxiter=maxiter, atol=tol, tol=tol)
a = to_tensor((a, tensor_type))
b = Tensor(b)
m = to_tensor((m, tensor_type)) if m is not None else m
# using PYNATIVE MODE
context.set_context(mode=context.PYNATIVE_MODE)
msp_res_dyn = Net()(a, b, m, maxiter, tol)
# using GRAPH MODE
context.set_context(mode=context.GRAPH_MODE)
msp_res_sta = Net()(a, b, m, maxiter, tol)
kw = {"atol": tol, "rtol": tol}
onp.testing.assert_allclose(osp_res[0], msp_res_dyn[0].asnumpy(), **kw)
onp.testing.assert_allclose(osp_res[0], msp_res_sta[0].asnumpy(), **kw)
assert osp_res[1] == msp_res_dyn[1].asnumpy().item()
assert osp_res[1] == msp_res_sta[1].asnumpy().item()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('tensor_type, dtype, tol', [('Tensor', onp.float32, 1e-5), ('Tensor', onp.float64, 1e-8),
('CSRTensor', onp.float32, 1e-5)])
@pytest.mark.parametrize('a, b, grad_a, grad_b', [
([[1.96822833, 0.82204467, 1.03749232, 0.88915326, 0.44986806, 1.11167143],
[0.82204467, 2.25216591, 1.40235719, 0.70838919, 0.81377919, 1.06000368],
[1.03749232, 1.40235719, 2.90618746, 0.7126087, 0.81029544, 1.28673025],
[0.88915326, 0.70838919, 0.7126087, 2.17515263, 0.40443765, 1.02082996],
[0.44986806, 0.81377919, 0.81029544, 0.40443765, 1.60570668, 0.62292701],
[1.11167143, 1.06000368, 1.28673025, 1.02082996, 0.62292701, 2.30795277]],
[0.79363745, 0.58000418, 0.1622986, 0.70075235, 0.96455108, 0.50000836],
[[-0.07867674, -0.01521201, 0.06394698, -0.03854052, -0.13523701, 0.01326866],
[-0.03508505, -0.00678363, 0.02851647, -0.01718673, -0.06030749, 0.00591702],
[-0.00586019, -0.00113306, 0.00476305, -0.00287067, -0.01007304, 0.00098831],
[-0.07704304, -0.01489613, 0.06261914, -0.03774023, -0.13242886, 0.01299314],
[-0.14497008, -0.02802971, 0.11782896, -0.07101491, -0.24918826, 0.02444888],
[-0.01868565, -0.00361284, 0.01518735, -0.00915334, -0.03211867, 0.00315129]],
[0.22853142, 0.10191113, 0.01702201, 0.22378603, 0.42109291, 0.054276]),
([[1.85910724, 0.73233206, 0.65960803, 1.03821349, 0.55277616],
[0.73233206, 1.69548841, 0.59992146, 1.01518264, 0.50824059],
[0.65960803, 0.59992146, 1.98169091, 1.45565213, 0.47901749],
[1.03821349, 1.01518264, 1.45565213, 3.3133049, 0.75598147],
[0.55277616, 0.50824059, 0.47901749, 0.75598147, 1.46831254]],
[0.59674531, 0.226012, 0.10694568, 0.22030621, 0.34982629],
[[-0.07498642, 0.00167461, 0.01353184, 0.01008293, -0.03770084],
[-0.09940184, 0.00221986, 0.01793778, 0.01336592, -0.04997616],
[-0.09572781, 0.00213781, 0.01727477, 0.01287189, -0.04812897],
[0.03135044, -0.00070012, -0.00565741, -0.00421549, 0.01576203],
[-0.14053766, 0.00313851, 0.02536103, 0.01889718, -0.07065797]],
[0.23398106, 0.31016481, 0.29870068, -0.09782316, 0.43852141]),
])
def test_cg_grad(tensor_type, dtype, tol, a, b, grad_a, grad_b):
"""
Feature: ALL TO ALL
Description: test cases for grad implementation of cg in graph mode
Expectation: the results match the expected gradients
"""
context.set_context(mode=context.GRAPH_MODE)
a = to_tensor((a, tensor_type), dtype)
b = Tensor(onp.array(b, dtype=dtype))
expect_grad_a = onp.array(grad_a, dtype=dtype)
expect_grad_b = onp.array(grad_b, dtype=dtype)
kw = {"atol": tol, "rtol": tol}
# Function
grad_net = ops.GradOperation(get_all=True)(msp.sparse.linalg.cg)
grad_a, grad_b = grad_net(a, b)[:2]
onp.testing.assert_allclose(expect_grad_a, grad_a.asnumpy(), **kw)
onp.testing.assert_allclose(expect_grad_b, grad_b.asnumpy(), **kw)
# Cell
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.sum = ops.ReduceSum()
self.cg = msp.sparse.linalg.cg
def construct(self, a, b):
x, _ = self.cg(a, b)
return self.sum(x)
grad_net = ops.GradOperation(get_all=True)(Net())
grad_a, grad_b = grad_net(a, b)[:2]
onp.testing.assert_allclose(expect_grad_a, grad_a.asnumpy(), **kw)
onp.testing.assert_allclose(expect_grad_b, grad_b.asnumpy(), **kw)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('tensor_type, dtype, tol', [('Tensor', onp.float32, 1e-5), ('Tensor', onp.float64, 1e-8)])
@pytest.mark.parametrize('a, b, grad_a, grad_b', [
([[1.96822833, 0.82204467, 1.03749232, 0.88915326, 0.44986806, 1.11167143],
[0.82204467, 2.25216591, 1.40235719, 0.70838919, 0.81377919, 1.06000368],
[1.03749232, 1.40235719, 2.90618746, 0.7126087, 0.81029544, 1.28673025],
[0.88915326, 0.70838919, 0.7126087, 2.17515263, 0.40443765, 1.02082996],
[0.44986806, 0.81377919, 0.81029544, 0.40443765, 1.60570668, 0.62292701],
[1.11167143, 1.06000368, 1.28673025, 1.02082996, 0.62292701, 2.30795277]],
[0.79363745, 0.58000418, 0.1622986, 0.70075235, 0.96455108, 0.50000836],
[[-0.07867674, -0.01521201, 0.06394698, -0.03854052, -0.13523701, 0.01326866],
[-0.03508505, -0.00678363, 0.02851647, -0.01718673, -0.06030749, 0.00591702],
[-0.00586019, -0.00113306, 0.00476305, -0.00287067, -0.01007304, 0.00098831],
[-0.07704304, -0.01489613, 0.06261914, -0.03774023, -0.13242886, 0.01299314],
[-0.14497008, -0.02802971, 0.11782896, -0.07101491, -0.24918826, 0.02444888],
[-0.01868565, -0.00361284, 0.01518735, -0.00915334, -0.03211867, 0.00315129]],
[0.22853142, 0.10191113, 0.01702201, 0.22378603, 0.42109291, 0.054276]),
([[1.85910724, 0.73233206, 0.65960803, 1.03821349, 0.55277616],
[0.73233206, 1.69548841, 0.59992146, 1.01518264, 0.50824059],
[0.65960803, 0.59992146, 1.98169091, 1.45565213, 0.47901749],
[1.03821349, 1.01518264, 1.45565213, 3.3133049, 0.75598147],
[0.55277616, 0.50824059, 0.47901749, 0.75598147, 1.46831254]],
[0.59674531, 0.226012, 0.10694568, 0.22030621, 0.34982629],
[[-0.07498642, 0.00167461, 0.01353184, 0.01008293, -0.03770084],
[-0.09940184, 0.00221986, 0.01793778, 0.01336592, -0.04997616],
[-0.09572781, 0.00213781, 0.01727477, 0.01287189, -0.04812897],
[0.03135044, -0.00070012, -0.00565741, -0.00421549, 0.01576203],
[-0.14053766, 0.00313851, 0.02536103, 0.01889718, -0.07065797]],
[0.23398106, 0.31016481, 0.29870068, -0.09782316, 0.43852141]),
])
def test_cg_grad_pynative(tensor_type, dtype, tol, a, b, grad_a, grad_b):
"""
Feature: ALL TO ALL
Description: test cases for grad implementation of cg in pynative mode
Expectation: the results match the expected gradients
"""
context.set_context(mode=context.PYNATIVE_MODE)
a = to_tensor((a, tensor_type), dtype)
b = Tensor(onp.array(b, dtype=dtype))
expect_grad_a = onp.array(grad_a, dtype=dtype)
expect_grad_b = onp.array(grad_b, dtype=dtype)
kw = {"atol": tol, "rtol": tol}
# Function
grad_net = ops.GradOperation(get_all=True)(msp.sparse.linalg.cg)
grad_a, grad_b = grad_net(a, b)[:2]
onp.testing.assert_allclose(expect_grad_a, grad_a.asnumpy(), **kw)
onp.testing.assert_allclose(expect_grad_b, grad_b.asnumpy(), **kw)
# Cell
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.sum = ops.ReduceSum()
self.cg = msp.sparse.linalg.cg
def construct(self, a, b):
x, _ = self.cg(a, b)
return self.sum(x)
grad_net = ops.GradOperation(get_all=True)(Net())
grad_a, grad_b = grad_net(a, b)[:2]
onp.testing.assert_allclose(expect_grad_a, grad_a.asnumpy(), **kw)
onp.testing.assert_allclose(expect_grad_b, grad_b.asnumpy(), **kw)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('n', [3, 5, 7])
@pytest.mark.parametrize('dtype,tol', [(onp.float64, 7), (onp.float32, 3)])
@pytest.mark.parametrize('preconditioner', [None, 'identity', 'exact', 'random'])
def test_gmres_incremental_against_scipy(n, tol, dtype, preconditioner):
"""
Feature: ALL TO ALL
Description: test cases for gmres on an [N x N] matrix and an [N x 1] right-hand side
Expectation: the result matches scipy
"""
onp.random.seed(0)
context.set_context(mode=context.PYNATIVE_MODE)
A = create_full_rank_matrix((n, n), dtype)
b = onp.random.rand(n).astype(dtype)
x0 = onp.zeros_like(b).astype(dtype)
M = _fetch_preconditioner(preconditioner, A)
scipy_x, _ = osp.sparse.linalg.gmres(A, b, x0, tol=1e-07, atol=0, M=M)
A = Tensor(A)
b = Tensor(b)
x0 = Tensor(x0)
if M is not None:
M = Tensor(M)
gmres_x, _ = msp.sparse.linalg.gmres(A, b, x0, tol=1e-07, atol=0, solve_method='incremental', M=M)
onp.testing.assert_almost_equal(scipy_x, gmres_x.asnumpy(), decimal=tol)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('n', [3, 5, 7])
@pytest.mark.parametrize('dtype, tol', [(onp.float64, 7), (onp.float32, 3)])
@pytest.mark.parametrize('preconditioner', [None, 'identity', 'exact', 'random'])
def test_gmres_incremental_against_scipy_graph(n, tol, dtype, preconditioner):
"""
Feature: ALL TO ALL
Description: test cases for gmres on an [N x N] matrix and an [N x 1] right-hand side
Expectation: the result matches scipy
"""
onp.random.seed(0)
context.set_context(mode=context.GRAPH_MODE)
A = create_full_rank_matrix((n, n), dtype)
b = onp.random.rand(n).astype(dtype)
x0 = onp.zeros_like(b).astype(dtype)
M = _fetch_preconditioner(preconditioner, A)
scipy_x, _ = osp.sparse.linalg.gmres(A, b, x0, tol=1e-07, atol=0, M=M)
A = Tensor(A)
b = Tensor(b)
x0 = Tensor(x0)
if M is not None:
M = Tensor(M)
gmres_x, _ = msp.sparse.linalg.gmres(A, b, x0, tol=1e-07, atol=0, solve_method='incremental', M=M)
onp.testing.assert_almost_equal(scipy_x, gmres_x.asnumpy(), decimal=tol)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('n', [4, 5, 6])
@pytest.mark.parametrize('dtype, tol', [(onp.float64, 7), (onp.float32, 3)])
@pytest.mark.parametrize('preconditioner', [None, 'identity', 'exact', 'random'])
@pytest.mark.parametrize('maxiter', [1, 2])
def test_pynative_batched_gmres_against_scipy(n, dtype, tol, preconditioner, maxiter):
"""
Feature: ALL TO ALL
Description: test cases for gmres
Expectation: the result matches scipy
"""
onp.random.seed(0)
context.set_context(mode=context.PYNATIVE_MODE)
shape = (n, n)
a = create_full_rank_matrix(shape, dtype)
b = onp.random.rand(n).astype(dtype=dtype)
M = _fetch_preconditioner(preconditioner, a)
tensor_a = Tensor(a)
tensor_b = Tensor(b)
M = Tensor(M) if M is not None else M
osp_x, _ = osp.sparse.linalg.gmres(a, b, maxiter=maxiter, atol=1e-6)
msp_x, _ = msp.sparse.linalg.gmres(tensor_a, tensor_b, maxiter=maxiter, M=M, atol=1e-6,
solve_method='batched')
onp.testing.assert_almost_equal(msp_x.asnumpy(), osp_x, decimal=tol)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('n', [5, 6])
@pytest.mark.parametrize('dtype, tol', [(onp.float64, 7), (onp.float32, 3)])
@pytest.mark.parametrize('preconditioner', [None, 'identity', 'exact', 'random'])
@pytest.mark.parametrize('maxiter', [1, 2])
def test_graph_batched_gmres_against_scipy(n, dtype, tol, preconditioner, maxiter):
"""
Feature: ALL TO ALL
Description: test cases for gmres
Expectation: the result matches scipy
"""
onp.random.seed(0)
context.set_context(mode=context.GRAPH_MODE)
shape = (n, n)
a = create_full_rank_matrix(shape, dtype)
b = onp.random.rand(n).astype(dtype=dtype)
tensor_a = Tensor(a)
tensor_b = Tensor(b)
M = _fetch_preconditioner(preconditioner, a)
M = Tensor(M) if M is not None else M
osp_x, _ = osp.sparse.linalg.gmres(a, b, maxiter=maxiter, atol=0.0)
msp_x, _ = msp.sparse.linalg.gmres(tensor_a, tensor_b, maxiter=maxiter, M=M, atol=0.0, solve_method='batched')
onp.testing.assert_almost_equal(msp_x.asnumpy(), osp_x, decimal=tol)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('dtype_tol', [(onp.float64, 1e-10)])
@pytest.mark.parametrize('shape', [(4, 4), (7, 7)])
@pytest.mark.parametrize('preconditioner', [None, 'identity', 'exact', 'random'])
@pytest.mark.parametrize('maxiter', [1, 3])
def test_bicgstab_against_scipy(dtype_tol, shape, preconditioner, maxiter):
"""
Feature: ALL TO ALL
Description: test cases for bicgstab
Expectation: the result matches scipy
"""
onp.random.seed(0)
dtype, tol = dtype_tol
A = create_full_rank_matrix(shape, dtype)
b = onp.random.random(shape[:1]).astype(dtype)
M = _fetch_preconditioner(preconditioner, A)
osp_res = scipy.sparse.linalg.bicgstab(A, b, M=M, maxiter=maxiter, atol=tol, tol=tol)[0]
A = Tensor(A)
b = Tensor(b)
M = Tensor(M) if M is not None else M
# using PYNATIVE MODE
context.set_context(mode=context.PYNATIVE_MODE)
msp_res_dyn = msp.sparse.linalg.bicgstab(A, b, M=M, maxiter=maxiter, atol=tol, tol=tol)[0]
# using GRAPH MODE
context.set_context(mode=context.GRAPH_MODE)
msp_res_sta = msp.sparse.linalg.bicgstab(A, b, M=M, maxiter=maxiter, atol=tol, tol=tol)[0]
kw = {"atol": tol, "rtol": tol}
onp.testing.assert_allclose(osp_res, msp_res_dyn.asnumpy(), **kw)
onp.testing.assert_allclose(osp_res, msp_res_sta.asnumpy(), **kw)
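# The tests above call two helpers, `create_full_rank_matrix` and
# `_fetch_preconditioner`, that are defined elsewhere in this file and not shown
# in this excerpt. As a rough sketch only (an assumption, not the actual helper),
# a preconditioner factory compatible with the parametrization used here
# (None, 'identity', 'exact', 'random') could look like this:
def _example_fetch_preconditioner(preconditioner, A):
    """Hypothetical illustration: build a preconditioner matrix M, or None."""
    if preconditioner == 'identity':
        M = onp.eye(A.shape[0], dtype=A.dtype)
    elif preconditioner == 'exact':
        # The exact inverse makes the preconditioned system trivially conditioned.
        M = onp.linalg.inv(A)
    elif preconditioner == 'random':
        # A randomly perturbed inverse still accelerates convergence in practice.
        M = onp.linalg.inv(A + 0.1 * onp.random.rand(*A.shape).astype(A.dtype))
    else:
        M = None
    return M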
| 42.159041
| 115
| 0.674022
| 2,874
| 19,351
| 4.400487
| 0.115518
| 0.056931
| 0.053135
| 0.033209
| 0.885032
| 0.878469
| 0.868506
| 0.864395
| 0.852692
| 0.833716
| 0
| 0.175085
| 0.16826
| 19,351
| 458
| 116
| 42.251092
| 0.610687
| 0.107901
| 0
| 0.800623
| 0
| 0
| 0.038459
| 0
| 0
| 0
| 0
| 0
| 0.074766
| 1
| 0.049844
| false
| 0
| 0.031153
| 0.003115
| 0.102804
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c0689084e6fa06f6b2c852b2d81b8edec085dc29
| 25,723
|
py
|
Python
|
src/amuse/test/suite/core_tests/test_incode_storage.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 131
|
2015-06-04T09:06:57.000Z
|
2022-02-01T12:11:29.000Z
|
src/amuse/test/suite/core_tests/test_incode_storage.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 690
|
2015-10-17T12:18:08.000Z
|
2022-03-31T16:15:58.000Z
|
src/amuse/test/suite/core_tests/test_incode_storage.py
|
rieder/amuse
|
3ac3b6b8f922643657279ddee5c8ab3fc0440d5e
|
[
"Apache-2.0"
] | 102
|
2015-01-22T10:00:29.000Z
|
2022-02-09T13:29:43.000Z
|
from amuse.test import amusetest
from amuse.datamodel.incode_storage import *
import numpy
import time
from amuse.units import units
from amuse.units import constants
from amuse.units import nbody_system
class TestParticles(amusetest.TestCase):
def test1(self):
class Code(object):
def __init__(self):
# x,y,z,mass
self.data = []
self.get_position_called = False
self.set_position_called = False
def get_number_of_particles(self):
return 0 if not self.data else len(self.data[0])
def get_position(self,index):
self.get_position_called = True
data_to_return = [(self.data[0][i], self.data[1][i], self.data[2][i]) for i in index]
data_to_return = numpy.asarray(data_to_return).reshape(3,-1)
return [units.m(x) for x in data_to_return]
def set_position(self,index,x,y,z):
self.set_position_called = True
pass
def new_particle(self, x, y, z):
x = x.value_in(units.m)
y = y.value_in(units.m)
z = z.value_in(units.m)
self.data = [x,y,z]
return [i for i in range(len(x))]
code = Code()
storage = InCodeAttributeStorage(
code,
NewParticleMethod(code.new_particle,("x","y","z")),
None,
code.get_number_of_particles,
[],
[ParticleGetAttributesMethod(code.get_position,("x","y","z")),],
name_of_the_index = "index"
)
self.assertEqual(len(storage), 0)
self.assertEqual(storage.get_defined_attribute_names(), ["x","y","z"])
self.assertFalse(code.get_position_called)
storage.get_values_in_store([],["x","y","z"])
self.assertFalse(code.get_position_called)
storage.add_particles_to_store(
[1,2,3,4],
["x","y","z"],
[
units.m([1,2,3,4]),
units.m([2,3,4,5]),
units.m([3,4,5,6])
]
)
self.assertEqual(len(storage), 4)
def test2(self):
class Code(object):
def __init__(self):
# x,y,z,mass
self.data = []
self.get_position_called = False
self.set_position_called = False
self.get_mass_called = False
self.set_mass_called = False
def get_number_of_particles(self):
return 0 if not self.data else len(self.data[0])
def get_position(self,index):
self.get_position_called = True
data_to_return = [(self.data[0][i], self.data[1][i], self.data[2][i]) for i in index]
data_to_return = numpy.asarray(data_to_return).reshape(3,-1)
return [units.m(x) for x in data_to_return]
def get_mass(self,index):
self.get_mass_called = True
data_to_return = [self.data[3][i] for i in index]
return units.kg(data_to_return)
def set_position(self,index,x,y,z):
self.set_position_called = True
pass
def set_mass(self,index,mass):
self.set_mass_called = True
pass
def new_particle(self, x, y, z, mass):
x = x.value_in(units.m)
y = y.value_in(units.m)
z = z.value_in(units.m)
mass = mass.value_in(units.kg)
self.data = [x,y,z, mass]
return [i for i in range(len(x))]
code = Code()
storage = InCodeAttributeStorage(
code,
NewParticleMethod(code.new_particle,("x","y","z","mass")),
None,
code.get_number_of_particles,
[],
[
ParticleGetAttributesMethod(code.get_position,("x","y","z")),
ParticleGetAttributesMethod(code.get_mass,("mass",)),
],
name_of_the_index = "index"
)
storage.add_particles_to_store(
[1,2,3,4],
["x","y","z", "mass"],
[
units.m([1,2,3,4]),
units.m([2,3,4,5]),
units.m([3,4,5,6]),
units.kg([13,14,15,16]),
]
)
self.assertEqual(len(storage), 4)
self.assertEqual(storage.get_defined_attribute_names(), [ "mass", "x","y","z"])
self.assertFalse(code.get_position_called)
self.assertFalse(code.get_mass_called)
indices = storage.get_indices_of([2,3])
x,y,mass = storage.get_values_in_store(indices,["x","y","mass"])
self.assertTrue(code.get_position_called)
self.assertTrue(code.get_mass_called)
self.assertEqual(x[1], 3 | units.m)
self.assertEqual(mass[1], 15 | units.kg)
def test3(self):
class Code(object):
def __init__(self):
# mass
self.data = []
self.get_mass_called = False
self.set_mass_called = False
def get_number_of_particles(self):
return 0 if not self.data else len(self.data[0])
def get_mass(self,index):
self.get_mass_called = True
data_to_return = [self.data[0][i] for i in index]
return units.kg(data_to_return)
def set_mass(self,index,mass):
self.set_mass_called = True
pass
def new_particle(self, mass):
mass = mass.value_in(units.kg)
self.data = [mass]
return [i for i in range(len(mass))]
code = Code()
storage = InCodeAttributeStorage(
code,
NewParticleMethod(code.new_particle,("mass",)),
None,
code.get_number_of_particles,
[],
[
ParticleGetAttributesMethod(code.get_mass,("mass",)),
],
name_of_the_index = "index"
)
storage.add_particles_to_store(
[1,2,3,4],
["mass"],
[
units.kg([1,2,3,4]),
]
)
self.assertEqual(len(storage), 4)
self.assertEqual(storage.get_defined_attribute_names(), ["mass",])
indices = storage.get_indices_of([2,3])
index,mass = storage.get_values_in_store(indices,["index_in_code","mass"])
self.assertTrue(code.get_mass_called)
self.assertEqual(index[0], 1)
self.assertEqual(mass[0], 2 | units.kg)
self.assertEqual(index[1], 2)
self.assertEqual(mass[1], 3 | units.kg)
def test4(self):
class Code(object):
def __init__(self):
# mass
self.data = []
self.get_mass_called = False
self.set_mass_called = False
self.number_of_particles = 0
def get_number_of_particles(self):
return self.number_of_particles
def get_mass(self,index):
self.get_mass_called = True
data_to_return = [self.data[i] for i in index]
return units.kg(data_to_return)
def set_mass(self,index,mass):
self.set_mass_called = True
pass
def new_particle(self, mass):
mass = mass.value_in(units.kg)
self.data = mass
self.number_of_particles = len(self.data)
return [i for i in range(len(mass))]
code = Code()
storage = InCodeAttributeStorage(
code,
NewParticleMethod(code.new_particle,("mass",)),
None,
code.get_number_of_particles,
[],
[
ParticleGetAttributesMethod(code.get_mass,("mass",)),
],
name_of_the_index = "index"
)
storage.add_particles_to_store(
numpy.asarray([1,2,3,4], dtype='uint64'),
["mass"],
[
units.kg([1,2,3,4]),
]
)
self.assertEqual(len(storage), 4)
storage._remove_indices([1,2,])
code.number_of_particles = 2
indices = storage.get_indices_of([1,4])
index,mass = storage.get_values_in_store(indices,["index_in_code","mass"])
self.assertEqual(index[0], 0)
self.assertEqual(index[1], 3)
self.assertEqual(mass[0], 1 | units.kg)
self.assertEqual(mass[1], 4 | units.kg)
self.assertEqual(len(storage), 2)
storage._add_indices([4,5])
code.data = numpy.concatenate((code.data, [5, 6]))
code.number_of_particles = 4
self.assertEqual(len(storage), 4)
indices = storage.get_indices_of(storage.particle_keys)
mass, = storage.get_values_in_store(indices,["mass"])
self.assertEqual(mass[0], 1 | units.kg)
self.assertEqual(mass[1], 4 | units.kg)
self.assertEqual(mass[2], 5 | units.kg)
self.assertEqual(mass[3], 6 | units.kg)
storage._remove_indices([4,])
code.number_of_particles = 3
self.assertEqual(len(storage), 3)
indices = storage.get_indices_of(storage.particle_keys)
mass, = storage.get_values_in_store(indices,["mass"])
self.assertEqual(mass[0], 1 | units.kg)
self.assertEqual(mass[1], 4 | units.kg)
self.assertEqual(mass[2], 6 | units.kg)
def test5(self):
class Code(object):
def __init__(self):
self.data = []
self.number_of_particles = 0
def get_number_of_particles(self):
return self.number_of_particles
def get_mass(self,index):
data_to_return = [self.data[i][0] for i in index]
return units.kg(data_to_return)
def get_children(self,index):
return [(self.data[i][1]) for i in index], [(self.data[i][2]) for i in index]
def new_particle(self, mass):
mass = mass.value_in(units.kg)
self.data = [[x,-1,-1] for x in mass]
self.number_of_particles = len(self.data)
return [i for i in range(len(mass))]
code = Code()
children_getter = ParticleGetAttributesMethod(
code.get_children,
('child1', 'child2',)
)
children_getter.index_output_attributes = set(['child1','child2'])
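# index_output_attributes appears to mark 'child1'/'child2' as particle indices
# rather than plain values, so the storage resolves them back to particle keys
# (e.g. code index 1 -> key 200 in the assertions below); inferred from this test.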
storage = InCodeAttributeStorage(
code,
NewParticleMethod(code.new_particle,("mass",)),
None,
code.get_number_of_particles,
[],
[
ParticleGetAttributesMethod(code.get_mass,("mass",)),
children_getter
],
name_of_the_index = "index"
)
storage.add_particles_to_store(
numpy.asarray([100,200,300,400], dtype='uint64'),
["mass"],
[
units.kg([1,2,3,4]),
]
)
self.assertEqual(len(storage), 4)
indices = storage.get_indices_of([100,400])
mass = storage.get_values_in_store(indices,["mass",])[0]
self.assertEqual(mass[0], 1.0 | units.kg)
self.assertEqual(mass[1], 4.0 | units.kg)
code.data[0][1] = 1
code.data[0][2] = 2
indices = storage.get_indices_of([100])
child1,child2 = storage.get_values_in_store(indices,['child1', 'child2'])
self.assertEqual(child1[0].number, 200)
self.assertEqual(child2[0].number, 300)
def test7(self):
class Code(object):
def __init__(self):
# x,y,z,mass
self.data = []
self.get_position_called = False
self.set_position_called = False
self.get_mass_called = False
self.set_mass_called = False
def get_number_of_particles(self):
return 0 if not self.data else len(self.data[0])
def get_position(self,index):
self.get_position_called = True
data_to_return = [(self.data[0][i], self.data[1][i], self.data[2][i]) for i in index]
data_to_return = numpy.asarray(data_to_return).reshape(3,-1)
return [units.m(x) for x in data_to_return]
def get_mass(self,index):
self.get_mass_called = True
data_to_return = [self.data[3][i] for i in index]
return data_to_return
def set_position(self,index,x,y,z):
self.set_position_called = True
pass
def set_mass(self,index,mass):
self.set_mass_called = True
for i,j in enumerate(index):
self.data[3][j] = mass[i]
return [0 for i in range(len(index))]
def new_particle(self, x, y, z, mass):
x = x.value_in(units.m)
y = y.value_in(units.m)
z = z.value_in(units.m)
mass = mass
self.data = [x,y,z,mass]
return [i for i in range(len(x))]
code = Code()
storage = InCodeAttributeStorage(
code,
NewParticleMethod(code.new_particle,("x","y","z","mass")),
None,
code.get_number_of_particles,
[
ParticleSetAttributesMethod(code.set_position,("x","y","z")),
ParticleSetAttributesMethod(code.set_mass,("mass",)),
],
[
ParticleGetAttributesMethod(code.get_position,("x","y","z")),
ParticleGetAttributesMethod(code.get_mass,("mass",)),
],
name_of_the_index = "index"
)
storage.add_particles_to_store(
[1,2,3,4],
["x","y","z", "mass"],
[
units.m([1,2,3,4]),
units.m([2,3,4,5]),
units.m([3,4,5,6]),
numpy.asarray([13.0,14.0,15,16]),
]
)
self.assertEqual(len(storage), 4)
self.assertEqual(storage.get_defined_attribute_names(), [ "mass", "x","y","z"])
self.assertFalse(code.get_position_called)
self.assertFalse(code.get_mass_called)
indices = storage.get_indices_of([2,3])
x,y,mass = storage.get_values_in_store(indices,["x","y","mass"])
self.assertTrue(code.get_position_called)
self.assertTrue(code.get_mass_called)
self.assertEqual(x[1], 3 | units.m)
self.assertEqual(mass[1], 15 )
self.assertEqual(mass[0], 14 )
storage.set_values_in_store(indices,["x","y", "z", "mass"], [[10,11] | units.m , [12,14] | units.m, [12,14] | units.m, [40.0, 50.0]])
x,y,mass = storage.get_values_in_store(indices,["x","y","mass"])
self.assertEqual(mass[1], 50 )
self.assertEqual(mass[0], 40 )
class TestGrids(amusetest.TestCase):
def test1(self):
class Code(object):
def get_range(self):
return (1,10,2,5,3,6)
def get_ijk(self,i,j,k):
return units.m(i), units.m(j), units.m(k)
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[],
[ParticleGetAttributesMethod(code.get_ijk,("i","j","k")),],
)
self.assertEqual(storage.storage_shape(), (10, 4, 4))
self.assertEqual(storage.get_defined_attribute_names(), ["i","j","k"])
values = storage.get_values_in_store((0,1,1), ("i",))
self.assertEqual(len(values), 1)
self.assertEqual(values[0], 1 | units.m)
values = storage.get_values_in_store((0,1,1), ("k","j","i",))
self.assertEqual(values[0], 4 | units.m)
self.assertEqual(values[1], 3 | units.m)
self.assertEqual(values[2], 1 | units.m)
def test2(self):
class Code(object):
def get_range(self):
return (1,10,2,5,3,6)
def get_ijk(self,i,j,k):
return units.m(i), units.m(j), units.m(k)
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[],
[ParticleGetAttributesMethod(code.get_ijk,("i","j","k")),],
)
values = storage.get_values_in_store(numpy.s_[0:2], ("i",))
self.assertEqual(len(values), 1)
self.assertEqual(len(values[0]), 2)
self.assertEqual(values[0].number.shape, (2,4,4))
self.assertEqual(values[0][0][0][0], 1 | units.m)
self.assertEqual(values[0][1][0][0], 2 | units.m)
def test3(self):
shape = (11,5,5)
class Code(object):
def __init__(self):
self.storage = numpy.arange(shape[0]*shape[1]*shape[2]).reshape(shape)
def get_range(self):
return (0,shape[0]-1,0,shape[1]-1,0,shape[2]-1)
def get_a(self,i_s,j_s,k_s):
return units.m.new_quantity(numpy.asarray([(self.storage[i][j][k]) for i,j,k in zip(i_s, j_s, k_s)]))
def set_a(self, i_s, j_s, k_s, values):
#~ print i_s, j_s, k_s
#~ print "VALUES:", values
index = 0
for i,j,k in zip(i_s, j_s, k_s):
self.storage[i][j][k] = values[index].value_in(units.m)
index += 1
#~ print index
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[ParticleSetAttributesMethod(code.set_a,("a",)),],
[ParticleGetAttributesMethod(code.get_a,("a",)),],
)
values = storage.get_values_in_store(None, ("a",))
self.assertTrue(numpy.all(values[0].value_in(units.m) == code.storage))
#self.assertTrue(False)
values = storage.get_values_in_store((0,0,0), ("a",))
self.assertEqual(values[0], 0 | units.m)
storage.set_values_in_store((0,0,0), ("a",), [11.0 | units.m,])
values = storage.get_values_in_store((0,0,0), ("a",))
self.assertEqual(values[0], 11.0 | units.m)
values = storage.get_values_in_store((0,0), ("a",))
storage.set_values_in_store((0,0), ("a",), [[11.0, 12.0, 13.0, 14.0, 15.0]| units.m,])
self.assertTrue(numpy.all(code.storage[0][0] == [11.0, 12.0, 13.0, 14.0, 15.0]))
def test4(self):
class Code(object):
def get_range(self, d, l):
return (1,10,2,5,3,6)
def get_ijk(self,i,j,k, d, l):
return units.m(d), units.m(l), units.m(k)
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[],
[ParticleGetAttributesMethod(code.get_ijk,("i","j","k")),],
extra_keyword_arguments_for_getters_and_setters = {'d':1, 'l':2},
)
self.assertEqual(storage.storage_shape(), (10, 4, 4))
self.assertEqual(storage.get_defined_attribute_names(), ["i","j","k"])
values = storage.get_values_in_store((0,1,1), ("i",))
self.assertEqual(len(values), 1)
self.assertEqual(values[0], 1 | units.m)
values = storage.get_values_in_store((0,1,1), ("k","j","i",))
self.assertEqual(values[0], 4 | units.m)
self.assertEqual(values[1], 2 | units.m)
self.assertEqual(values[2], 1 | units.m)
def test5(self):
class Code(object):
def get_range(self):
return (1,10,2,5,3,6)
def get_ijk(self,i,j,k):
return units.m(i), units.m(j), units.m(k)
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[],
[ParticleGetAttributesMethod(code.get_ijk,("i","j","k")),],
)
self.assertEqual(storage.storage_shape(), (10, 4, 4))
self.assertEqual(storage.get_defined_attribute_names(), ["i","j","k"])
values = storage.get_values_in_store(None, ("i",))
self.assertEqual(len(values), 1)
self.assertEqual(values[0].number.ndim, 3)
def test6(self):
shape = (11,5,5)
class Code(object):
def __init__(self):
self.storage = numpy.arange(shape[0]*shape[1]*shape[2]).reshape(shape)
def get_range(self):
return (0,shape[0]-1,0,shape[1]-1,0,shape[2]-1)
def get_a(self,i_s,j_s,k_s):
return numpy.asarray([(self.storage[i][j][k]) for i,j,k in zip(i_s, j_s, k_s)])
def set_a(self, i_s, j_s, k_s, values):
#~ print i_s, j_s, k_s
#~ print "VALUES:", values
index = 0
for i,j,k in zip(i_s, j_s, k_s):
self.storage[i][j][k] = values[index]
index += 1
#~ print index
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[ParticleSetAttributesMethod(code.set_a,("a",)),],
[ParticleGetAttributesMethod(code.get_a,("a",)),],
)
values = storage.get_values_in_store(None, ("a",))
self.assertTrue(numpy.all(values[0] == code.storage))
values = storage.get_values_in_store((0,0,0), ("a",))
self.assertEqual(values[0], 0)
storage.set_values_in_store((0,0,0), ("a",), [11.0,])
values = storage.get_values_in_store((0,0,0), ("a",))
self.assertEqual(values[0], 11.0)
values = storage.get_values_in_store((0,0), ("a",))[0]
self.assertTrue(numpy.all(values == [11.0, 1.0, 2.0, 3.0, 4.0]))
storage.set_values_in_store((0,0), ("a",), [[11.0, 12.0, 13.0, 14.0, 15.0],])
self.assertTrue(numpy.all(code.storage[0][0] == [11.0, 12.0, 13.0, 14.0, 15.0]))
def test7(self):
shape = (11,5,5)
class Code(object):
def __init__(self):
self.storage = numpy.arange(shape[0]*shape[1]*shape[2]).reshape(shape)
def get_range(self):
return (0,shape[0]-1,0,shape[1]-1,0,shape[2]-1)
def get_a(self,i_s,j_s,k_s):
return numpy.asarray([(self.storage[i][j][k]) for i,j,k in zip(i_s, j_s, k_s)])
def set_a(self, i_s, j_s, k_s, values):
index = 0
for i,j,k in zip(i_s, j_s, k_s):
self.storage[i][j][k] = values[index]
index += 1
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[ParticleSetAttributesMethod(code.set_a,("a",)),],
[ParticleGetAttributesMethod(code.get_a,("a",)),],
)
values = storage.get_values_in_store((), ())
self.assertTrue(values==[])
values = storage.get_values_in_store((0,0,1,), ("a",))
self.assertTrue(values[0]==1)
def test8(self):
class Code(object):
def __init__(self):
self.storage = 1. | units.m
def get_range(self):
return ()
def get_a(self):
return self.storage
def set_a(self, value):
self.storage=value
code = Code()
storage = InCodeGridAttributeStorage(
code,
code.get_range,
[ParticleSetAttributesMethod(code.set_a,("a",)),],
[ParticleGetAttributesMethod(code.get_a,("a",)),],
)
self.assertEqual(storage.storage_shape(), ())
self.assertEqual(storage.get_defined_attribute_names(), ['a'])
values = storage.get_values_in_store((), ("a",))
self.assertEqual(len(values), 1)
print(values,"<")
self.assertEqual(values[0], 1 | units.m)
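# Throughout these tests the `value | unit` syntax builds an AMUSE quantity and
# is used interchangeably with calling the unit directly (the tests store
# units.kg([...]) and then compare against `15 | units.kg`). A minimal sketch of
# that equivalence, assuming only the amuse.units API already imported above:
def _example_unit_syntax():
    m1 = 15 | units.kg   # operator form: attach a unit to a number
    m2 = units.kg(15)    # constructor form: same quantity
    assert m1 == m2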
| 35.333791
| 141
| 0.49135
| 3,033
| 25,723
| 3.984174
| 0.044181
| 0.088133
| 0.034426
| 0.040218
| 0.872807
| 0.843264
| 0.830023
| 0.809335
| 0.787653
| 0.779626
| 0
| 0.035187
| 0.375773
| 25,723
| 727
| 142
| 35.382393
| 0.717382
| 0.007075
| 0
| 0.710952
| 0
| 0
| 0.012692
| 0
| 0
| 0
| 0
| 0
| 0.159785
| 1
| 0.129264
| false
| 0.010772
| 0.012567
| 0.041293
| 0.238779
| 0.001795
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fbe972302437b480308156b27377c8886c935f95
| 4,834
|
py
|
Python
|
exp/fig14b/logtable_def.py
|
SJTU-IPADS/fgnn-artifacts
|
c96e7ec8204d767152958dc63a764466e90424fd
|
[
"Apache-2.0"
] | 23
|
2022-01-25T13:28:51.000Z
|
2022-03-23T07:05:47.000Z
|
exp/fig14b/logtable_def.py
|
SJTU-IPADS/gnnlab
|
5c73564e4a9bd5deeff7eed0b923c115ccba34d7
|
[
"Apache-2.0"
] | null | null | null |
exp/fig14b/logtable_def.py
|
SJTU-IPADS/gnnlab
|
5c73564e4a9bd5deeff7eed0b923c115ccba34d7
|
[
"Apache-2.0"
] | 1
|
2022-02-28T18:48:56.000Z
|
2022-02-28T18:48:56.000Z
|
import os
import sys
sys.path.append(os.path.join(os.getcwd(), '../common'))
from runner_helper2 import *
def get_dgl_logtable():
return LogTable(
num_row=8,
num_col=1
).update_col_definition(
col_id=0,
definition='epoch_time'
).update_row_definition(
row_id=0,
col_range=[0, 0],
devices='0',
).update_row_definition(
row_id=1,
col_range=[0, 0],
devices='0 1',
).update_row_definition(
row_id=2,
col_range=[0, 0],
devices='0 1 2',
).update_row_definition(
row_id=3,
col_range=[0, 0],
devices='0 1 2 3',
).update_row_definition(
row_id=4,
col_range=[0, 0],
devices='0 1 2 3 4',
).update_row_definition(
row_id=5,
col_range=[0, 0],
devices='0 1 2 3 4 5',
).update_row_definition(
row_id=6,
col_range=[0, 0],
devices='0 1 2 3 4 5 6',
).update_row_definition(
row_id=7,
col_range=[0, 0],
devices='0 1 2 3 4 5 6 7',
).create()
def get_fgnn_logtable():
return LogTable(
num_row=18,
num_col=1
).update_col_definition(
col_id=0,
definition='pipeline_train_epoch_time'
).update_row_definition(
row_id=0,
col_range=[0, 0],
num_sample_worker=1,
num_train_worker=1
).update_row_definition(
row_id=1,
col_range=[0, 0],
num_sample_worker=1,
num_train_worker=2
).update_row_definition(
row_id=2,
col_range=[0, 0],
num_sample_worker=1,
num_train_worker=3
).update_row_definition(
row_id=3,
col_range=[0, 0],
num_sample_worker=1,
num_train_worker=4
).update_row_definition(
row_id=4,
col_range=[0, 0],
num_sample_worker=1,
num_train_worker=5
).update_row_definition(
row_id=5,
col_range=[0, 0],
num_sample_worker=1,
num_train_worker=6
).update_row_definition(
row_id=6,
col_range=[0, 0],
num_sample_worker=1,
num_train_worker=7
).update_row_definition(
row_id=7,
col_range=[0, 0],
num_sample_worker=2,
num_train_worker=1
).update_row_definition(
row_id=8,
col_range=[0, 0],
num_sample_worker=2,
num_train_worker=2
).update_row_definition(
row_id=9,
col_range=[0, 0],
num_sample_worker=2,
num_train_worker=3
).update_row_definition(
row_id=10,
col_range=[0, 0],
num_sample_worker=2,
num_train_worker=4
).update_row_definition(
row_id=11,
col_range=[0, 0],
num_sample_worker=2,
num_train_worker=5
).update_row_definition(
row_id=12,
col_range=[0, 0],
num_sample_worker=2,
num_train_worker=6
).update_row_definition(
row_id=13,
col_range=[0, 0],
num_sample_worker=3,
num_train_worker=1
).update_row_definition(
row_id=14,
col_range=[0, 0],
num_sample_worker=3,
num_train_worker=2
).update_row_definition(
row_id=15,
col_range=[0, 0],
num_sample_worker=3,
num_train_worker=3
).update_row_definition(
row_id=16,
col_range=[0, 0],
num_sample_worker=3,
num_train_worker=4
).update_row_definition(
row_id=17,
col_range=[0, 0],
num_sample_worker=3,
num_train_worker=5
).create()
def get_sgnn_logtable():
return LogTable(
num_row=8,
num_col=4
).update_col_definition(
col_id=0,
definition='epoch_time:sample_total'
).update_col_definition(
col_id=1,
definition='epoch_time:copy_time'
).update_col_definition(
col_id=2,
definition='epoch_time:train_total'
).update_col_definition(
col_id=3,
definition='epoch_time:total'
).update_row_definition(
row_id=0,
col_range=[0, 3],
num_worker=1,
).update_row_definition(
row_id=1,
col_range=[0, 3],
num_worker=2,
).update_row_definition(
row_id=2,
col_range=[0, 3],
num_worker=3,
).update_row_definition(
row_id=3,
col_range=[0, 3],
num_worker=4,
).update_row_definition(
row_id=4,
col_range=[0, 3],
num_worker=5,
).update_row_definition(
row_id=5,
col_range=[0, 3],
num_worker=6,
).update_row_definition(
row_id=6,
col_range=[0, 3],
num_worker=7,
).update_row_definition(
row_id=7,
col_range=[0, 3],
num_worker=8,
).create()
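# The chained update_row_definition calls above are fully regular. Under the same
# LogTable builder API (assumed to return the table from each update_* call, as
# the chaining implies), the DGL table could equivalently be generated with a
# loop; a sketch, not part of the original module:
def get_dgl_logtable_compact():
    """Hypothetical rewrite of get_dgl_logtable() using a loop; same rows assumed."""
    table = LogTable(
        num_row=8,
        num_col=1
    ).update_col_definition(
        col_id=0,
        definition='epoch_time'
    )
    for row_id in range(8):
        # devices grows from '0' up to '0 1 2 3 4 5 6 7'
        table = table.update_row_definition(
            row_id=row_id,
            col_range=[0, 0],
            devices=' '.join(str(d) for d in range(row_id + 1)),
        )
    return table.create()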
| 24.414141
| 55
| 0.56475
| 671
| 4,834
| 3.684054
| 0.080477
| 0.123786
| 0.261327
| 0.302589
| 0.90089
| 0.879854
| 0.837783
| 0.834142
| 0.804207
| 0.667071
| 0
| 0.06229
| 0.322507
| 4,834
| 197
| 56
| 24.538071
| 0.692519
| 0
| 0
| 0.764398
| 0
| 0
| 0.039098
| 0.014481
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015707
| true
| 0
| 0.015707
| 0.015707
| 0.04712
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
fbe984bc14d9dc01929c991f789056a6acdbdac1
| 6,909
|
py
|
Python
|
tube/tests/test_example.py
|
adamgilman/tube-python
|
3d94e79f7d367eed95ed68b53d0ab13a36cc3219
|
[
"BSD-3-Clause"
] | 5
|
2017-01-26T00:06:08.000Z
|
2020-06-03T16:07:09.000Z
|
tube/tests/test_example.py
|
adamgilman/tube-python
|
3d94e79f7d367eed95ed68b53d0ab13a36cc3219
|
[
"BSD-3-Clause"
] | null | null | null |
tube/tests/test_example.py
|
adamgilman/tube-python
|
3d94e79f7d367eed95ed68b53d0ab13a36cc3219
|
[
"BSD-3-Clause"
] | 1
|
2021-11-22T16:23:14.000Z
|
2021-11-22T16:23:14.000Z
|
import unittest
from tube.tubeAPI import Tube
from tube.tubeAPI import TubeLine, TubeStation
from tube.tubeAPI import TubeLineManager, TubeStationManager
class EaseOfUse(unittest.TestCase):
def setUp(self):
self.tube = Tube()
def test_lines(self):
self.assertEqual( type(self.tube.lines), TubeLineManager )
self.assertEqual( type(self.tube.lines['C']), TubeLine )
self.assertEqual( type(self.tube.stations) , TubeStationManager)
self.assertEqual( type(self.tube.stations['OXC']) , TubeStation)
def test_TFLObject():
'''
Test implementation of TFL object
[note: time-dependent test, will not pass]
>>> from tflTube import TFL
>>> tfl = TFL()
>>> tfl.map.get(linecode='V')
<tflTube.TFLLine: Victoria>
>>> tfl.map.get(linecode='V').getStations()
{'VIC': <tflTube.TFLStation: Victoria>, 'WAL': <tflTube.TFLStation: Walthamstow Central>, 'PIM': <tflTube.TFLStation: Pimlico>, 'GPK': <tflTube.TFLStation: Green Park>, 'WST': <tflTube.TFLStation: Warren Street>, 'BRX': <tflTube.TFLStation: Brixton>, 'FPK': <tflTube.TFLStation: Finsbury Park>, 'STK': <tflTube.TFLStation: Stockwell>, 'KXX': <tflTube.TFLStation: King's Cross St Pancras>, 'TTH': <tflTube.TFLStation: Tottenham Hale>, 'HBY': <tflTube.TFLStation: Highbury and Islington>, 'VUX': <tflTube.TFLStation: Vauxhall>, 'BHR': <tflTube.TFLStation: Blackhorse Road>, 'SVS': <tflTube.TFLStation: Seven Sisters>, 'EUS': <tflTube.TFLStation: Euston>, 'OXC': <tflTube.TFLStation: Oxford Circus>}
>>> tfl.map.get(stationcode="OXC")
<tflTube.TFLStation: Oxford Circus>
>>> tfl.map.get(linecode="B")
<tflTube.TFLLine: Bakerloo>
>>> tfl.map.get(linecode="B", stationcode="OXC").platforms
{u'Northbound - Platform 4': <tflTube.TFLPlatform: Bakerloo Northbound - Platform 4 >, u'Southbound - Platform 3': <tflTube.TFLPlatform: Bakerloo Southbound - Platform 3 >}
>>> tfl.map.get(linecode="V").getAllTrains()
{u'1019265': <tflTube.TFLTrain LCID(1019265) on Victoria Line at Between Highbury & Islington and Kings Cross St. P>, u'1019894': <tflTube.TFLTrain LCID(1019894) on Victoria Line at At Brixton Platform 2>, u'1020196': <tflTube.TFLTrain LCID(1020196) on Victoria Line at At Victoria>, u'1018651': <tflTube.TFLTrain LCID(1018651) on Victoria Line at At Blackhorse Road>, u'1019837': <tflTube.TFLTrain LCID(1019837) on Victoria Line at Between Kings Cross St. Pancras and Highbury & Isl>, u'1018285': <tflTube.TFLTrain LCID(1018285) on Victoria Line at Between Seven Sisters and Finsbury Park>, u'1018931': <tflTube.TFLTrain LCID(1018931) on Victoria Line at Between Tottenham Hale and Blackhorse Road>, u'1019444': <tflTube.TFLTrain LCID(1019444) on Victoria Line at At Vauxhall>, u'1019373': <tflTube.TFLTrain LCID(1019373) on Victoria Line at Between Finsbury Park and Seven Sisters>, u'1016438': <tflTube.TFLTrain LCID(1016438) on Victoria Line at Between Oxford Circus and Warren Street>, u'1018584': <tflTube.TFLTrain LCID(1018584) on Victoria Line at Between Kings Cross St. Pancras and Euston>, u'1016265': <tflTube.TFLTrain LCID(1016265) on Victoria Line at Approaching Stockwell>, u'1019561': <tflTube.TFLTrain LCID(1019561) on Victoria Line at At Walthamstow Central>, u'1020270': <tflTube.TFLTrain LCID(1020270) on Victoria Line at Northumberland Park Depot Area>, u'1018676': <tflTube.TFLTrain LCID(1018676) on Victoria Line at Between Warren Street and Oxford Circus>, u'1018480': <tflTube.TFLTrain LCID(1018480) on Victoria Line at At Oxford Circus>, u'1017788': <tflTube.TFLTrain LCID(1017788) on Victoria Line at Between Pimlico and Victoria>, u'1020123': <tflTube.TFLTrain LCID(1020123) on Victoria Line at At Green Park>, u'1016226': <tflTube.TFLTrain LCID(1016226) on Victoria Line at Between Walthamstow Central and Blackhorse Road>, u'1015704': <tflTube.TFLTrain LCID(1015704) on Victoria Line at Departed Highbury & Islington>, u'1019728': <tflTube.TFLTrain LCID(1019728) on Victoria Line at At Seven Sisters Platform 5>, u'1016783': <tflTube.TFLTrain LCID(1016783) on Victoria Line at At Brixton Platform 1>, u'1019976': <tflTube.TFLTrain LCID(1019976) on Victoria Line at Between Finsbury Park and Highbury & Islington>, u'1018094': <tflTube.TFLTrain LCID(1018094) on Victoria Line at At Euston>, u'1019666': <tflTube.TFLTrain LCID(1019666) on Victoria Line at Between Pimlico and Vauxhall>, u'1016351': <tflTube.TFLTrain LCID(1016351) on Victoria Line at Departed Finsbury Park>, u'1018158': <tflTube.TFLTrain LCID(1018158) on Victoria Line at At Stockwell>, u'1017691': <tflTube.TFLTrain LCID(1017691) on Victoria Line at At Platform>}
>>> tfl.map.get(stationcode="OXC").getAllTrains()
{'trains': {u'1018651': <tflTube.TFLTrain LCID(1018651) on Victoria Line at Approaching Tottenham Hale>, u'1017788': <tflTube.TFLTrain LCID(1017788) on Victoria Line at At Victoria>, u'1019728': <tflTube.TFLTrain LCID(1019728) on Victoria Line at Between Seven Sisters and Finsbury Park>, u'1018285': <tflTube.TFLTrain LCID(1018285) on Victoria Line at At Finsbury Park>, u'1016783': <tflTube.TFLTrain LCID(1016783) on Victoria Line at Brixton Area>, u'1019976': <tflTube.TFLTrain LCID(1019976) on Victoria Line at At Highbury & Islington>, u'1019894': <tflTube.TFLTrain LCID(1019894) on Victoria Line at At Brixton Platform 2>, u'1019265': <tflTube.TFLTrain LCID(1019265) on Victoria Line at At Kings Cross St. Pancras>, u'1016226': <tflTube.TFLTrain LCID(1016226) on Victoria Line at At Walthamstow Central>, u'1019444': <tflTube.TFLTrain LCID(1019444) on Victoria Line at At Pimlico>, u'1016438': <tflTube.TFLTrain LCID(1016438) on Victoria Line at At Oxford Circus>, u'1018584': <tflTube.TFLTrain LCID(1018584) on Victoria Line at Between Warren Street and Euston>, u'1020123': <tflTube.TFLTrain LCID(1020123) on Victoria Line at Approaching Oxford Circus>, u'1019561': <tflTube.TFLTrain LCID(1019561) on Victoria Line at At Walthamstow Central>, u'1020270': <tflTube.TFLTrain LCID(1020270) on Victoria Line at Between Northumberland Park Depot and Seven Sisters>, u'1018158': <tflTube.TFLTrain LCID(1018158) on Victoria Line at Between Stockwell and Vauxhall>}}
>>> tfl.map.get(linecode="B", stationcode="OXC").getAllTrains()
{u'1020241': <tflTube.TFLTrain LCID(1020241) on Bakerloo Line at Approaching Paddington>, u'1019966': <tflTube.TFLTrain LCID(1019966) on Bakerloo Line at Between Regents Park and Oxford Circus>, u'1020119': <tflTube.TFLTrain LCID(1020119) on Bakerloo Line at At Embankment Platform 5>, u'1019579': <tflTube.TFLTrain LCID(1019579) on Bakerloo Line at Queen's Park North Sidings>, u'1020129': <tflTube.TFLTrain LCID(1020129) on Bakerloo Line at At Waterloo Platform 3>, u'1019713': <tflTube.TFLTrain LCID(1019713) on Bakerloo Line at At Queen's Park Platform 2>, u'1019521': <tflTube.TFLTrain LCID(1019521) on Bakerloo Line at At Marylebone Platform 2>, u'1019884': <tflTube.TFLTrain LCID(1019884) on Bakerloo Line at At Elephant & Castle Platform 3>}
'''
pass
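# The docstring above notes that its doctest examples depend on live train data
# and will not pass. If they were ever collected by a doctest runner, the
# standard way to keep them illustrative but unchecked is the +SKIP directive,
# e.g. (a sketch, not part of the original test):
#
#     >>> tfl.map.get(linecode='V')   # doctest: +SKIP
#     <tflTube.TFLLine: Victoria>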
| 119.12069
| 2,660
| 0.760602
| 977
| 6,909
| 5.376663
| 0.16479
| 0.148487
| 0.188083
| 0.134019
| 0.529793
| 0.455168
| 0.429659
| 0.397868
| 0.348563
| 0.348563
| 0
| 0.121715
| 0.118831
| 6,909
| 57
| 2,661
| 121.210526
| 0.74113
| 0.915473
| 0
| 0
| 0
| 0
| 0.007299
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.214286
| false
| 0.071429
| 0.285714
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
221791e45f3c5253e5341fee0fc3f259e19d3ecc
| 27,476
|
py
|
Python
|
app/agq/clause.py
|
GaganCJ/QuestionGeneratorApp
|
e9c062b512920a579d6c2a56172320c6fbae4aa2
|
[
"Unlicense"
] | null | null | null |
app/agq/clause.py
|
GaganCJ/QuestionGeneratorApp
|
e9c062b512920a579d6c2a56172320c6fbae4aa2
|
[
"Unlicense"
] | 2
|
2018-11-20T14:09:20.000Z
|
2018-11-20T16:48:45.000Z
|
app/agq/clause.py
|
GaganCJ/QuestionGeneratorApp
|
e9c062b512920a579d6c2a56172320c6fbae4aa2
|
[
"Unlicense"
] | 2
|
2018-11-19T05:06:27.000Z
|
2018-11-25T05:17:36.000Z
|
import nltk
from app.agq import identification
from app.agq import nonClause
def whom_1(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<TO>+<DT>?<RB.?>*<JJ.?>*<NN.?|PRP|PRP\$|VBG|DT|POS|CD|VBN>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(chunked)):
str1 = ""
str2 = ""
str3 = ""
if j in list1:
for k in range(j):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
for k in range(j + 1, len(chunked)):
if k in list1:
str3 += nonClause.get_chunk(chunked[k])
else:
str3 += (chunked[k][0] + " ")
if chunked[j][1][1] == 'PRP':
str2 = " to whom "
else:
for x in range(len(chunked[j])):
if (chunked[j][x][1] == "NNP" or chunked[j][x][1] == "NNPS" or chunked[j][x][1] == "NNS" or
chunked[j][x][1] == "NN"):
break
for x1 in range(len(ner)):
if ner[x1][0] == chunked[j][x][0]:
if ner[x1][1] == "PERSON":
str2 = " to whom "
elif ner[x1][1] == "LOCATION" or ner[x1][1] == "ORGANISATION":
str2 = " where "
elif ner[x1][1] == "TIME" or ner[x1][1] == "DATE":
str2 = " when "
else:
str2 = "to what "
tok = nltk.word_tokenize(str1)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<EX>?<DT>?<JJ.?>*<NN.?|PRP|PRP\$|POS|IN|DT|CC|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
list2 = identification.chunk_search(str1, chunked1)
if len(list2) != 0:
m = list2[len(list2) - 1]
str4 = nonClause.get_chunk(chunked1[m])
str4 = identification.verbphrase_identify(str4)
str5 = ""
str6 = ""
for k in range(m):
if k in list2:
str5 += nonClause.get_chunk(chunked1[k])
else:
str5 += (chunked1[k][0] + " ")
for k in range(m + 1, len(chunked1)):
if k in list2:
str6 += nonClause.get_chunk(chunked1[k])
else:
str6 += (chunked1[k][0] + " ")
st = str5 + str2 + str4 + str6 + str3
for l in range(num + 1, len(segment_set)):
st += ("," + segment_set[l])
st += '?'
st = identification.postprocess(st)
# st = 'Q.' + st
list3.append(st)
return list3
def whom_2(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<IN>+<DT>?<RB.?>*<JJ.?>*<NN.?|PRP|PRP\$|POS|VBG|DT|CD|VBN>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(chunked)):
str1 = ""
str2 = ""
str3 = ""
if j in list1:
for k in range(j):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
for k in range(j + 1, len(chunked)):
if k in list1:
str3 += nonClause.get_chunk(chunked[k])
else:
str3 += (chunked[k][0] + " ")
if chunked[j][1][1] == 'PRP':
str2 = " " + chunked[j][0][0] + " whom "
else:
for x in range(len(chunked[j])):
if (chunked[j][x][1] == "NNP" or chunked[j][x][1] == "NNPS" or chunked[j][x][1] == "NNS" or
chunked[j][x][1] == "NN"):
break
for x1 in range(len(ner)):
if ner[x1][0] == chunked[j][x][0]:
if ner[x1][1] == "PERSON":
str2 = " " + chunked[j][0][0] + " whom "
elif ner[x1][1] == "LOCATION" or ner[x1][1] == "ORGANISATION":
str2 = " where "
elif ner[x1][1] == "TIME" or ner[x1][1] == "DATE":
str2 = " when "
else:
str2 = " " + chunked[j][0][0] + " what "
tok = nltk.word_tokenize(str1)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<EX>?<DT>?<JJ.?>*<NN.?|PRP|PRP\$|POS|IN|DT|CC|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
list2 = identification.chunk_search(str1, chunked1)
if len(list2) != 0:
m = list2[len(list2) - 1]
str4 = nonClause.get_chunk(chunked1[m])
str4 = identification.verbphrase_identify(str4)
str5 = ""
str6 = ""
for k in range(m):
if k in list2:
str5 += nonClause.get_chunk(chunked1[k])
else:
str5 += (chunked1[k][0] + " ")
for k in range(m + 1, len(chunked1)):
if k in list2:
str6 += nonClause.get_chunk(chunked1[k])
else:
str6 += (chunked1[k][0] + " ")
st = str5 + str2 + str4 + str6 + str3
for l in range(num + 1, len(segment_set)):
st += ("," + segment_set[l])
st += '?'
st = identification.postprocess(st)
# st = 'Q.' + st
list3.append(st)
return list3
def whom_3(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<VB.?|MD|RP>+<DT>?<RB.?>*<JJ.?>*<NN.?|PRP|PRP\$|POS|VBG|DT|CD|VBN>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(chunked)):
str1 = ""
str2 = ""
str3 = ""
if j in list1:
for k in range(j):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
for k in range(j + 1, len(chunked)):
if k in list1:
str3 += nonClause.get_chunk(chunked[k])
else:
str3 += (chunked[k][0] + " ")
if chunked[j][1][1] == 'PRP':
str2 = " whom "
else:
for x in range(len(chunked[j])):
if (chunked[j][x][1] == "NNP" or chunked[j][x][1] == "NNPS" or chunked[j][x][1] == "NNS" or
chunked[j][x][1] == "NN"):
break
for x1 in range(len(ner)):
if ner[x1][0] == chunked[j][x][0]:
if ner[x1][1] == "PERSON":
str2 = " whom "
elif ner[x1][1] == "LOCATION" or ner[x1][1] == "ORGANISATION":
str2 = " what "
elif ner[x1][1] == "TIME" or ner[x1][1] == "DATE":
str2 = " what time "
else:
str2 = " what "
strx = nonClause.get_chunk(chunked[j])
tok = nltk.word_tokenize(strx)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<VB.?|MD>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
strx = nonClause.get_chunk(chunked1[0])
str1 += strx
tok = nltk.word_tokenize(str1)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<EX>?<DT>?<JJ.?>*<NN.?|PRP|PRP\$|POS|IN|DT|CC|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
list2 = identification.chunk_search(str1, chunked1)
if len(list2) != 0:
m = list2[len(list2) - 1]
str4 = nonClause.get_chunk(chunked1[m])
str4 = identification.verbphrase_identify(str4)
str5 = ""
str6 = ""
for k in range(m):
if k in list2:
str5 += nonClause.get_chunk(chunked1[k])
else:
str5 += (chunked1[k][0] + " ")
for k in range(m + 1, len(chunked1)):
if k in list2:
str6 += nonClause.get_chunk(chunked1[k])
else:
str6 += (chunked1[k][0] + " ")
st = str5 + str2 + str4 + str6 + str3
for l in range(num + 1, len(segment_set)):
st += ("," + segment_set[l])
st += '?'
st = identification.postprocess(st)
# st = 'Q.' + st
list3.append(st)
return list3
def whose(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<DT|NN.?>*<PRP\$|POS>+<RB.?>*<JJ.?>*<NN.?|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for i in range(len(chunked)):
if i in list1:
str1 = ""
str3 = ""
str2 = ""
for k in range(i):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
str1 += " whose "
for k in range(i + 1, len(chunked)):
if k in list1:
str3 += nonClause.get_chunk(chunked[k])
else:
str3 += (chunked[k][0] + " ")
if chunked[i][1][1] == 'POS':
for k in range(2, len(chunked[i])):
str2 += (chunked[i][k][0] + " ")
if chunked[i][0][1] == 'PRP$':
for k in range(1, len(chunked[i])):
str2 += (chunked[i][k][0] + " ")
str2 = str1 + str2 + str3
str4 = ""
for l in range(0, len(segment_set)):
if l < num:
str4 += (segment_set[l] + ",")
if l > num:
str2 += ("," + segment_set[l])
str2 = str4 + str2
str2 += '?'
str2 = identification.postprocess(str2)
# str2 = 'Q.' + str2
list3.append(str2)
return list3
def what_to_do(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<TO>+<VB|VBP|RP>+<DT>?<RB.?>*<JJ.?>*<NN.?|PRP|PRP\$|POS|VBG|DT>*}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(chunked)):
str1 = ""
str2 = ""
str3 = ""
if j in list1:
for k in range(j):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
for k in range(j + 1, len(chunked)):
if k in list1:
str3 += nonClause.get_chunk(chunked[k])
else:
str3 += (chunked[k][0] + " ")
ls = nonClause.get_chunk(chunked[j])
tok = nltk.word_tokenize(ls)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<DT>?<RB.?>*<JJ.?>*<NN.?|PRP|PRP\$|POS|VBG|DT>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked2 = chunkparser.parse(tag)
lis = identification.chunk_search(ls, chunked2)
if len(lis) != 0:
x = lis[len(lis) - 1]
ls1 = nonClause.get_chunk(chunked2[x])
index = ls.find(ls1)
str2 = " " + ls[0:index]
else:
str2 = " to do "
tok = nltk.word_tokenize(str1)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<EX>?<DT>?<JJ.?>*<NN.?|PRP|PRP\$|POS|IN|DT|CC|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
list2 = identification.chunk_search(str1, chunked1)
if len(list2) != 0:
m = list2[len(list2) - 1]
str4 = nonClause.get_chunk(chunked1[m])
str4 = identification.verbphrase_identify(str4)
str5 = ""
str6 = ""
for k in range(m):
if k in list2:
str5 += nonClause.get_chunk(chunked1[k])
else:
str5 += (chunked1[k][0] + " ")
for k in range(m + 1, len(chunked1)):
if k in list2:
str6 += nonClause.get_chunk(chunked1[k])
else:
str6 += (chunked1[k][0] + " ")
if chunked2[j][1][1] == 'PRP':
tr = " whom "
else:
for x in range(len(chunked[j])):
if (chunked[j][x][1] == "NNP" or chunked[j][x][1] == "NNPS" or chunked[j][x][1] == "NNS" or
chunked[j][x][1] == "NN"):
break
for x1 in range(len(ner)):
if ner[x1][0] == chunked[j][x][0]:
if ner[x1][1] == "PERSON":
tr = " whom "
elif ner[x1][1] == "LOCATION" or ner[x1][1] == "ORGANISATION":
tr = " where "
elif ner[x1][1] == "TIME" or ner[x1][1] == "DATE":
tr = " when "
else:
tr = " what "
st = str5 + tr + str4 + str2 + str6 + str3
for l in range(num + 1, len(segment_set)):
st += ("," + segment_set[l])
st += '?'
st = identification.postprocess(st)
# st = 'Q.' + st
list3.append(st)
return list3
def who(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<EX>?<DT>?<JJ.?>*<NN.?|PRP|PRP\$|POS|IN|DT|CC|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(list1)):
m = list1[j]
str1 = ""
for k in range(m + 1, len(chunked)):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
str2 = nonClause.get_chunk(chunked[m])
tok = nltk.word_tokenize(str2)
tag = nltk.pos_tag(tok)
for m11 in range(len(tag)):
if tag[m11][1] == 'NNP' or tag[m11][1] == 'NNPS' or tag[m11][1] == 'NNS' or tag[m11][1] == 'NN':
break
s11 = ' who '
for m12 in range(len(ner)):
if ner[m12][0] == tag[m11][0]:
if ner[m12][1] == 'LOCATION':
s11 = ' which place '
elif ner[m12][1] == 'ORGANISATION':
s11 = ' who '
elif ner[m12][1] == 'DATE' or ner[m12][1] == 'TIME':
s11 = ' what time '
else:
s11 = ' who '
gram = r"""chunk:{<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
list2 = identification.chunk_search(str2, chunked1)
if len(list2) != 0:
str2 = nonClause.get_chunk(chunked1[list2[0]])
str2 = s11 + str2
for k in range(list2[0] + 1, len(chunked1)):
if k in list2:
str2 += nonClause.get_chunk(chunked[k])
else:
str2 += (chunked[k][0] + " ")
str2 += (" " + str1)
tok_1 = nltk.word_tokenize(str2)
str2 = ""
for h in range(len(tok_1)):
if tok_1[h] == "am":
str2 += " is "
else:
str2 += (tok_1[h] + " ")
for l in range(num + 1, len(segment_set)):
str2 += ("," + segment_set[l])
str2 += '?'
str2 = identification.postprocess(str2)
# str2 = 'Q.' + str2
list3.append(str2)
return list3
def howmuch_2(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<\$>*<CD>+<MD>?<VB|VBD|VBG|VBP|VBN|VBZ|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(list1)):
m = list1[j]
str1 = ""
for k in range(m + 1, len(chunked)):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
str2 = nonClause.get_chunk(chunked[m])
tok = nltk.word_tokenize(str2)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
s11 = ' how much '
list2 = identification.chunk_search(str2, chunked1)
if len(list2) != 0:
str2 = nonClause.get_chunk(chunked1[list2[0]])
str2 = s11 + str2
for k in range(list2[0] + 1, len(chunked1)):
if k in list2:
str2 += nonClause.get_chunk(chunked[k])
else:
str2 += (chunked[k][0] + " ")
str2 += (" " + str1)
tok_1 = nltk.word_tokenize(str2)
str2 = ""
for h in range(len(tok_1)):
if tok_1[h] == "am":
str2 += " is "
else:
str2 += (tok_1[h] + " ")
for l in range(num + 1, len(segment_set)):
str2 += ("," + segment_set[l])
str2 += '?'
str2 = identification.postprocess(str2)
# str2 = 'Q.' + str2
list3.append(str2)
return list3
def howmuch_1(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<IN>+<\$>?<CD>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(chunked)):
str1 = ""
str2 = ""
str3 = ""
if j in list1:
for k in range(j):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
for k in range(j + 1, len(chunked)):
if k in list1:
str3 += nonClause.get_chunk(chunked[k])
else:
str3 += (chunked[k][0] + " ")
str2 = ' ' + chunked[j][0][0] + ' how much '
tok = nltk.word_tokenize(str1)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<EX>?<DT>?<JJ.?>*<NN.?|PRP|PRP\$|POS|IN|DT|CC|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
list2 = identification.chunk_search(str1, chunked1)
if len(list2) != 0:
m = list2[len(list2) - 1]
str4 = nonClause.get_chunk(chunked1[m])
str4 = identification.verbphrase_identify(str4)
str5 = ""
str6 = ""
for k in range(m):
if k in list2:
str5 += nonClause.get_chunk(chunked1[k])
else:
str5 += (chunked1[k][0] + " ")
for k in range(m + 1, len(chunked1)):
if k in list2:
str6 += nonClause.get_chunk(chunked1[k])
else:
str6 += (chunked1[k][0] + " ")
st = str5 + str2 + str4 + str6 + str3
for l in range(num + 1, len(segment_set)):
st += ("," + segment_set[l])
st += '?'
st = identification.postprocess(st)
# st = 'Q.' + st
list3.append(st)
return list3
def howmuch_3(segment_set, num, ner):
tok = nltk.word_tokenize(segment_set[num])
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<MD>?<VB|VBD|VBG|VBP|VBN|VBZ>+<IN|TO>?<PRP|PRP\$|NN.?>?<\$>*<CD>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked = chunkparser.parse(tag)
list1 = identification.chunk_search(segment_set[num], chunked)
list3 = []
if len(list1) != 0:
for j in range(len(chunked)):
str1 = ""
str2 = ""
str3 = ""
if j in list1:
for k in range(j):
if k in list1:
str1 += nonClause.get_chunk(chunked[k])
else:
str1 += (chunked[k][0] + " ")
for k in range(j + 1, len(chunked)):
if k in list1:
str3 += nonClause.get_chunk(chunked[k])
else:
str3 += (chunked[k][0] + " ")
strx = nonClause.get_chunk(chunked[j])
tok = nltk.word_tokenize(strx)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<MD>?<VB|VBD|VBG|VBP|VBN|VBZ>+<IN|TO>?<PRP|PRP\$|NN.?>?}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
strx = nonClause.get_chunk(chunked1[0])
str1 += (" " + strx)
str2 = ' how much '
tok = nltk.word_tokenize(str1)
tag = nltk.pos_tag(tok)
gram = r"""chunk:{<EX>?<DT>?<JJ.?>*<NN.?|PRP|PRP\$|POS|IN|DT|CC|VBG|VBN>+<RB.?>*<VB.?|MD|RP>+}"""
chunkparser = nltk.RegexpParser(gram)
chunked1 = chunkparser.parse(tag)
list2 = identification.chunk_search(str1, chunked1)
if len(list2) != 0:
m = list2[len(list2) - 1]
str4 = nonClause.get_chunk(chunked1[m])
str4 = identification.verbphrase_identify(str4)
str5 = ""
str6 = ""
for k in range(m):
if k in list2:
str5 += nonClause.get_chunk(chunked1[k])
else:
str5 += (chunked1[k][0] + " ")
for k in range(m + 1, len(chunked1)):
if k in list2:
str6 += nonClause.get_chunk(chunked1[k])
else:
str6 += (chunked1[k][0] + " ")
st = str5 + str2 + str4 + str6 + str3
for l in range(num + 1, len(segment_set)):
st += ("," + segment_set[l])
st += '?'
st = identification.postprocess(st)
# st = 'Q.' + st
list3.append(st)
return list3
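# Every function above repeats the same pattern: walk a chunked parse tree and
# join everything before / after a matched chunk into plain text. A small helper
# capturing that pattern (a sketch only; the module's own functions do not call
# it) could look like this, reusing nonClause.get_chunk exactly as above:
def _split_around(chunked, position, matched_positions):
    """Return (text_before, text_after) surrounding the chunk at `position`."""
    before, after = "", ""
    for k in range(position):
        if k in matched_positions:
            before += nonClause.get_chunk(chunked[k])
        else:
            before += chunked[k][0] + " "   # plain (word, tag) leaf: keep the word
    for k in range(position + 1, len(chunked)):
        if k in matched_positions:
            after += nonClause.get_chunk(chunked[k])
        else:
            after += chunked[k][0] + " "
    return before, after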
| 38.97305
| 120
| 0.386956
| 2,753
| 27,476
| 3.798765
| 0.041409
| 0.041499
| 0.074775
| 0.033658
| 0.929241
| 0.9214
| 0.916428
| 0.914611
| 0.90983
| 0.902945
| 0
| 0.0463
| 0.477253
| 27,476
| 704
| 121
| 39.028409
| 0.681821
| 0.005314
| 0
| 0.875657
| 0
| 0.024518
| 0.068192
| 0.047415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015762
| false
| 0
| 0.005254
| 0
| 0.036778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97d7fe403d06e57af45d2ce591b792540b6687d1
| 47,641
|
py
|
Python
|
testcases/broker_test.py
|
tibkiss/pyalgotrade
|
4979315281c362dcba2e6d53da27dc4a7377ebec
|
[
"Apache-2.0"
] | 2
|
2015-04-03T10:29:14.000Z
|
2017-01-21T05:55:00.000Z
|
testcases/broker_test.py
|
tibkiss/pyalgotrade
|
4979315281c362dcba2e6d53da27dc4a7377ebec
|
[
"Apache-2.0"
] | null | null | null |
testcases/broker_test.py
|
tibkiss/pyalgotrade
|
4979315281c362dcba2e6d53da27dc4a7377ebec
|
[
"Apache-2.0"
] | null | null | null |
# PyAlgoTrade
#
# Copyright 2011 Gabriel Martin Becedillas Ruiz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <gabriel.becedillas@gmail.com>
"""
import pytest
import unittest
import datetime
from pyalgotrade import broker
from pyalgotrade.broker import backtesting
from pyalgotrade import bar
from pyalgotrade import barfeed
class Callback:
def __init__(self):
self.eventCount = 0
def onOrderUpdated(self, broker_, order):
self.eventCount += 1
class BaseTestCase(unittest.TestCase):
TestInstrument = "orcl"
def setUp(self):
self.__currMinutes = 0
self.__nextDateTime = datetime.datetime(2011, 1, 2)
def __getNextDateTime(self, switchDay):
if switchDay:
self.__nextDateTime = self.__nextDateTime + datetime.timedelta(days=1)
self.__currMinutes = 0
else:
self.__currMinutes += 1
return self.__nextDateTime + datetime.timedelta(minutes=self.__currMinutes)
def buildBars(self, openPrice, highPrice, lowPrice, closePrice, sessionClose = False):
ret = {}
dateTime = self.__getNextDateTime(sessionClose)
bar_ = bar.Bar(dateTime, openPrice, highPrice, lowPrice, closePrice, closePrice*10, closePrice)
bar_.setSessionClose(sessionClose)
ret[BaseTestCase.TestInstrument] = bar_
return bar.Bars(ret)
class BrokerTestCase(BaseTestCase):
def testRegressionGetActiveOrders(self):
activeOrders = []
def onOrderUpdated(broker, order):
activeOrders.append(len(broker.getActiveOrders()))
brk = backtesting.Broker(1000, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
brk.getOrderUpdatedEvent().subscribe(onOrderUpdated)
brk.placeOrder(brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1))
brk.placeOrder(brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1))
brk.onBars(self.buildBars(10, 15, 8, 12))
self.assertEqual(brk.getCash(), 1000 - 10*2)
self.assertEqual(activeOrders[0], 1)
self.assertEqual(activeOrders[1], 0)
class MarketOrderTestCase(BaseTestCase):
def testBuyAndSell(self):
brk = backtesting.Broker(11, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 1
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
# Sell
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 11
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testFailToBuy(self):
brk = backtesting.Broker(5, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
# Fail to buy. No money.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isAccepted()
assert order.getExecutionInfo() == None
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 0
# Fail to buy. No money. Canceled due to session close.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.onBars(self.buildBars(11, 15, 8, 12, True))
assert order.isCanceled()
assert order.getExecutionInfo() == None
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testBuy_GTC(self):
brk = backtesting.Broker(5, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
order.setGoodTillCanceled(True)
# Fail to buy. No money.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.placeOrder(order)
# Set sessionClose to True to test that the order doesn't get canceled.
brk.onBars(self.buildBars(10, 15, 8, 12, True))
assert order.isAccepted()
assert order.getExecutionInfo() == None
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 0
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.onBars(self.buildBars(2, 15, 1, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 2
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 3
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
def testBuyAndSellInTwoSteps(self):
brk = backtesting.Broker(20.4, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 2)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert round(brk.getCash(), 1) == 0.4
assert brk.getShares(BaseTestCase.TestInstrument) == 2
# Sell
order = brk.createMarketOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert round(brk.getCash(), 1) == 10.4
assert brk.getShares(BaseTestCase.TestInstrument) == 1
# Sell again
order = brk.createMarketOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(11, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 11
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert round(brk.getCash(), 1) == 21.4
assert brk.getShares(BaseTestCase.TestInstrument) == 0
def testPortfolioValue(self):
brk = backtesting.Broker(11, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 1
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert brk.getEquityWithBars(self.buildBars(11, 11, 11, 11)) == 11 + 1
assert brk.getEquityWithBars(self.buildBars(1, 1, 1, 1)) == 1 + 1
def testBuyWithCommission(self):
brk = backtesting.Broker(1020, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE), commission=backtesting.FixedCommission(10))
# Buy
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 100)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 10
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 10
assert brk.getShares(BaseTestCase.TestInstrument) == 100
def testSellShort_1(self):
brk = backtesting.Broker(1000, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Short sell
order = brk.createMarketOrder(broker.Order.Action.SELL_SHORT, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(200, 200, 200, 200))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 1200
assert brk.getShares(BaseTestCase.TestInstrument) == -1
assert brk.getEquityWithBars(self.buildBars(100, 100, 100, 100)) == 1000 + 100
assert brk.getEquityWithBars(self.buildBars(0, 0, 0, 0)) == 1000 + 200
assert brk.getEquityWithBars(self.buildBars(30, 30, 30, 30)) == 1000 + 170
# Buy at the same price.
order = brk.createMarketOrder(broker.Order.Action.BUY_TO_COVER, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(200, 200, 200, 200))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 1000
assert brk.getShares(BaseTestCase.TestInstrument) == 0
def testSellShort_2(self):
brk = backtesting.Broker(1000, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Short sell 1
order = brk.createMarketOrder(broker.Order.Action.SELL_SHORT, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(100, 100, 100, 100))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert brk.getCash() == 1100
assert brk.getShares(BaseTestCase.TestInstrument) == -1
assert brk.getEquityWithBars(self.buildBars(100, 100, 100, 100)) == 1000
assert brk.getEquityWithBars(self.buildBars(0, 0, 0, 0)) == 1000 + 100
assert brk.getEquityWithBars(self.buildBars(70, 70, 70, 70)) == 1000 + 30
assert brk.getEquityWithBars(self.buildBars(200, 200, 200, 200)) == 1000 - 100
# Buy 2 and earn 50
order = brk.createMarketOrder(broker.Order.Action.BUY_TO_COVER, BaseTestCase.TestInstrument, 2)
brk.placeOrder(order)
brk.onBars(self.buildBars(50, 50, 50, 50))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert brk.getCash() == 1000 # +50 from short sell operation, -50 from buy operation.
assert brk.getEquityWithBars(self.buildBars(50, 50, 50, 50)) == 1000 + 50
assert brk.getEquityWithBars(self.buildBars(70, 70, 70, 70)) == 1000 + 50 + 20
# Sell 1 and earn 50
order = brk.createMarketOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(100, 100, 100, 100))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert brk.getEquityWithBars(self.buildBars(70, 70, 70, 70)) == 1000 + 50 + 50
def testSellShort_3(self):
brk = backtesting.Broker(100, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy 1
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(100, 100, 100, 100))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert brk.getCash() == 0
# Sell 2
order = brk.createMarketOrder(broker.Order.Action.SELL_SHORT, BaseTestCase.TestInstrument, 2)
brk.placeOrder(order)
brk.onBars(self.buildBars(100, 100, 100, 100))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert brk.getShares(BaseTestCase.TestInstrument) == -1
assert brk.getCash() == 200
# Buy 1
order = brk.createMarketOrder(broker.Order.Action.BUY_TO_COVER, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(100, 100, 100, 100))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 0
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert brk.getCash() == 100
def testSellShortWithCommission(self):
sharePrice = 100
commission = 10
brk = backtesting.Broker(1010, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE), commission=backtesting.FixedCommission(commission))
# Sell 10 shares
order = brk.createMarketOrder(broker.Order.Action.SELL_SHORT, BaseTestCase.TestInstrument, 10)
brk.placeOrder(order)
brk.onBars(self.buildBars(sharePrice, sharePrice, sharePrice, sharePrice))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 10
assert brk.getCash() == 2000
assert brk.getShares(BaseTestCase.TestInstrument) == -10
# Buy the 10 shares sold short plus 9 extra
order = brk.createMarketOrder(broker.Order.Action.BUY_TO_COVER, BaseTestCase.TestInstrument, 19)
brk.placeOrder(order)
brk.onBars(self.buildBars(sharePrice, sharePrice, sharePrice, sharePrice))
assert order.isFilled()
assert order.getExecutionInfo().getCommission() == 10
assert brk.getShares(BaseTestCase.TestInstrument) == 9
assert brk.getCash() == sharePrice - commission
def testCancel(self):
brk = backtesting.Broker(100, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.cancelOrder(order)
brk.onBars(self.buildBars(10, 10, 10, 10))
assert order.isCanceled()
def testReSubmit(self):
brk = backtesting.Broker(1000, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1, False)
brk.placeOrder(order)
assert not order.isDirty()
order.setFillOnClose(True)
assert order.isDirty()
brk.placeOrder(order) # Re-submit the order after changing it.
assert not order.isDirty()
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 12
class LimitOrderTestCase(BaseTestCase):
def testBuyAndSell_HitTargetPrice(self):
brk = backtesting.Broker(20, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 10, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(12, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 10
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
# Sell
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 15, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 17, 8, 10))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 15
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 25
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testBuyAndSell_GetBetterPrice(self):
brk = backtesting.Broker(20, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 14, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(12, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 12
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 8
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
# Sell
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 15, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(16, 17, 8, 10))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 16
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 24
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testBuyAndSell_GappingBars(self):
brk = backtesting.Broker(20, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Bar is below the target price.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 20, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 10))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 10
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
# Sell. Bar is above the target price.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 30, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(35, 40, 32, 35))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 35
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 45
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testFailToBuy(self):
brk = backtesting.Broker(5, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
order = brk.createLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 5, 1)
# Fail to buy (couldn't get specific price).
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isAccepted()
assert order.getExecutionInfo() == None
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 0
# Fail to buy (couldn't get specific price). Canceled due to session close.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.onBars(self.buildBars(11, 15, 8, 12, True))
assert order.isCanceled()
assert order.getExecutionInfo() == None
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testBuy_GTC(self):
brk = backtesting.Broker(10, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
order = brk.createLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 4, 2)
order.setGoodTillCanceled(True)
# Fail to buy (couldn't get specific price).
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.placeOrder(order)
# Set sessionClose to True to test that the order doesn't get canceled.
brk.onBars(self.buildBars(10, 15, 8, 12, True))
assert order.isAccepted()
assert order.getExecutionInfo() == None
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 10
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 0
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.onBars(self.buildBars(2, 15, 1, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 2
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 6
assert brk.getShares(BaseTestCase.TestInstrument) == 2
assert cb.eventCount == 1
def testReSubmit(self):
brk = backtesting.Broker(10, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
order = brk.createLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1, 1)
order.setGoodTillCanceled(True)
# Fail to buy (couldn't get specific price).
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.placeOrder(order)
assert not order.isDirty()
order.setLimitPrice(4)
assert order.isDirty()
brk.placeOrder(order)
assert not order.isDirty()
order.setQuantity(2)
assert order.isDirty()
brk.placeOrder(order)
assert not order.isDirty()
# Set sessionClose to True to test that the order doesn't get canceled.
brk.onBars(self.buildBars(10, 15, 8, 12, True))
assert order.isAccepted()
assert order.getExecutionInfo() == None
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 10
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 0
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
brk.onBars(self.buildBars(2, 15, 1, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 2
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 6
assert brk.getShares(BaseTestCase.TestInstrument) == 2
assert cb.eventCount == 1
class StopOrderTestCase(BaseTestCase):
def testLongPosStopLoss(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
# Create stop loss order.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 9, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 10, 12)) # Stop loss not hit.
assert not order.isFilled()
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 0
brk.onBars(self.buildBars(10, 15, 8, 12)) # Stop loss hit.
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 9
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5+9
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testLongPosStopLoss_GappingBars(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
# Create stop loss order.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 9, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 10, 12)) # Stop loss not hit.
assert not order.isFilled()
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 0
brk.onBars(self.buildBars(5, 8, 4, 7)) # Stop loss hit.
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 5
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5+5 # Fill the stop loss order at open price.
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testShortPosStopLoss(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Sell short
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.SELL_SHORT, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 15+10
assert brk.getShares(BaseTestCase.TestInstrument) == -1
assert cb.eventCount == 1
# Create stop loss order.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopOrder(broker.Order.Action.BUY_TO_COVER, BaseTestCase.TestInstrument, 11, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(8, 10, 7, 9)) # Stop loss not hit.
assert not order.isFilled()
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 15+10
assert brk.getShares(BaseTestCase.TestInstrument) == -1
assert cb.eventCount == 0
brk.onBars(self.buildBars(10, 15, 8, 12)) # Stop loss hit.
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 11
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 15-1
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testShortPosStopLoss_GappingBars(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Sell short
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.SELL_SHORT, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 15+10
assert brk.getShares(BaseTestCase.TestInstrument) == -1
assert cb.eventCount == 1
# Create stop loss order.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopOrder(broker.Order.Action.BUY_TO_COVER, BaseTestCase.TestInstrument, 11, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(8, 10, 7, 9)) # Stop loss not hit.
assert not order.isFilled()
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 15+10
assert brk.getShares(BaseTestCase.TestInstrument) == -1
assert cb.eventCount == 0
brk.onBars(self.buildBars(15, 20, 13, 14)) # Stop loss hit.
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 15
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 15-5
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
def testReSubmit(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createMarketOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, 1)
brk.placeOrder(order)
brk.onBars(self.buildBars(10, 15, 8, 12))
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
assert order.getExecutionInfo().getCommission() == 0
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 1
# Create stop loss order.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, 2, 1)
brk.placeOrder(order)
assert not order.isDirty()
order.setStopPrice(9)
assert order.isDirty()
brk.placeOrder(order)
assert not order.isDirty()
brk.onBars(self.buildBars(10, 15, 10, 12)) # Stop loss not hit.
assert not order.isFilled()
assert len(brk.getPendingOrders()) == 1
assert brk.getCash() == 5
assert brk.getShares(BaseTestCase.TestInstrument) == 1
assert cb.eventCount == 0
brk.onBars(self.buildBars(10, 15, 8, 12)) # Stop loss hit.
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 9
assert len(brk.getPendingOrders()) == 0
assert brk.getCash() == 5+9
assert brk.getShares(BaseTestCase.TestInstrument) == 0
assert cb.eventCount == 1
class StopLimitOrderTestCase(BaseTestCase):
def testFillOpen(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 10. Buy <= 12.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=10, limitPrice=12, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(8, 9, 7, 8))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(13, 15, 13, 14))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit (bars include the price). Fill at open price.
brk.onBars(self.buildBars(11, 15, 10, 14))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 11
# Sell. Stop <= 8. Sell >= 6.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=8, limitPrice=6, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(9, 10, 9, 10))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(4, 5, 3, 4))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit (bars include the price). Fill at open price.
brk.onBars(self.buildBars(7, 8, 6, 7))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 7
def testFillOpen_GappingBars(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 10. Buy <= 12.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=10, limitPrice=12, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(8, 9, 7, 8))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(13, 18, 13, 17))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit (bars don't include the price). Fill at open price.
brk.onBars(self.buildBars(7, 9, 6, 8))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 7
# Sell. Stop <= 8. Sell >= 6.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=8, limitPrice=6, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(9, 10, 9, 10))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(4, 5, 3, 4))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit (bars don't include the price). Fill at open price.
brk.onBars(self.buildBars(10, 12, 8, 10))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
def testFillLimit(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 10. Buy <= 12.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=10, limitPrice=12, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(8, 9, 7, 8))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(13, 15, 13, 14))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at limit price.
brk.onBars(self.buildBars(13, 15, 10, 14))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 12
# Sell. Stop <= 8. Sell >= 6.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=8, limitPrice=6, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(9, 10, 9, 10))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(4, 5, 3, 4))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at limit price.
brk.onBars(self.buildBars(5, 7, 5, 6))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 6
def testHitStopAndLimit(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 10. Buy <= 12.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=10, limitPrice=12, quantity=1)
brk.placeOrder(order)
# Stop price hit. Limit price hit. Fill at stop price.
brk.onBars(self.buildBars(9, 15, 8, 14))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
# Sell. Stop <= 8. Sell >= 6.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=8, limitPrice=6, quantity=1)
brk.placeOrder(order)
# Stop price hit. Limit price hit. Fill at stop price.
brk.onBars(self.buildBars(9, 10, 5, 8))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 8
def testInvertedPrices_FillOpen(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 12. Buy <= 10.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=12, limitPrice=10, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(8, 9, 7, 8))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(11, 12, 10.5, 11))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at open price.
brk.onBars(self.buildBars(9, 15, 8, 14))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 9
# Sell. Stop <= 6. Sell >= 8.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=6, limitPrice=8, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(9, 10, 9, 10))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(7, 7, 6, 7))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at open price.
brk.onBars(self.buildBars(9, 10, 8, 9))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 9
def testInvertedPrices_FillOpen_GappingBars(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 12. Buy <= 10.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=12, limitPrice=10, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(8, 9, 7, 8))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(11, 12, 10.5, 11))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at open price.
brk.onBars(self.buildBars(7, 9, 6, 8))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 7
# Sell. Stop <= 6. Sell >= 8.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=6, limitPrice=8, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(9, 10, 9, 10))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(7, 7, 6, 7))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at open price.
brk.onBars(self.buildBars(10, 10, 9, 9))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
def testInvertedPrices_FillLimit(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 12. Buy <= 10.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=12, limitPrice=10, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(8, 9, 7, 8))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(11, 12, 10.5, 11))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at limit price.
brk.onBars(self.buildBars(11, 13, 8, 9))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
# Sell. Stop <= 6. Sell >= 8.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=6, limitPrice=8, quantity=1)
brk.placeOrder(order)
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(9, 10, 9, 10))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(7, 7, 6, 7))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit. Fill at limit price.
brk.onBars(self.buildBars(7, 10, 6, 9))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 8
def testInvertedPrices_HitStopAndLimit(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 12. Buy <= 10.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=12, limitPrice=10, quantity=1)
brk.placeOrder(order)
# Stop price hit. Limit price hit. Fill at limit price.
brk.onBars(self.buildBars(9, 15, 8, 14))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 10
# Sell. Stop <= 6. Sell >= 8.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.SELL, BaseTestCase.TestInstrument, stopPrice=6, limitPrice=8, quantity=1)
brk.placeOrder(order)
# Stop price hit. Limit price hit. Fill at limit price.
brk.onBars(self.buildBars(6, 10, 5, 7))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 8
def testReSubmit(self):
brk = backtesting.Broker(15, barFeed=barfeed.BarFeed(barfeed.Frequency.MINUTE))
# Buy. Stop >= 10. Buy <= 12.
cb = Callback()
brk.getOrderUpdatedEvent().subscribe(cb.onOrderUpdated)
order = brk.createStopLimitOrder(broker.Order.Action.BUY, BaseTestCase.TestInstrument, stopPrice=1, limitPrice=1, quantity=1)
brk.placeOrder(order)
assert not order.isDirty()
order.setLimitPrice(12)
assert order.isDirty()
brk.placeOrder(order)
assert not order.isDirty()
order.setStopPrice(10)
assert order.isDirty()
brk.placeOrder(order)
assert not order.isDirty()
# Stop price not hit. Limit price not hit.
brk.onBars(self.buildBars(8, 9, 7, 8))
assert not order.isLimitOrderActive()
assert order.isAccepted()
# Stop price hit. Limit price not hit.
brk.onBars(self.buildBars(13, 15, 13, 14))
assert order.isLimitOrderActive()
assert order.isAccepted()
# Limit price hit (bars include the price). Fill at open price.
brk.onBars(self.buildBars(11, 15, 10, 14))
assert order.isLimitOrderActive()
assert order.isFilled()
assert order.getExecutionInfo().getPrice() == 11
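# --- Editor-added usage sketch (not part of the original test module) ---
# A minimal, hedged walk-through of the market-order flow that MarketOrderTestCase
# exercises above. It reuses only names that already appear in this file
# (backtesting.Broker, barfeed.BarFeed, broker.Order.Action, bar.Bar, bar.Bars);
# the instrument name and the prices are arbitrary illustration values.
def _example_market_order_flow():
    feed = barfeed.BarFeed(barfeed.Frequency.MINUTE)
    brk = backtesting.Broker(1000, barFeed=feed)
    order = brk.createMarketOrder(broker.Order.Action.BUY, "orcl", 1)
    brk.placeOrder(order)
    # Build a single one-instrument bar set by hand, mirroring BaseTestCase.buildBars:
    # Bar(dateTime, open, high, low, close, volume, adjClose).
    bar_ = bar.Bar(datetime.datetime(2011, 1, 2), 10, 15, 8, 12, 120, 12)
    bar_.setSessionClose(False)
    brk.onBars(bar.Bars({"orcl": bar_}))
    # The market order fills at the bar's open price, exactly as asserted in the tests above.
    assert order.isFilled()
    assert order.getExecutionInfo().getPrice() == 10
    assert brk.getShares("orcl") == 1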
| 42.347556
| 141
| 0.652568
| 5,238
| 47,641
| 5.923444
| 0.051165
| 0.070906
| 0.039385
| 0.066652
| 0.910111
| 0.903407
| 0.893351
| 0.887163
| 0.882586
| 0.865794
| 0
| 0.039572
| 0.233517
| 47,641
| 1,124
| 142
| 42.385231
| 0.810111
| 0.089125
| 0
| 0.818399
| 0
| 0
| 0.000092
| 0
| 0
| 0
| 0
| 0
| 0.491039
| 1
| 0.046595
| false
| 0
| 0.008363
| 0
| 0.066906
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 97ec9ab8668ce197916d039eac7a313fe7ff2f59
| 98
| py
| Python
| menpo/landmark/__init__.py
| jacksoncsy/menpo
| 3cac491fe30454935ed12fcaa89f453c5f6ec878
| ["BSD-3-Clause"]
| null
| null
| null
| menpo/landmark/__init__.py
| jacksoncsy/menpo
| 3cac491fe30454935ed12fcaa89f453c5f6ec878
| ["BSD-3-Clause"]
| null
| null
| null
| menpo/landmark/__init__.py
| jacksoncsy/menpo
| 3cac491fe30454935ed12fcaa89f453c5f6ec878
| ["BSD-3-Clause"]
| 1
| 2021-04-14T12:09:00.000Z
| 2021-04-14T12:09:00.000Z
|
from menpo.landmark.base import LandmarkManager, Landmarkable
from menpo.landmark.labels import *
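# Editor note (illustrative, not part of the original module): the two imports above simply
# re-export the landmark API at package level, so downstream code can import it directly
# from menpo.landmark, e.g.:
#
#     from menpo.landmark import LandmarkManager, Landmarkable
#
# No further usage is shown here because the constructor signatures depend on the menpo version.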
| 32.666667
| 61
| 0.846939
| 12
| 98
| 6.916667
| 0.666667
| 0.216867
| 0.409639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091837
| 98
| 2
| 62
| 49
| 0.932584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 3f307f5e5f02aa91a849e2919cdf305968878995
| 41,694
| py
| Python
| System/String/__init__.py
| Grim-es/udon-pie-auto-completion
| c2cd86554ed615cdbbb01e19fa40665eafdfaedc
| ["MIT"]
| null
| null
| null
| System/String/__init__.py
| Grim-es/udon-pie-auto-completion
| c2cd86554ed615cdbbb01e19fa40665eafdfaedc
| ["MIT"]
| null
| null
| null
| System/String/__init__.py
| Grim-es/udon-pie-auto-completion
| c2cd86554ed615cdbbb01e19fa40665eafdfaedc
| ["MIT"]
| null
| null
| null
|
from typing import overload
from UdonPie import System
from UdonPie.Undefined import *
class String:
def __new__(cls, arg1=None):
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1):
'''
:param arg1: Undefined variable
:type arg1: SystemCharAsterix.SystemCharAsterix
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1, arg2, arg3):
'''
:param arg1: Undefined variable
:type arg1: SystemCharAsterix.SystemCharAsterix
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1):
'''
:param arg1: Undefined variable
:type arg1: SystemSByteAsterix.SystemSByteAsterix
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1, arg2, arg3):
'''
:param arg1: Undefined variable
:type arg1: SystemSByteAsterix.SystemSByteAsterix
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1, arg2, arg3, arg4):
'''
:param arg1: Undefined variable
:type arg1: SystemSByteAsterix.SystemSByteAsterix
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:param arg4: Undefined variable
:type arg4: SystemTextEncoding.SystemTextEncoding
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1, arg2, arg3):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ctor(arg1, arg2):
'''
:param arg1: Char
:type arg1: System.Char
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def ctor(arg1=None, arg2=None, arg3=None, arg4=None):
pass
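# Editor note (illustrative, not part of the original stub file): the block above shows the
# pattern repeated for every member of this class -- several @overload-decorated signatures
# whose docstrings spell out the UdonSharp parameter and return types, followed by a single
# undecorated catch-all definition whose body is just `pass`. Only the catch-all binding
# survives at runtime; the overloads exist purely so IDEs can offer typed auto-completion.
# A hypothetical call site (not taken from this repository) might look like:
#
#     s = String.ctor(some_char_array)          # resolves against the CharArray overload
#     joined = String.Join(", ", some_strings)  # see the Join overloads further below
#
# Both lines are illustration only; the real arguments come from Udon/UdonSharp objects.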
@staticmethod
def op_Addition(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def op_Equality(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def op_Inequality(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def get_Empty():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Join(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: StringArray
:type arg2: System.StringArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Join(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: ObjectArray
:type arg2: System.ObjectArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Join(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Undefined variable
:type arg2: SystemCollectionsGenericIEnumerable.SystemCollectionsGenericIEnumerable
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Join(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Undefined variable
:type arg2: SystemCollectionsGenericIEnumerable.SystemCollectionsGenericIEnumerable
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Join(arg1, arg2, arg3, arg4):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: StringArray
:type arg2: System.StringArray
:param arg3: Int32
:type arg3: System.Int32 or int
:param arg4: Int32
:type arg4: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Join(arg1=None, arg2=None, arg3=None, arg4=None):
pass
@staticmethod
@overload
def Equals(arg1):
'''
:param arg1: Object
:type arg1: System.Object
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def Equals(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def Equals(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: StringComparison
:type arg2: System.StringComparison
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def Equals(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def Equals(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:param arg3: StringComparison
:type arg3: System.StringComparison
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def Equals(arg1=None, arg2=None, arg3=None):
pass
@staticmethod
def get_Chars(arg1):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:returns: Char
:rtype: System.Char
'''
pass
@staticmethod
def CopyTo(arg1, arg2, arg3, arg4):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:param arg2: CharArray
:type arg2: System.CharArray
:param arg3: Int32
:type arg3: System.Int32 or int
:param arg4: Int32
:type arg4: System.Int32 or int
'''
pass
@staticmethod
@overload
def ToCharArray():
'''
:returns: CharArray
:rtype: System.CharArray
'''
pass
@staticmethod
@overload
def ToCharArray(arg1, arg2):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: CharArray
:rtype: System.CharArray
'''
pass
@staticmethod
def ToCharArray(arg1=None, arg2=None):
pass
@staticmethod
def IsNullOrEmpty(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def IsNullOrWhiteSpace(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def GetHashCode():
'''
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def get_Length():
'''
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Split(arg1):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:returns: StringArray
:rtype: System.StringArray
'''
pass
@staticmethod
@overload
def Split(arg1, arg2):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: StringArray
:rtype: System.StringArray
'''
pass
@staticmethod
@overload
def Split(arg1, arg2):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: StringSplitOptions
:type arg2: System.StringSplitOptions
:returns: StringArray
:rtype: System.StringArray
'''
pass
@staticmethod
@overload
def Split(arg1, arg2, arg3):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: StringSplitOptions
:type arg3: System.StringSplitOptions
:returns: StringArray
:rtype: System.StringArray
'''
pass
@staticmethod
@overload
def Split(arg1, arg2):
'''
:param arg1: StringArray
:type arg1: System.StringArray
:param arg2: StringSplitOptions
:type arg2: System.StringSplitOptions
:returns: StringArray
:rtype: System.StringArray
'''
pass
@staticmethod
@overload
def Split(arg1, arg2, arg3):
'''
:param arg1: StringArray
:type arg1: System.StringArray
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: StringSplitOptions
:type arg3: System.StringSplitOptions
:returns: StringArray
:rtype: System.StringArray
'''
pass
@staticmethod
def Split(arg1=None, arg2=None, arg3=None):
pass
@staticmethod
@overload
def Substring(arg1):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Substring(arg1, arg2):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Substring(arg1=None, arg2=None):
pass
@staticmethod
@overload
def Trim(arg1):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Trim():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Trim(arg1=None):
pass
@staticmethod
def TrimStart(arg1):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def TrimEnd(arg1):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def IsNormalized():
'''
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def IsNormalized(arg1):
'''
:param arg1: NormalizationForm
:type arg1: System.NormalizationForm
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def IsNormalized(arg1=None):
pass
@staticmethod
@overload
def Normalize():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Normalize(arg1):
'''
:param arg1: NormalizationForm
:type arg1: System.NormalizationForm
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Normalize(arg1=None):
pass
@staticmethod
@overload
def Compare(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:param arg3: Boolean
:type arg3: System.Boolean or bool
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:param arg3: StringComparison
:type arg3: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3, arg4):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:param arg3: CultureInfo
:type arg3: System.CultureInfo
:param arg4: CompareOptions
:type arg4: System.CompareOptions
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3, arg4):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:param arg3: Boolean
:type arg3: System.Boolean or bool
:param arg4: CultureInfo
:type arg4: System.CultureInfo
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3, arg4, arg5):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: String
:type arg3: System.String or str
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Int32
:type arg5: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3, arg4, arg5, arg6):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: String
:type arg3: System.String or str
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Int32
:type arg5: System.Int32 or int
:param arg6: Boolean
:type arg6: System.Boolean or bool
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3, arg4, arg5, arg6, arg7):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: String
:type arg3: System.String or str
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Int32
:type arg5: System.Int32 or int
:param arg6: Boolean
:type arg6: System.Boolean or bool
:param arg7: CultureInfo
:type arg7: System.CultureInfo
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3, arg4, arg5, arg6, arg7):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: String
:type arg3: System.String or str
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Int32
:type arg5: System.Int32 or int
:param arg6: CultureInfo
:type arg6: System.CultureInfo
:param arg7: CompareOptions
:type arg7: System.CompareOptions
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Compare(arg1, arg2, arg3, arg4, arg5, arg6):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: String
:type arg3: System.String or str
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Int32
:type arg5: System.Int32 or int
:param arg6: StringComparison
:type arg6: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def Compare(arg1=None, arg2=None, arg3=None, arg4=None, arg5=None, arg6=None, arg7=None):
pass
@staticmethod
@overload
def CompareTo(arg1):
'''
:param arg1: Object
:type arg1: System.Object
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def CompareTo(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def CompareTo(arg1=None):
pass
@staticmethod
@overload
def CompareOrdinal(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def CompareOrdinal(arg1, arg2, arg3, arg4, arg5):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: String
:type arg3: System.String or str
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Int32
:type arg5: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def CompareOrdinal(arg1=None, arg2=None, arg3=None, arg4=None, arg5=None):
pass
@staticmethod
def Contains(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def EndsWith(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def EndsWith(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: StringComparison
:type arg2: System.StringComparison
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def EndsWith(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Boolean
:type arg2: System.Boolean or bool
:param arg3: CultureInfo
:type arg3: System.CultureInfo
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def EndsWith(arg1=None, arg2=None, arg3=None):
pass
@staticmethod
@overload
def IndexOf(arg1):
'''
:param arg1: Char
:type arg1: System.Char
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1, arg2):
'''
:param arg1: Char
:type arg1: System.Char
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1, arg2, arg3):
'''
:param arg1: Char
:type arg1: System.Char
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: StringComparison
:type arg2: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: StringComparison
:type arg3: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOf(arg1, arg2, arg3, arg4):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:param arg4: StringComparison
:type arg4: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def IndexOf(arg1=None, arg2=None, arg3=None, arg4=None):
pass
@staticmethod
@overload
def IndexOfAny(arg1):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOfAny(arg1, arg2):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def IndexOfAny(arg1, arg2, arg3):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def IndexOfAny(arg1=None, arg2=None, arg3=None):
pass
@staticmethod
@overload
def LastIndexOf(arg1):
'''
:param arg1: Char
:type arg1: System.Char
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1, arg2):
'''
:param arg1: Char
:type arg1: System.Char
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1, arg2, arg3):
'''
:param arg1: Char
:type arg1: System.Char
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: StringComparison
:type arg2: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: StringComparison
:type arg3: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOf(arg1, arg2, arg3, arg4):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:param arg4: StringComparison
:type arg4: System.StringComparison
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def LastIndexOf(arg1=None, arg2=None, arg3=None, arg4=None):
pass
@staticmethod
@overload
def LastIndexOfAny(arg1):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOfAny(arg1, arg2):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def LastIndexOfAny(arg1, arg2, arg3):
'''
:param arg1: CharArray
:type arg1: System.CharArray
:param arg2: Int32
:type arg2: System.Int32 or int
:param arg3: Int32
:type arg3: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def LastIndexOfAny(arg1=None, arg2=None, arg3=None):
pass
@staticmethod
@overload
def PadLeft(arg1):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def PadLeft(arg1, arg2):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:param arg2: Char
:type arg2: System.Char
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def PadLeft(arg1=None, arg2=None):
pass
@staticmethod
@overload
def PadRight(arg1):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def PadRight(arg1, arg2):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:param arg2: Char
:type arg2: System.Char
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def PadRight(arg1=None, arg2=None):
pass
@staticmethod
@overload
def StartsWith(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def StartsWith(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: StringComparison
:type arg2: System.StringComparison
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def StartsWith(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Boolean
:type arg2: System.Boolean or bool
:param arg3: CultureInfo
:type arg3: System.CultureInfo
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def StartsWith(arg1=None, arg2=None, arg3=None):
pass
@staticmethod
@overload
def ToLower():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ToLower(arg1):
'''
:param arg1: CultureInfo
:type arg1: System.CultureInfo
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def ToLower(arg1=None):
pass
@staticmethod
def ToLowerInvariant():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ToUpper():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ToUpper(arg1):
'''
:param arg1: CultureInfo
:type arg1: System.CultureInfo
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def ToUpper(arg1=None):
pass
@staticmethod
def ToUpperInvariant():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ToString():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def ToString(arg1):
'''
:param arg1: IFormatProvider
:type arg1: System.IFormatProvider
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def ToString(arg1=None):
pass
@staticmethod
def Clone():
'''
:returns: Object
:rtype: System.Object
'''
pass
@staticmethod
def Insert(arg1, arg2):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:param arg2: String
:type arg2: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Replace(arg1, arg2):
'''
:param arg1: Char
:type arg1: System.Char
:param arg2: Char
:type arg2: System.Char
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Replace(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Replace(arg1=None, arg2=None):
pass
@staticmethod
@overload
def Remove(arg1, arg2):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:param arg2: Int32
:type arg2: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Remove(arg1):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Remove(arg1=None, arg2=None):
pass
@staticmethod
@overload
def Format(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Object
:type arg2: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Format(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Object
:type arg2: System.Object
:param arg3: Object
:type arg3: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Format(arg1, arg2, arg3, arg4):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: Object
:type arg2: System.Object
:param arg3: Object
:type arg3: System.Object
:param arg4: Object
:type arg4: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Format(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: ObjectArray
:type arg2: System.ObjectArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Format(arg1, arg2, arg3):
'''
:param arg1: IFormatProvider
:type arg1: System.IFormatProvider
:param arg2: String
:type arg2: System.String or str
:param arg3: Object
:type arg3: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Format(arg1, arg2, arg3, arg4):
'''
:param arg1: IFormatProvider
:type arg1: System.IFormatProvider
:param arg2: String
:type arg2: System.String or str
:param arg3: Object
:type arg3: System.Object
:param arg4: Object
:type arg4: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Format(arg1, arg2, arg3, arg4, arg5):
'''
:param arg1: IFormatProvider
:type arg1: System.IFormatProvider
:param arg2: String
:type arg2: System.String or str
:param arg3: Object
:type arg3: System.Object
:param arg4: Object
:type arg4: System.Object
:param arg5: Object
:type arg5: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Format(arg1, arg2, arg3):
'''
:param arg1: IFormatProvider
:type arg1: System.IFormatProvider
:param arg2: String
:type arg2: System.String or str
:param arg3: ObjectArray
:type arg3: System.ObjectArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Format(arg1=None, arg2=None, arg3=None, arg4=None, arg5=None):
pass
@staticmethod
def Copy(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1):
'''
:param arg1: Object
:type arg1: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1, arg2):
'''
:param arg1: Object
:type arg1: System.Object
:param arg2: Object
:type arg2: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1, arg2, arg3):
'''
:param arg1: Object
:type arg1: System.Object
:param arg2: Object
:type arg2: System.Object
:param arg3: Object
:type arg3: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1, arg2, arg3, arg4):
'''
:param arg1: Object
:type arg1: System.Object
:param arg2: Object
:type arg2: System.Object
:param arg3: Object
:type arg3: System.Object
:param arg4: Object
:type arg4: System.Object
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1):
'''
:param arg1: ObjectArray
:type arg1: System.ObjectArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1):
'''
:param arg1: IEnumerable
:type arg1: System.Collections.Generic.IEnumerable[System.String]
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1):
'''
:param arg1: IEnumerable
:type arg1: System.Collections.Generic.IEnumerable[T]
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1, arg2):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1, arg2, arg3):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:param arg3: String
:type arg3: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1, arg2, arg3, arg4):
'''
:param arg1: String
:type arg1: System.String or str
:param arg2: String
:type arg2: System.String or str
:param arg3: String
:type arg3: System.String or str
:param arg4: String
:type arg4: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
@overload
def Concat(arg1):
'''
:param arg1: StringArray
:type arg1: System.StringArray
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def Concat(arg1=None, arg2=None, arg3=None, arg4=None):
pass
@staticmethod
def Intern(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def IsInterned(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def GetTypeCode():
'''
:returns: TypeCode
:rtype: System.TypeCode
'''
pass
@staticmethod
def GetEnumerator():
'''
:returns: CharEnumerator
:rtype: System.CharEnumerator
'''
pass
@staticmethod
def GetType():
'''
:returns: Type
:rtype: System.Type
'''
pass
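if __name__ == '__main__':
    # Illustrative sketch (not part of the generated stub): under IronPython, or under
    # pythonnet with clr configured, the static overloads documented above resolve to
    # plain calls on the real System.String type. Aliased to avoid shadowing the stub;
    # the argument values are placeholders.
    # import clr  # pythonnet only
    from System import String as NetString
    print(NetString.Concat("foo", "bar"))          # Concat(String, String)  -> 'foobar'
    print(NetString.Format("{0} {1}", "foo", 42))  # Format(String, Object, Object)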
| 22.672104
| 93
| 0.535521
| 4,083
| 41,694
| 5.466079
| 0.024982
| 0.115423
| 0.119366
| 0.134286
| 0.948965
| 0.937942
| 0.928891
| 0.915539
| 0.886549
| 0.850928
| 0
| 0.050836
| 0.372044
| 41,694
| 1,838
| 94
| 22.68444
| 0.801581
| 0.475752
| 0
| 0.831667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.27
| false
| 0.27
| 0.005
| 0
| 0.276667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
3f49863a362466ea030d4fd7db62648e949ac89e
| 1,965
|
py
|
Python
|
netxlib/cisco/ise/read.py
|
vargyropoulos/netxlib
|
c0f05edf2e7800353a6628beca8dc661b05e885e
|
[
"MIT"
] | null | null | null |
netxlib/cisco/ise/read.py
|
vargyropoulos/netxlib
|
c0f05edf2e7800353a6628beca8dc661b05e885e
|
[
"MIT"
] | null | null | null |
netxlib/cisco/ise/read.py
|
vargyropoulos/netxlib
|
c0f05edf2e7800353a6628beca8dc661b05e885e
|
[
"MIT"
] | null | null | null |
# Import Modules required for this library
import requests
requests.packages.urllib3.disable_warnings()
# ------------------------------------- ------------------------------------- ------------------------------------- -------------------------------------
# Search Infoblox for entries using MAC address filter
def macadress(instance, version, username, password, mac, debug=0):
infoblox_url = 'https://%s/wapi/%s/search?mac_address=%s' % (instance,version,mac)
# Send HTTP GET request to Infoblox
if debug >= 4:
print ("DEBUG - Sending Data to Infoblox via: \n" + infoblox_url +"\n")
response = requests.get(infoblox_url, auth=(username, password), verify=False)
# Check for HTTP response codes other than 200
if response.status_code != 200:
if debug >= 4:
print('Status:', response.status_code)
print('Headers:', response.headers)
print('Error Response:', response.text)
http_response = response.text
else:
http_response = response.json()
return (response.status_code, response.headers, http_response)
# Search Infoblox for entries using network filter
def network(instance, version, username, password, network, debug=0):
infoblox_url = 'https://%s/wapi/%s/search?address=%s' % (instance,version,network)
# Send HTTP GET request to Infoblox
if debug >= 4:
print ("DEBUG - Sending Data to Infoblox via: \n" + infoblox_url +"\n")
response = requests.get(infoblox_url, auth=(username, password), verify=False)
# Check for HTTP response codes other than 200
if response.status_code != 200:
if debug >= 4:
print('Status:', response.status_code)
print('Headers:', response.headers)
print('Error Response:', response.text)
http_response = response.text
else:
http_response = response.json()
return (response.status_code, response.headers, http_response)
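# Illustrative usage sketch (not part of the original module); the hostname,
# WAPI version and credentials below are placeholders.
if __name__ == '__main__':
    status, headers, body = macadress('infoblox.example.com', 'v2.10',
                                      'api_user', 'api_pass',
                                      'aa:bb:cc:dd:ee:ff', debug=4)
    print('MAC search returned HTTP %s' % status)
    status, headers, body = network('infoblox.example.com', 'v2.10',
                                    'api_user', 'api_pass', '10.0.0.0/24')
    print('Network search returned HTTP %s' % status)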
| 37.788462
| 153
| 0.619338
| 225
| 1,965
| 5.32
| 0.266667
| 0.080201
| 0.090226
| 0.043442
| 0.775272
| 0.726817
| 0.726817
| 0.726817
| 0.726817
| 0.670008
| 0
| 0.012117
| 0.202036
| 1,965
| 51
| 154
| 38.529412
| 0.751276
| 0.230025
| 0
| 0.8
| 0
| 0
| 0.146277
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0.133333
| 0.033333
| 0
| 0.166667
| 0.266667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
4544ccba06103206d1bcc1d57bb0a2dabfade432
| 1,137
|
py
|
Python
|
x_rebirth_station_calculator/station_data/ol__bofu_star_complex.py
|
Phipsz/XRebirthStationCalculator
|
ac31c2f5816be34a7df2d7c4eb4bd5e01f7ff835
|
[
"MIT"
] | 1
|
2016-04-17T11:00:22.000Z
|
2016-04-17T11:00:22.000Z
|
x_rebirth_station_calculator/station_data/ol__bofu_star_complex.py
|
Phipsz/XRebirthStationCalculator
|
ac31c2f5816be34a7df2d7c4eb4bd5e01f7ff835
|
[
"MIT"
] | null | null | null |
x_rebirth_station_calculator/station_data/ol__bofu_star_complex.py
|
Phipsz/XRebirthStationCalculator
|
ac31c2f5816be34a7df2d7c4eb4bd5e01f7ff835
|
[
"MIT"
] | null | null | null |
from x_rebirth_station_calculator.station_data import modules
from x_rebirth_station_calculator.station_data.station_base import Station
names = {'L044': 'BoFu Star Complex',
'L049': 'BoFu-Sternenplex'}
smodules = [modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158),
modules.BoFuKitchen(production_method='ar', efficiency=158)]
OL_BoFuStarComplex = Station(names, smodules)
| 54.142857
| 74
| 0.716799
| 118
| 1,137
| 6.720339
| 0.211864
| 0.272383
| 0.423707
| 0.514502
| 0.842371
| 0.842371
| 0.842371
| 0.741488
| 0.741488
| 0.741488
| 0
| 0.044351
| 0.167106
| 1,137
| 20
| 75
| 56.85
| 0.793031
| 0
| 0
| 0.588235
| 0
| 0
| 0.057168
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.117647
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
18e550584d918fae200c00784dba955da5edbce5
| 9,314
|
py
|
Python
|
etl_base/dags/sqlg_jobs_CUS.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | null | null | null |
etl_base/dags/sqlg_jobs_CUS.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | null | null | null |
etl_base/dags/sqlg_jobs_CUS.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | 1
|
2022-03-10T03:47:35.000Z
|
2022-03-10T03:47:35.000Z
|
# -*- coding: utf-8 -*-
# Author : Jesse Wei
# LastUpdate : 2020/10/04
# Impact : Jobs generated by SQLG
# Message : Humanity towards others, we live by sharing. Fear can hold you prisoner, only hope can set you free.
# from __future__ import print_function
import logging
import re
import airflow
from datetime import datetime, timedelta
from airflow.operators.sensors import ExternalTaskSensor
from airflow.operators.python_operator import PythonOperator
from airflow.operators.bash_operator import BashOperator
from airflow.contrib.sensors.file_sensor import FileSensor
from airflow import models
from airflow.models import Variable
from acme.operators.sqlg_oracle import OracleOperatorWithTemplatedParams
from airflow.operators.oracle_operator import OracleOperator
# DB_NAME = 'DWH'
# JOB_TYPE=ODS-MAIN
my_taskid = "HZ_CUST_ACCOUNTS"
HZ_CUST_ACCOUNTS = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
",${END_DT_CHAR}"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "HZ_PARTIES"
HZ_PARTIES = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
",${END_DT_CHAR}"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_DATE_INI"
SDM_DATE_INI = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_MEETING_MINUTES"
SDM_MEETING_MINUTES = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_CUSTOMER_COMPANY_CHECK"
SDM_CUSTOMER_COMPANY_CHECK = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_MODEL"
SDM_MODEL = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_PREMIUM_FREIGHT"
SDM_PREMIUM_FREIGHT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "REF_PRODUCT_TECHNOLOGY"
REF_PRODUCT_TECHNOLOGY = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "REF_SUB_GROUP_CUSTOMER"
REF_SUB_GROUP_CUSTOMER = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "REF_MARKET_SHARE_PRODUCT"
REF_MARKET_SHARE_PRODUCT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "REF_PRODUCT_SEGMENT"
REF_PRODUCT_SEGMENT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "REF_END_CUSTOMER"
REF_END_CUSTOMER = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_MARKET_SHARE"
SDM_MARKET_SHARE = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_MARKET_TAM_CAGR"
SDM_MARKET_TAM_CAGR = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SDM_EMPLOYEE_H"
SDM_EMPLOYEE_H = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
",${END_DT_CHAR}"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_PRODUCT_TECHNOLOGY"
DIM_PRODUCT_TECHNOLOGY = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_SUB_GROUP_CUSTOMER"
DIM_SUB_GROUP_CUSTOMER = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_MARKET_SHARE_PRODUCT"
DIM_MARKET_SHARE_PRODUCT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_PRODUCT_SEGMENT"
DIM_PRODUCT_SEGMENT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_END_CUSTOMER"
DIM_END_CUSTOMER = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_MODEL"
DIM_MODEL = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_CUSTOMER"
DIM_CUSTOMER = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "DIM_GROUP_CUSTOMER"
DIM_GROUP_CUSTOMER = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_MARKET_SHARE"
FCT_MARKET_SHARE = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_MARKET_TAM_CAGR"
FCT_MARKET_TAM_CAGR = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_PREMIUM_FREIGHT"
FCT_PREMIUM_FREIGHT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CCM_RANK"
FCT_CCM_RANK = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CCM_REPORT"
FCT_CCM_REPORT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CCM_BU_REPORT"
FCT_CCM_BU_REPORT = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_MEETING_MINUTES"
FCT_MEETING_MINUTES = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CUSTOMER_COMPANY_CHECK"
FCT_CUSTOMER_COMPANY_CHECK = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
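# Illustrative sketch (assumption, not taken from the generated file): how operators
# like the ones above are typically bound to a DAG and ordered. The dag_id, schedule
# and dependency chain below are hypothetical.
#
# default_args = {'owner': 'airflow', 'start_date': datetime(2020, 10, 4)}
# dag = models.DAG(dag_id='sqlg_jobs_CUS', default_args=default_args,
#                  schedule_interval='@daily')
# for task in (HZ_CUST_ACCOUNTS, HZ_PARTIES, SDM_DATE_INI, DIM_CUSTOMER, FCT_CCM_REPORT):
#     task.dag = dag
# HZ_CUST_ACCOUNTS >> DIM_CUSTOMER >> FCT_CCM_REPORT
# HZ_PARTIES >> DIM_CUSTOMER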
| 28.570552
| 118
| 0.650311
| 1,104
| 9,314
| 5.066123
| 0.102355
| 0.133023
| 0.082067
| 0.077597
| 0.801716
| 0.801716
| 0.801716
| 0.797068
| 0.797068
| 0.797068
| 0
| 0.001201
| 0.195727
| 9,314
| 325
| 119
| 28.658462
| 0.745428
| 0.090402
| 0
| 0.578313
| 1
| 0
| 0.281936
| 0.022299
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.048193
| 0
| 0.048193
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18fc76b4ad5df69d3a136bf203dfa885a49b692a
| 16,051
|
py
|
Python
|
test/test_parameter_functions.py
|
friggog/py-c3d
|
e2d85de15335ded44e906855b081420d32639439
|
[
"MIT"
] | 71
|
2015-04-21T23:18:00.000Z
|
2022-03-30T14:03:59.000Z
|
test/test_parameter_functions.py
|
friggog/py-c3d
|
e2d85de15335ded44e906855b081420d32639439
|
[
"MIT"
] | 34
|
2015-04-06T13:07:46.000Z
|
2022-03-22T07:43:45.000Z
|
test/test_parameter_functions.py
|
friggog/py-c3d
|
e2d85de15335ded44e906855b081420d32639439
|
[
"MIT"
] | 35
|
2015-02-09T18:58:43.000Z
|
2022-03-10T08:56:47.000Z
|
import c3d
import struct
import unittest
import numpy as np
def genByteWordArr(word, shape):
''' Generate a multi-dimensional byte array from a specific word.
'''
arr = np.array(word)
for d in shape[::-1]:
arr = arr[np.newaxis].repeat(d, 0)
return arr, [len(word)] + [d for d in shape]
def genRndByteArr(wordlen, shape, pad):
''' Generate a multi-dimensional byte array with random data.
'''
tot_len = wordlen + pad*wordlen
arr = np.empty(shape, dtype=np.dtype('S'+str(tot_len)))
for i in np.ndindex(arr.shape):
bytes = np.random.randint(21, 126, wordlen).astype(np.uint8)
if pad:
bytes = np.hstack((bytes, np.array([b'255']*wordlen, dtype=np.uint8)))
arr[i] = bytes.tobytes()
return arr, [tot_len] + [d for d in shape]
def genRndFloatArr(shape, rnd, range=(-1e6, 1e6)):
''' Generate a multi-dimensional array of 32 bit floating point data.
'''
return rnd.uniform(range[0], range[1], shape)
class ParameterValueTest(unittest.TestCase):
''' Test reading single parameter values
'''
RANGE_8_BIT = (-127, 127)
RANGE_16_BIT = (-1e4, 1e4)
RANGE_32_BIT = (-1e6, 1e6)
RANGE_8_UNSIGNED_BIT = (0, 255)
RANGE_16_UNSIGNED_BIT = (0, 1e4)
RANGE_32_UNSIGNED_BIT = (0, 1e6)
TEST_ITERATIONS = 1000
def setUp(self):
self.rnd = np.random.default_rng()
self.dtypes = c3d.DataTypes(c3d.PROCESSOR_INTEL)
def test_a_param_float32(self):
''' Verify a single 32 bit floating point value is parsed correctly
'''
for i in range(ParameterValueTest.TEST_ITERATIONS):
value = np.float32(self.rnd.uniform(*ParameterValueTest.RANGE_32_BIT))
bytes = struct.pack('<f', value)
P = c3d.Param('FLOAT_TEST', self.dtypes, bytes_per_element=4, dimensions=[1], bytes=bytes)
value_out = P.float_value
assert value == value_out, 'Parameter float was not read correctly. Was %f, expected %f' %\
(value_out, value)
def test_b_param_int32(self):
''' Verify a single 32 bit integer value is parsed correctly
'''
for i in range(ParameterValueTest.TEST_ITERATIONS):
value = np.int32(self.rnd.uniform(*ParameterValueTest.RANGE_32_BIT))
bytes = struct.pack('<i', value)
P = c3d.Param('INT32_TEST', self.dtypes, bytes_per_element=4, dimensions=[1], bytes=bytes)
value_out = P.int32_value
assert value == value_out, 'Parameter int32 was not read correctly. Was %f, expected %f' %\
(value_out, value)
def test_b_param_uint32(self):
''' Verify a single 32 bit unsigned integer value is parsed correctly
'''
for i in range(ParameterValueTest.TEST_ITERATIONS):
value = np.uint32(self.rnd.uniform(*ParameterValueTest.RANGE_32_UNSIGNED_BIT))
bytes = struct.pack('<I', value)
P = c3d.Param('UINT32_TEST', self.dtypes, bytes_per_element=4, dimensions=[1], bytes=bytes)
value_out = P.int32_value
assert value == value_out, 'Parameter uint32 was not read correctly. Was %f, expected %f' %\
(value_out, value)
def test_b_param_int16(self):
''' Verify a single 16 bit integer value is parsed correctly
'''
for i in range(ParameterValueTest.TEST_ITERATIONS):
value = np.int16(self.rnd.uniform(*ParameterValueTest.RANGE_16_BIT))
bytes = struct.pack('<h', value)
P = c3d.Param('INT16_TEST', self.dtypes, bytes_per_element=2, dimensions=[1], bytes=bytes)
value_out = P.int16_value
assert value == value_out, 'Parameter int16 was not read correctly. Was %f, expected %f' %\
(value_out, value)
def test_b_param_uint16(self):
''' Verify a single 16 bit unsigned integer value is parsed correctly
'''
for i in range(ParameterValueTest.TEST_ITERATIONS):
value = np.uint16(self.rnd.uniform(*ParameterValueTest.RANGE_16_UNSIGNED_BIT))
bytes = struct.pack('<H', value)
P = c3d.Param('UINT16_TEST', self.dtypes, bytes_per_element=2, dimensions=[1], bytes=bytes)
value_out = P.uint16_value
assert value == value_out, 'Parameter uint16 was not read correctly. Was %f, expected %f' %\
(value_out, value)
def test_b_param_int8(self):
''' Verify a single 8 bit integer value is parsed correctly
'''
for i in range(ParameterValueTest.TEST_ITERATIONS):
value = np.int8(self.rnd.uniform(*ParameterValueTest.RANGE_8_BIT))
bytes = struct.pack('<b', value)
P = c3d.Param('INT8_TEST', self.dtypes, bytes_per_element=1, dimensions=[1], bytes=bytes)
value_out = P.int8_value
assert value == value_out, 'Parameter int8 was not read correctly. Was %f, expected %f' %\
(value_out, value)
def test_b_param_uint8(self):
''' Verify a single 8 bit unsigned integer value is parsed correctly
'''
for i in range(ParameterValueTest.TEST_ITERATIONS):
value = np.uint8(self.rnd.uniform(*ParameterValueTest.RANGE_8_UNSIGNED_BIT))
bytes = struct.pack('<B', value)
P = c3d.Param('UINT8_TEST', self.dtypes, bytes_per_element=1, dimensions=[1], bytes=bytes)
value_out = P.uint8_value
assert value == value_out, 'Parameter uint8 was not read correctly. Was %f, expected %f' %\
(value_out, value)
class ParameterArrayTest(unittest.TestCase):
''' Test reading parameter arrays
'''
SHAPES = [[7, 6, 5], [7, 5, 3], [7, 3], [19]]
def setUp(self):
self.rnd = np.random.default_rng()
self.dtypes = c3d.DataTypes(c3d.PROCESSOR_INTEL)
def test_a_parse_float32_array(self):
''' Verify array of 32 bit floating point values are parsed correctly
'''
flt_range = (-1e6, 1e6)
for shape in ParameterArrayTest.SHAPES:
arr = self.rnd.uniform(flt_range[0], flt_range[1], size=shape).astype(np.float32)
P = c3d.Param('FLOAT_TEST', self.dtypes, bytes_per_element=4, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.float_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'float_array' converted shape"
assert np.all(arr.T == arr_out), 'Value mismatch when reading float array'
def test_b_parse_int32_array(self):
''' Verify array of 32 bit integer values are parsed correctly
'''
flt_range = (-1e6, 1e6)
for shape in ParameterArrayTest.SHAPES:
arr = self.rnd.uniform(flt_range[0], flt_range[1], size=shape).astype(np.int32)
P = c3d.Param('INT32_TEST', self.dtypes, bytes_per_element=4, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.int32_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'int32_array' converted shape"
assert np.all(arr.T == arr_out), 'Value mismatch when reading int32 array'
def test_c_parse_uint32_array(self):
''' Verify array of 32 bit unsigned integer values are parsed correctly
'''
flt_range = (0, 1e6)
for shape in ParameterArrayTest.SHAPES:
arr = self.rnd.uniform(flt_range[0], flt_range[1], size=shape).astype(np.uint32)
P = c3d.Param('UINT32_TEST', self.dtypes, bytes_per_element=4, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.uint32_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'uint32_array' converted shape"
assert np.all(arr.T == arr_out), 'Value mismatch when reading uint32 array'
def test_d_parse_int16_array(self):
''' Verify array of 16 bit integer values are parsed correctly
'''
flt_range = (-1e4, 1e4)
for shape in ParameterArrayTest.SHAPES:
arr = self.rnd.uniform(flt_range[0], flt_range[1], size=shape).astype(np.int16)
P = c3d.Param('INT16_TEST', self.dtypes, bytes_per_element=2, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.int16_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'int16_array' converted shape"
assert np.all(arr.T == arr_out), 'Value mismatch when reading int16 array'
def test_e_parse_uint16_array(self):
''' Verify array of 16 bit unsigned integer values are parsed correctly
'''
flt_range = (0, 1e4)
for shape in ParameterArrayTest.SHAPES:
arr = self.rnd.uniform(flt_range[0], flt_range[1], size=shape).astype(np.uint16)
P = c3d.Param('UINT16_TEST', self.dtypes, bytes_per_element=2, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.uint16_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'uint16_array' converted shape"
assert np.all(arr.T == arr_out), 'Value mismatch when reading uint16 array'
def test_e_parse_int8_array(self):
''' Verify array of 8 bit integer values are parsed correctly
'''
flt_range = (-127, 127)
for shape in ParameterArrayTest.SHAPES:
arr = self.rnd.uniform(flt_range[0], flt_range[1], size=shape).astype(np.int8)
P = c3d.Param('INT8_TEST', self.dtypes, bytes_per_element=1, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.int8_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'int8_array' converted shape"
assert np.all(arr.T == arr_out), 'Value mismatch when reading int8 array'
def test_f_parse_uint8_array(self):
''' Verify array of 8 bit unsigned integer values are parsed correctly
'''
flt_range = (0, 255)
for shape in ParameterArrayTest.SHAPES:
arr = self.rnd.uniform(flt_range[0], flt_range[1], size=shape).astype(np.uint8)
P = c3d.Param('UINT8_TEST', self.dtypes, bytes_per_element=1, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.uint8_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'uint8_array' converted shape"
assert np.all(arr.T == arr_out), 'Value mismatch when reading uint8 array'
def test_g_parse_byte_array(self):
''' Verify byte arrays are parsed correctly
'''
word = b'WRIST'
# 1 dims
arr = np.array(word).repeat(3).repeat(3).repeat(3)
P = c3d.Param('BYTE_TEST', self.dtypes, bytes_per_element=1, dimensions=arr.shape, bytes=arr.T.tobytes())
arr_out = P.bytes_array
assert arr.shape[1:] == arr_out.shape, "Mismatch in 'bytes_array' converted shape"
assert np.all(arr.tobytes() == arr_out), 'Mismatch in reading single dimensional byte array'
# 4 dims
arr, shape = genByteWordArr(word, [5, 4, 3])
P = c3d.Param('BYTE_TEST', self.dtypes, bytes_per_element=1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.bytes_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'bytes_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert np.all(arr[i[::-1]] == arr_out[i]), "Mismatch in 'bytes_array' converted value at index %s" % str(i)
# 5 dims
arr, shape = genByteWordArr(word, [6, 5, 4, 3])
P = c3d.Param('BYTE_TEST', self.dtypes, bytes_per_element=1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.bytes_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'bytes_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert np.all(arr[i[::-1]] == arr_out[i]), "Mismatch in 'bytes_array' converted value at index %s" % str(i)
def test_h_parse_string_array(self):
''' Verify repeated word arrays are parsed correctly
'''
word = b'ANCLE'
# 3 dims
arr, shape = genByteWordArr(word, [7, 3])
P = c3d.Param('STRING_TEST', self.dtypes, bytes_per_element=-1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.string_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'string_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert self.dtypes.decode_string(arr[i[::-1]]) == arr_out[i],\
"Mismatch in 'string_array' converted value at index %s" % str(i)
# 4 dims
arr, shape = genByteWordArr(word, [5, 4, 3])
P = c3d.Param('STRING_TEST', self.dtypes, bytes_per_element=-1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.string_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'string_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert self.dtypes.decode_string(arr[i[::-1]]) == arr_out[i],\
"Mismatch in 'string_array' converted value at index %s" % str(i)
# 5 dims
arr, shape = genByteWordArr(word, [6, 5, 4, 3])
P = c3d.Param('STRING_TEST', self.dtypes, bytes_per_element=-1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.string_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'string_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert self.dtypes.decode_string(arr[i[::-1]]) == arr_out[i],\
"Mismatch in 'string_array' converted value at index %s" % str(i)
def test_i_parse_random_string_array(self):
''' Verify random word arrays are parsed correctly
'''
##
# RND
# 3 dims
for wlen in range(10):
arr, shape = genRndByteArr(wlen, [7, 3], wlen > 5)
P = c3d.Param('STRING_TEST', self.dtypes, bytes_per_element=-1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.string_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'string_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert self.dtypes.decode_string(arr[i[::-1]]) == arr_out[i],\
"Mismatch in 'string_array' converted value at index %s" % str(i)
# 4 dims
for wlen in range(10):
arr, shape = genRndByteArr(wlen, [7, 5, 3], wlen > 5)
P = c3d.Param('STRING_TEST', self.dtypes, bytes_per_element=-1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.string_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'string_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert self.dtypes.decode_string(arr[i[::-1]]) == arr_out[i],\
"Mismatch in 'string_array' converted value at index %s" % str(i)
# 5 dims
for wlen in range(10):
arr, shape = genRndByteArr(wlen, [7, 6, 5, 3], wlen > 5)
P = c3d.Param('STRING_TEST', self.dtypes, bytes_per_element=-1, dimensions=shape, bytes=arr.T.tobytes())
arr_out = P.string_array
assert arr.T.shape == arr_out.shape, "Mismatch in 'string_array' converted shape. Was %s, expected %s" %\
(str(arr_out.shape), str(arr.T.shape))
for i in np.ndindex(arr_out.shape):
assert self.dtypes.decode_string(arr[i[::-1]]) == arr_out[i],\
"Mismatch in 'string_array' converted value at index %s" % str(i)
if __name__ == '__main__':
unittest.main()
| 47.488166
| 119
| 0.618591
| 2,231
| 16,051
| 4.295831
| 0.06589
| 0.040067
| 0.036728
| 0.045597
| 0.868531
| 0.858306
| 0.776711
| 0.757617
| 0.747809
| 0.729967
| 0
| 0.031968
| 0.257492
| 16,051
| 337
| 120
| 47.62908
| 0.772193
| 0.093265
| 0
| 0.466667
| 1
| 0
| 0.158463
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 1
| 0.097778
| false
| 0
| 0.017778
| 0
| 0.173333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18fcd155ee3e928dee14f26d3e8a1c43e3fd0b18
| 167
|
py
|
Python
|
cool_defi_bot/__init__.py
|
ryandvill/cool-defi-bot
|
5e513e324db9f626a5c281d44bd7330eadf12889
|
[
"MIT"
] | 8
|
2020-04-13T18:03:09.000Z
|
2021-06-21T11:21:46.000Z
|
cool_defi_bot/__init__.py
|
ryandvill/cool-defi-bot
|
5e513e324db9f626a5c281d44bd7330eadf12889
|
[
"MIT"
] | 4
|
2020-04-01T14:44:09.000Z
|
2020-04-07T11:01:17.000Z
|
cool_defi_bot/__init__.py
|
ryandvill/cool-defi-bot
|
5e513e324db9f626a5c281d44bd7330eadf12889
|
[
"MIT"
] | 4
|
2020-05-20T22:30:00.000Z
|
2021-11-17T21:38:31.000Z
|
from .api import getters
from .api import formatters
from .api import custom_exceptions
from .api import helpers
from .api import api_handlers
| 27.833333
| 34
| 0.826347
| 26
| 167
| 5.230769
| 0.346154
| 0.308824
| 0.573529
| 0.294118
| 0.485294
| 0.485294
| 0.485294
| 0
| 0
| 0
| 0
| 0
| 0.137725
| 167
| 6
| 35
| 27.833333
| 0.944444
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e19733e65ed312bb668c350456946ae90a4dbd3e
| 9,680
|
py
|
Python
|
pypykatz/ldap/cmdhelper.py
|
m0xbf/pypykatz-copy
|
39d8b06861d9ccd615e8107707f56f6556fb15a0
|
[
"MIT"
] | 5
|
2019-04-20T05:34:01.000Z
|
2019-10-12T01:26:09.000Z
|
pypykatz/ldap/cmdhelper.py
|
m0xbf/pypykatz-copy
|
39d8b06861d9ccd615e8107707f56f6556fb15a0
|
[
"MIT"
] | 1
|
2018-09-13T15:20:29.000Z
|
2018-09-13T15:20:29.000Z
|
pypykatz/ldap/cmdhelper.py
|
m0xbf/pypykatz-copy
|
39d8b06861d9ccd615e8107707f56f6556fb15a0
|
[
"MIT"
] | 8
|
2018-09-11T22:02:22.000Z
|
2019-11-27T08:52:20.000Z
|
#!/usr/bin/env python3
#
# Author:
# Tamas Jos (@skelsec)
#
import os
import csv
from pypykatz import logging
"""
LDAP is not part of pypykatz directly.
This is a wrapper for msldap, ldap3 and winsspi packages
"""
class LDAPCMDHelper:
def __init__(self):
self.live_keywords = ['ldap']
self.keywords = ['ldap']
def add_args(self, parser, live_parser):
group = parser.add_parser('ldap', help='LDAP (live) related commands')
group.add_argument('credential', help= 'Credential to be used')
group.add_argument('cmd', choices=['spn', 'asrep','dump','custom'])
group.add_argument('-o','--out-file', help= 'File to store results in')
group.add_argument('-a','--attrs', action='append', help='DUMP and CUSTOM mode only. LDAP attributes to display. Can be stacked')
group.add_argument('-f','--filter', help='CUSTOM mode only. LDAP search filter')
live_group = live_parser.add_parser('ldap', help='LDAP (live) related commands')
live_group.add_argument('-c','--credential', help= 'Credential to be used, if omitted it will use the credentials of the current user. If specified, it will try to impersonate the user. (requires that the target user has a session on the local computer)')
live_group.add_argument('--dc-ip', help= 'IP address or hostname of the LDAP server. Optional. If omitted will use registry to check for the DC.')
live_group.add_argument('cmd', choices=['spn', 'asrep','dump','custom'])
live_group.add_argument('-o','--out-file', help= 'File to store results in')
live_group.add_argument('-a','--attrs', action='append', help='DUMP and CUSTOM mode only. LDAP attributes to display. Can be stacked')
live_group.add_argument('-f','--filter', help='CUSTOM mode only. LDAP search filter')
def execute(self, args):
if args.command in self.keywords:
self.run(args)
if len(self.live_keywords) > 0 and args.command == 'live' and args.module in self.live_keywords:
self.run_live(args)
def run_live(self, args):
from msldap.core import MSLDAPCredential, MSLDAPTarget, MSLDAPConnection
from msldap.ldap_objects import MSADUser
from msldap import logger as msldaplogger
from pypykatz.commons.winapi.machine import LiveMachine
machine = LiveMachine()
if args.credential:
creds = MSLDAPCredential.from_connection_string(args.credential)
else:
creds = MSLDAPCredential.get_dummy_sspi()
if args.dc_ip:
target = MSLDAPTarget(args.dc_ip)
else:
target = MSLDAPTarget(machine.get_domain())
connection = MSLDAPConnection(creds, target)
connection.connect()
try:
adinfo = connection.get_ad_info()
domain = adinfo.distinguishedName.replace('DC=','').replace(',','.')
except Exception as e:
logging.warning('[LDAP] Failed to get domain name from LDAP server. This is not normal, but happens. Reason: %s' % e)
domain = machine.get_domain()
if args.cmd == 'spn':
logging.debug('Enumerating SPN user accounts...')
cnt = 0
if args.out_file:
with open(os.path.join(basefolder,basefile+'_spn_users.txt'), 'w', newline='') as f:
for user in connection.get_all_service_user_objects():
cnt += 1
f.write('%s/%s\r\n' % (domain, user.sAMAccountName))
else:
print('[+] SPN users')
for user in connection.get_all_service_user_objects():
cnt += 1
print('%s/%s' % (domain, user.sAMAccountName))
logging.debug('Enumerated %d SPN user accounts' % cnt)
elif args.cmd == 'asrep':
logging.debug('Enumerating ASREP user accounts...')
ctr = 0
if args.out_file:
with open(os.path.join(basefolder,basefile+'_asrep_users.txt'), 'w', newline='') as f:
for user in connection.get_all_knoreq_user_objects():
ctr += 1
f.write('%s/%s\r\n' % (domain, user.sAMAccountName))
else:
print('[+] ASREP users')
for user in connection.get_all_knoreq_user_objects():
ctr += 1
print('%s/%s' % (domain, user.sAMAccountName))
logging.debug('Enumerated %d ASREP user accounts' % ctr)
elif args.cmd == 'dump':
logging.debug('Enumerating ALL user accounts, this will take some time depending on the size of the domain')
ctr = 0
attrs = args.attrs if args.attrs is not None else MSADUser.TSV_ATTRS
if args.out_file:
with open(os.path.join(basefolder,basefile+'_ldap_users.tsv'), 'w', newline='', encoding ='utf8') as f:
writer = csv.writer(f, delimiter = '\t')
writer.writerow(attrs)
for user in connection.get_all_user_objects():
ctr += 1
writer.writerow(user.get_row(attrs))
else:
logging.debug('Are you sure about this?')
print('[+] Full user dump')
print('\t'.join(attrs))
for user in connection.get_all_user_objects():
ctr += 1
print('\t'.join([str(x) for x in user.get_row(attrs)]))
logging.debug('Enumerated %d user accounts' % ctr)
elif args.cmd == 'custom':
if not args.filter:
raise Exception('Custom LDAP search requires the search filter to be specified!')
if not args.attrs:
raise Exception('Custom LDAP search requires the attributes to be specified!')
logging.debug('Performing search on the AD with the following filter: %s' % args.filter)
logging.debug('Search will contain the following attributes: %s' % ','.join(args.attrs))
ctr = 0
if args.out_file:
with open(os.path.join(basefolder,basefile+'_ldap_custom.tsv'), 'w', newline='') as f:
writer = csv.writer(f, delimiter = '\t')
writer.writerow(args.attrs)
for obj in connection.pagedsearch(args.filter, args.attrs):
ctr += 1
writer.writerow([str(obj['attributes'].get(x, 'N/A')) for x in args.attrs])
else:
for obj in connection.pagedsearch(args.filter, args.attrs):
ctr += 1
print('\t'.join([str(obj['attributes'].get(x, 'N/A')) for x in args.attrs]))
logging.debug('Custom search yielded %d results!' % ctr)
def run(self, args):
from msldap.core import MSLDAPCredential, MSLDAPTarget, MSLDAPConnection
from msldap.ldap_objects import MSADUser
from msldap import logger as msldaplogger
if not args.credential:
raise Exception('You must provide credentials when using ldap in platform independent mode.')
creds = MSLDAPCredential.from_connection_string(args.credential)
target = MSLDAPTarget.from_connection_string(args.credential)
connection = MSLDAPConnection(creds, target)
connection.connect()
try:
adinfo = connection.get_ad_info()
domain = adinfo.distinguishedName.replace('DC=','').replace(',','.')
except Exception as e:
logging.warning('[LDAP] Failed to get domain name from LDAP server. This is not normal, but happens. Reason: %s' % e)
domain = machine.get_domain()
if args.cmd == 'spn':
logging.debug('Enumerating SPN user accounts...')
cnt = 0
if args.out_file:
with open(os.path.join(basefolder,basefile+'_spn_users.txt'), 'w', newline='') as f:
for user in connection.get_all_service_user_objects():
cnt += 1
f.write('%s/%s\r\n' % (domain, user.sAMAccountName))
else:
print('[+] SPN users')
for user in connection.get_all_service_user_objects():
cnt += 1
print('%s/%s' % (domain, user.sAMAccountName))
logging.debug('Enumerated %d SPN user accounts' % cnt)
elif args.cmd == 'asrep':
logging.debug('Enumerating ASREP user accounts...')
ctr = 0
if args.out_file:
with open(os.path.join(basefolder,basefile+'_asrep_users.txt'), 'w', newline='') as f:
for user in connection.get_all_knoreq_user_objects():
ctr += 1
f.write('%s/%s\r\n' % (domain, user.sAMAccountName))
else:
print('[+] ASREP users')
for user in connection.get_all_knoreq_user_objects():
ctr += 1
print('%s/%s' % (domain, user.sAMAccountName))
logging.debug('Enumerated %d ASREP user accounts' % ctr)
elif args.cmd == 'dump':
logging.debug('Enumerating ALL user accounts, this will take some time depending on the size of the domain')
ctr = 0
attrs = args.attrs if args.attrs is not None else MSADUser.TSV_ATTRS
if args.out_file:
with open(os.path.join(basefolder,basefile+'_ldap_users.tsv'), 'w', newline='', encoding ='utf8') as f:
writer = csv.writer(f, delimiter = '\t')
writer.writerow(attrs)
for user in connection.get_all_user_objects():
ctr += 1
writer.writerow(user.get_row(attrs))
else:
logging.debug('Are you sure about this?')
print('[+] Full user dump')
print('\t'.join(attrs))
for user in connection.get_all_user_objects():
ctr += 1
print('\t'.join([str(x) for x in user.get_row(attrs)]))
logging.debug('Enumerated %d user accounts' % ctr)
elif args.cmd == 'custom':
if not args.filter:
raise Exception('Custom LDAP search requires the search filter to be specified!')
if not args.attrs:
raise Exception('Custom LDAP search requires the attributes to be specified!')
logging.debug('Performing search on the AD with the following filter: %s' % args.filter)
logging.debug('Search will contain the following attributes: %s' % ','.join(args.attrs))
ctr = 0
if args.out_file:
with open(os.path.join(basefolder,basefile+'_ldap_custom.tsv'), 'w', newline='') as f:
writer = csv.writer(f, delimiter = '\t')
writer.writerow(args.attrs)
for obj in connection.pagedsearch(args.filter, args.attrs):
ctr += 1
writer.writerow([str(obj['attributes'].get(x, 'N/A')) for x in args.attrs])
else:
for obj in connection.pagedsearch(args.filter, args.attrs):
ctr += 1
print('\t'.join([str(obj['attributes'].get(x, 'N/A')) for x in args.attrs]))
logging.debug('Custom search yielded %d results!' % ctr)
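def build_cli_sketch():
    ''' Illustrative sketch (not part of the original module): how a front end could
    register this helper with argparse and dispatch to it. The parser layout and
    dest names are assumptions modelled on add_args()/execute() above.
    '''
    import argparse
    helper = LDAPCMDHelper()
    parser = argparse.ArgumentParser(description='LDAP helper demo')
    subparsers = parser.add_subparsers(dest='command')
    live_subparsers = subparsers.add_parser('live').add_subparsers(dest='module')
    helper.add_args(subparsers, live_subparsers)
    return parser, helper

# parser, helper = build_cli_sketch()
# args = parser.parse_args(['ldap', '<credential connection string>', 'spn'])
# helper.execute(args)  # requires a reachable LDAP server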
| 38.110236
| 256
| 0.67686
| 1,374
| 9,680
| 4.677584
| 0.150655
| 0.037342
| 0.016804
| 0.035475
| 0.841139
| 0.835849
| 0.825891
| 0.808775
| 0.808775
| 0.780769
| 0
| 0.003676
| 0.184917
| 9,680
| 254
| 257
| 38.110236
| 0.8109
| 0.005269
| 0
| 0.816327
| 0
| 0.020408
| 0.265938
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02551
| false
| 0
| 0.040816
| 0
| 0.071429
| 0.081633
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1bdf9b24e13c3cb4a18a1f3a2c1def4fdee4b3f
| 181
|
py
|
Python
|
tests/parser/aggregates.count.15b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.count.15b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.count.15b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
i(0). i(1).
a(X) | -a(X) :- i(X).
ok :- 0 < #count{X:a(X)}< 2.
:- not ok.
"""
output = """
i(0). i(1).
a(X) | -a(X) :- i(X).
ok :- 0 < #count{X:a(X)}< 2.
:- not ok.
"""
| 13.923077
| 28
| 0.359116
| 40
| 181
| 1.625
| 0.275
| 0.184615
| 0.184615
| 0.123077
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0
| 0.056738
| 0.220994
| 181
| 12
| 29
| 15.083333
| 0.404255
| 0
| 0
| 0.833333
| 0
| 0
| 0.828729
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
bef2bb3bf5da30794955067952387aa59e2020a4
| 117
|
py
|
Python
|
cytominer_eval/__init__.py
|
michaelbornholdt/cytominer-eval
|
97b471dd4141d29bfcb06921cb1e294596c39ecf
|
[
"BSD-3-Clause"
] | 4
|
2020-06-11T20:31:17.000Z
|
2021-02-12T04:12:43.000Z
|
cytominer_eval/__init__.py
|
michaelbornholdt/cytominer-eval
|
97b471dd4141d29bfcb06921cb1e294596c39ecf
|
[
"BSD-3-Clause"
] | 46
|
2020-06-16T11:31:49.000Z
|
2021-12-07T10:52:00.000Z
|
cytominer_eval/__init__.py
|
michaelbornholdt/cytominer-eval
|
97b471dd4141d29bfcb06921cb1e294596c39ecf
|
[
"BSD-3-Clause"
] | 6
|
2020-06-11T18:36:31.000Z
|
2021-04-15T19:38:52.000Z
|
from .evaluate import evaluate
from cytominer_eval import __about__
from cytominer_eval.__about__ import __version__
| 29.25
| 48
| 0.880342
| 15
| 117
| 5.933333
| 0.466667
| 0.292135
| 0.382022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 117
| 3
| 49
| 39
| 0.847619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
833058d5aa0697b0fd9cb6bcc6cb007d48d7e604
| 13,205
|
py
|
Python
|
FlowNet2_src/models/flownet2.py
|
vt-vl-lab/pytorch_flownet2
|
d476e78889b9677473a9591cff678eb6c1dde2b9
|
[
"Apache-2.0"
] | 90
|
2018-02-08T01:58:52.000Z
|
2020-03-21T00:55:34.000Z
|
FlowNet2_src/models/flownet2.py
|
vt-vl-lab/pytorch_flownet2
|
d476e78889b9677473a9591cff678eb6c1dde2b9
|
[
"Apache-2.0"
] | 11
|
2018-02-28T13:45:34.000Z
|
2019-05-24T08:47:01.000Z
|
FlowNet2_src/models/flownet2.py
|
vt-vl-lab/pytorch_flownet2
|
d476e78889b9677473a9591cff678eb6c1dde2b9
|
[
"Apache-2.0"
] | 33
|
2018-02-28T04:44:10.000Z
|
2020-03-11T23:46:46.000Z
|
import torch
import torch.nn as nn
import torch.nn.init as nn_init
from .components import FlowNetC, FlowNetS, FlowNetSD, FlowNetFusion
# (Yuliang) Change directory structure
from .components import tofp16, tofp32, save_grad
from .components import ChannelNorm, Resample2d
class FlowNet2(nn.Module):
def __init__(self,
with_bn=False,
fp16=False,
rgb_max=255.,
div_flow=20.,
grads=None):
super(FlowNet2, self).__init__()
self.with_bn = with_bn
self.div_flow = div_flow
self.rgb_max = rgb_max
self.grads = {} if grads is None else grads
self.channelnorm = ChannelNorm()
# First Block (FlowNetC)
self.flownetc = FlowNetC(with_bn=with_bn, fp16=fp16)
self.upsample1 = nn.Upsample(scale_factor=4, mode='bilinear')
self.resample1 = (nn.Sequential(tofp32(), Resample2d(), tofp16())
if fp16 else Resample2d())
# Block (FlowNetS1)
self.flownets_1 = FlowNetS(with_bn=with_bn)
self.upsample2 = nn.Upsample(scale_factor=4, mode='bilinear')
self.resample2 = (nn.Sequential(tofp32(), Resample2d(), tofp16())
if fp16 else Resample2d())
# Block (FlowNetS2)
self.flownets_2 = FlowNetS(with_bn=with_bn)
# Block (FlowNetSD)
self.flownets_d = FlowNetSD(with_bn=with_bn)
self.upsample3 = nn.Upsample(scale_factor=4, mode='nearest')
self.upsample4 = nn.Upsample(scale_factor=4, mode='nearest')
self.resample3 = (nn.Sequential(tofp32(), Resample2d(), tofp16())
if fp16 else Resample2d())
self.resample4 = (nn.Sequential(tofp32(), Resample2d(), tofp16())
if fp16 else Resample2d())
# Block (FlowNetFusion)
self.flownetfusion = FlowNetFusion(with_bn=with_bn)
for m in self.modules():
if isinstance(m, nn.Conv2d):
if m.bias is not None:
nn_init.uniform(m.bias)
nn_init.xavier_uniform(m.weight)
if isinstance(m, nn.ConvTranspose2d):
if m.bias is not None:
nn_init.uniform(m.bias)
nn_init.xavier_uniform(m.weight)
def forward(self, inputs):
rgb_mean = inputs.contiguous().view(inputs.size()[:2] + (-1, )).mean(
dim=-1).view(inputs.size()[:2] + (1, 1, 1, ))
x = (inputs - rgb_mean) / self.rgb_max
x1 = x[:, :, 0, :, :]
x2 = x[:, :, 1, :, :]
x = torch.cat((x1, x2), dim=1)
# flownetc
flownetc_flow2 = self.flownetc(x)[0]
flownetc_flow = self.upsample1(flownetc_flow2 * self.div_flow)
# warp img1 to img0; magnitude of diff between img0 and warped_img1,
resampled_img1 = self.resample1(x[:, 3:, :, :], flownetc_flow)
diff_img0 = x[:, :3, :, :] - resampled_img1
norm_diff_img0 = self.channelnorm(diff_img0)
# concat img0, img1, img1->img0, flow, diff-mag ;
concat1 = torch.cat(
[x, resampled_img1, flownetc_flow / self.div_flow, norm_diff_img0],
dim=1)
# flownets1
flownets1_flow2 = self.flownets_1(concat1)[0]
flownets1_flow = self.upsample2(flownets1_flow2 * self.div_flow)
# warp img1 to img0 using flownets1; magnitude of diff between img0 and warped_img1
resampled_img1 = self.resample2(x[:, 3:, :, :], flownets1_flow)
diff_img0 = x[:, :3, :, :] - resampled_img1
norm_diff_img0 = self.channelnorm(diff_img0)
# concat img0, img1, img1->img0, flow, diff-mag
concat2 = torch.cat(
(x, resampled_img1, flownets1_flow / self.div_flow,
norm_diff_img0),
dim=1)
# flownets2
flownets2_flow2 = self.flownets_2(concat2)[0]
flownets2_flow = self.upsample4(flownets2_flow2 * self.div_flow)
norm_flownets2_flow = self.channelnorm(flownets2_flow)
diff_flownets2_flow = self.resample4(x[:, 3:, :, :], flownets2_flow)
req_grad = diff_flownets2_flow.requires_grad
if req_grad:
diff_flownets2_flow.register_hook(
save_grad(self.grads, 'diff_flownets2_flow'))
diff_flownets2_img1 = self.channelnorm(
(x[:, :3, :, :] - diff_flownets2_flow))
if req_grad:
diff_flownets2_img1.register_hook(
save_grad(self.grads, 'diff_flownets2_img1'))
# flownetsd
flownetsd_flow2 = self.flownets_d(x)[0]
flownetsd_flow = self.upsample3(flownetsd_flow2 / self.div_flow)
norm_flownetsd_flow = self.channelnorm(flownetsd_flow)
diff_flownetsd_flow = self.resample3(x[:, 3:, :, :], flownetsd_flow)
if req_grad:
diff_flownetsd_flow.register_hook(
save_grad(self.grads, 'diff_flownetsd_flow'))
diff_flownetsd_img1 = self.channelnorm(
(x[:, :3, :, :] - diff_flownetsd_flow))
if req_grad:
diff_flownetsd_img1.register_hook(
save_grad(self.grads, 'diff_flownetsd_img1'))
# concat img1 flownetsd, flownets2, norm_flownetsd, norm_flownets2,
# diff_flownetsd_img1, diff_flownets2_img1
concat3 = torch.cat(
(x[:, :3, :, :], flownetsd_flow, flownets2_flow,
norm_flownetsd_flow, norm_flownets2_flow, diff_flownetsd_img1,
diff_flownets2_img1),
dim=1)
flownetfusion_flow = self.flownetfusion(concat3)
if req_grad:
flownetfusion_flow.register_hook(
save_grad(self.grads, 'flownetfusion_flow'))
return flownetfusion_flow
class FlowNet2C(FlowNetC):
def __init__(self, with_bn=False, fp16=False, rgb_max=255., div_flow=20):
super(FlowNet2C, self).__init__(with_bn, fp16)
self.rgb_max = rgb_max
self.div_flow = div_flow
def forward(self, inputs):
rgb_mean = inputs.contiguous().view(inputs.size()[:2] + (-1, )).mean(
dim=-1).view(inputs.size()[:2] + (1, 1, 1, ))
x = (inputs - rgb_mean) / self.rgb_max
x1 = x[:, :, 0, :, :]
x2 = x[:, :, 1, :, :]
flows = super(FlowNet2C, self).forward(x1, x2)
if self.training:
return flows
else:
return self.upsample1(flows[0] * self.div_flow)
class FlowNet2S(FlowNetS):
def __init__(self, with_bn=False, rgb_max=255., div_flow=20):
super(FlowNet2S, self).__init__(input_channels=6, with_bn=with_bn)
self.rgb_max = rgb_max
self.div_flow = div_flow
def forward(self, inputs):
rgb_mean = inputs.contiguous().view(inputs.size()[:2] + (-1, )).mean(
dim=-1).view(inputs.size()[:2] + (1, 1, 1, ))
x = (inputs - rgb_mean) / self.rgb_max
x = torch.cat((x[:, :, 0, :, :], x[:, :, 1, :, :]), dim=1)
flows = super(FlowNet2S, self).forward(x)
if self.training:
return flows
else:
return self.upsample1(flows[0] * self.div_flow)
class FlowNet2SD(FlowNetSD):
def __init__(self, with_bn=False, rgb_max=255., div_flow=20):
super(FlowNet2SD, self).__init__(with_bn=with_bn)
self.rgb_max = rgb_max
self.div_flow = div_flow
def forward(self, inputs):
rgb_mean = inputs.contiguous().view(inputs.size()[:2] + (-1, )).mean(
dim=-1).view(inputs.size()[:2] + (1, 1, 1, ))
x = (inputs - rgb_mean) / self.rgb_max
x = torch.cat((x[:, :, 0, :, :], x[:, :, 1, :, :]), dim=1)
flows = super(FlowNet2SD, self).forward(x)
if self.training:
return flows
else:
return self.upsample1(flows[0] * self.div_flow)
class FlowNet2CS(nn.Module):
def __init__(self, with_bn=False, fp16=False, rgb_max=255., div_flow=20):
super(FlowNet2CS, self).__init__()
self.with_bn = with_bn
self.fp16 = fp16
self.rgb_max = rgb_max
self.div_flow = div_flow
self.channelnorm = ChannelNorm()
# First Block (FlowNetC)
self.flownetc = FlowNetC(with_bn=with_bn, fp16=fp16)
self.upsample1 = nn.Upsample(scale_factor=4, mode='bilinear')
self.resample1 = (nn.Sequential(tofp32(), Resample2d(), tofp16())
if fp16 else Resample2d())
# Block (FlowNetS1)
self.flownets_1 = FlowNetS(with_bn=with_bn)
self.upsample2 = nn.Upsample(scale_factor=4, mode='bilinear')
for m in self.modules():
if isinstance(m, nn.Conv2d):
if m.bias is not None:
nn_init.uniform(m.bias)
nn_init.xavier_uniform(m.weight)
if isinstance(m, nn.ConvTranspose2d):
if m.bias is not None:
nn_init.uniform(m.bias)
nn_init.xavier_uniform(m.weight)
def forward(self, inputs):
rgb_mean = inputs.contiguous().view(inputs.size()[:2] + (-1, )).mean(
dim=-1).view(inputs.size()[:2] + (1, 1, 1, ))
x = (inputs - rgb_mean) / self.rgb_max
x1 = x[:, :, 0, :, :]
x2 = x[:, :, 1, :, :]
x = torch.cat((x1, x2), dim=1)
# flownetc
flownetc_flow2 = self.flownetc(x)[0]
flownetc_flow = self.upsample1(flownetc_flow2 * self.div_flow)
# warp img1 to img0; magnitude of diff between img0 and warped_img1,
resampled_img1 = self.resample1(x[:, 3:, :, :], flownetc_flow)
diff_img0 = x[:, :3, :, :] - resampled_img1
norm_diff_img0 = self.channelnorm(diff_img0)
# concat img0, img1, img1->img0, flow, diff-mag ;
concat1 = torch.cat(
[x, resampled_img1, flownetc_flow / self.div_flow, norm_diff_img0],
dim=1)
# flownets1
flownets1_flow2 = self.flownets_1(concat1)[0]
flownets1_flow = self.upsample2(flownets1_flow2 * self.div_flow)
return flownets1_flow
class FlowNet2CSS(nn.Module):
def __init__(self, with_bn=False, fp16=False, rgb_max=255., div_flow=20):
super(FlowNet2CSS, self).__init__()
self.with_bn = with_bn
self.fp16 = fp16
self.rgb_max = rgb_max
self.div_flow = div_flow
self.channelnorm = ChannelNorm()
# First Block (FlowNetC)
self.flownetc = FlowNetC(with_bn=with_bn, fp16=fp16)
self.upsample1 = nn.Upsample(scale_factor=4, mode='bilinear')
if fp16:
self.resample1 = nn.Sequential(tofp32(), Resample2d(), tofp16())
else:
self.resample1 = Resample2d()
# Block (FlowNetS1)
self.flownets_1 = FlowNetS(with_bn=with_bn)
self.upsample2 = nn.Upsample(scale_factor=4, mode='bilinear')
if fp16:
self.resample2 = nn.Sequential(tofp32(), Resample2d(), tofp16())
else:
self.resample2 = Resample2d()
# Block (FlowNetS2)
self.flownets_2 = FlowNetS(with_bn=with_bn)
self.upsample3 = nn.Upsample(scale_factor=4, mode='nearest')
for m in self.modules():
if isinstance(m, nn.Conv2d):
if m.bias is not None:
nn_init.uniform(m.bias)
nn_init.xavier_uniform(m.weight)
if isinstance(m, nn.ConvTranspose2d):
if m.bias is not None:
nn_init.uniform(m.bias)
nn_init.xavier_uniform(m.weight)
def forward(self, inputs):
rgb_mean = inputs.contiguous().view(inputs.size()[:2] + (-1, )).mean(
dim=-1).view(inputs.size()[:2] + (1, 1, 1, ))
x = (inputs - rgb_mean) / self.rgb_max
x1 = x[:, :, 0, :, :]
x2 = x[:, :, 1, :, :]
x = torch.cat((x1, x2), dim=1)
# flownetc
flownetc_flow2 = self.flownetc(x)[0]
flownetc_flow = self.upsample1(flownetc_flow2 * self.div_flow)
# warp img1 to img0; magnitude of diff between img0 and warped_img1,
resampled_img1 = self.resample1(x[:, 3:, :, :], flownetc_flow)
diff_img0 = x[:, :3, :, :] - resampled_img1
norm_diff_img0 = self.channelnorm(diff_img0)
# concat img0, img1, img1->img0, flow, diff-mag ;
concat1 = torch.cat(
[x, resampled_img1, flownetc_flow / self.div_flow, norm_diff_img0],
dim=1)
# flownets1
flownets1_flow2 = self.flownets_1(concat1)[0]
flownets1_flow = self.upsample2(flownets1_flow2 * self.div_flow)
# warp img1 to img0 using flownets1; magnitude of diff between img0 and warped_img1
resampled_img1 = self.resample2(x[:, 3:, :, :], flownets1_flow)
diff_img0 = x[:, :3, :, :] - resampled_img1
norm_diff_img0 = self.channelnorm(diff_img0)
# concat img0, img1, img1->img0, flow, diff-mag
concat2 = torch.cat(
(x, resampled_img1, flownets1_flow / self.div_flow,
norm_diff_img0),
dim=1)
# flownets2
flownets2_flow2 = self.flownets_2(concat2)[0]
flownets2_flow = self.upsample3(flownets2_flow2 * self.div_flow)
return flownets2_flow
| 35.980926
| 95
| 0.586066
| 1,621
| 13,205
| 4.550895
| 0.076496
| 0.030094
| 0.034296
| 0.0244
| 0.832994
| 0.811577
| 0.790972
| 0.761963
| 0.727125
| 0.719805
| 0
| 0.050383
| 0.289057
| 13,205
| 366
| 96
| 36.079235
| 0.735407
| 0.079515
| 0
| 0.722892
| 0
| 0
| 0.013447
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048193
| false
| 0
| 0.024096
| 0
| 0.13253
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55ca89ed1b7823a71da797e2766d4cbf3f748a46
| 3,256
|
py
|
Python
|
basic codes/project_shiva/Basic_cv/pixel.py
|
MachineLearningWithHuman/ComputerVision
|
9929a3115241067da2dd4bcbdd628d4c78fa8072
|
[
"Apache-2.0"
] | 3
|
2019-07-10T15:29:59.000Z
|
2020-06-15T17:10:15.000Z
|
basic codes/project_shiva/Basic_cv/pixel.py
|
MachineLearningWithHuman/ComputerVision
|
9929a3115241067da2dd4bcbdd628d4c78fa8072
|
[
"Apache-2.0"
] | null | null | null |
basic codes/project_shiva/Basic_cv/pixel.py
|
MachineLearningWithHuman/ComputerVision
|
9929a3115241067da2dd4bcbdd628d4c78fa8072
|
[
"Apache-2.0"
] | 1
|
2020-06-15T16:27:44.000Z
|
2020-06-15T16:27:44.000Z
|
# import the necessary packages
import argparse
import cv2
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="Path to the image")
args = vars(ap.parse_args())
# load the image, grab its dimensions, and show it
image = cv2.imread(args["image"])
(h, w) = image.shape[:2]
# import the necessary packages
import argparse
import cv2
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="Path to the image")
args = vars(ap.parse_args())
# load the image, grab its dimensions, and show it
image = cv2.imread(args["image"])
(h, w) = image.shape[:2]
cv2.imshow("Original", image)
# images are just NumPy arrays. The top-left pixel can be found at (0, 0)
(b, g, r) = image[0, 0]
print("Pixel at (0, 0) - Red: {r}, Green: {g}, Blue: {b}".format(r=r, g=g, b=b))
# now, let's change the value of the pixel at (0, 0) and make it red
image[0, 0] = (0, 0, 255)
(b, g, r) = image[0, 0]
print("Pixel at (0, 0) - Red: {r}, Green: {g}, Blue: {b}".format(r=r, g=g, b=b)
# compute the center of the image, which is simply the width and height
# divided by two
(cX, cY) = (w // 2, h // 2)
# since we are using NumPy arrays, we can apply slicing and grab large chunks
# of the image -- let's grab the top-left corner
tl = image[0:cY, 0:cX]
cv2.imshow("Top-Left Corner", tl)
# in a similar fashion, let's grab the top-right, bottom-right, and bottom-left
# corners and display them
tr = image[0:cY, cX:w]
br = image[cY:h, cX:w]
bl = image[cY:h, 0:cX]
cv2.imshow("Top-Right Corner", tr)
cv2.imshow("Bottom-Right Corner", br)
cv2.imshow("Bottom-Left Corner", bl)
# import the necessary packages
import argparse
import cv2
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="Path to the image")
args = vars(ap.parse_args())
# load the image, grab its dimensions, and show it
image = cv2.imread(args["image"])
(h, w) = image.shape[:2]
cv2.imshow("Original", image)
# images are just NumPy arrays. The top-left pixel can be found at (0, 0)
(b, g, r) = image[0, 0]
print("Pixel at (0, 0) - Red: {r}, Green: {g}, Blue: {b}".format(r=r, g=g, b=b))
# now, let's change the value of the pixel at (0, 0) and make it red
image[0, 0] = (0, 0, 255)
(b, g, r) = image[0, 0]
print("Pixel at (0, 0) - Red: {r}, Green: {g}, Blue: {b}".format(r=r, g=g, b=b))
# compute the center of the image, which is simply the width and height
# divided by two
(cX, cY) = (w // 2, h // 2)
# since we are using NumPy arrays, we can apply slicing and grab large chunks
# of the image -- let's grab the top-left corner
tl = image[0:cY, 0:cX]
cv2.imshow("Top-Left Corner", tl)
# in a similar fashion, let's grab the top-right, bottom-right, and bottom-left
# corners and display them
tr = image[0:cY, cX:w]
br = image[cY:h, cX:w]
bl = image[cY:h, 0:cX]
cv2.imshow("Top-Right Corner", tr)
cv2.imshow("Bottom-Right Corner", br)
cv2.imshow("Bottom-Left Corner", bl)
# now let's make the top-left corner of the original image red
image[0:cY, 0:cX] = (0, 0, 255)
# Show our updated image
cv2.imshow("Updated", image)
cv2.waitKey(0)
| 31.61165
| 80
| 0.673219
| 601
| 3,256
| 3.637271
| 0.169717
| 0.017383
| 0.014639
| 0.024703
| 0.952425
| 0.947393
| 0.947393
| 0.947393
| 0.947393
| 0.947393
| 0
| 0.029108
| 0.166462
| 3,256
| 102
| 81
| 31.921569
| 0.776345
| 0
| 0
| 0.925926
| 0
| 0.074074
| 0.24295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.111111
| null | null | 0.074074
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
36aed7b8b9567a3cc5d8ec9afc96b894f66dc501
| 7,878
|
py
|
Python
|
tests/frameworks/test_celery.py
|
rlopes-ki/python-sensor
|
07e827f9982b2a0c482e8eab82d1a420923efd5e
|
[
"MIT"
] | 61
|
2017-09-27T02:50:17.000Z
|
2022-03-22T12:13:37.000Z
|
tests/frameworks/test_celery.py
|
rlopes-ki/python-sensor
|
07e827f9982b2a0c482e8eab82d1a420923efd5e
|
[
"MIT"
] | 82
|
2017-07-11T13:47:33.000Z
|
2022-03-22T10:10:38.000Z
|
tests/frameworks/test_celery.py
|
rlopes-ki/python-sensor
|
07e827f9982b2a0c482e8eab82d1a420923efd5e
|
[
"MIT"
] | 27
|
2017-09-11T16:22:32.000Z
|
2022-03-11T17:21:49.000Z
|
# (c) Copyright IBM Corp. 2021
# (c) Copyright Instana Inc. 2020
from __future__ import absolute_import
import time
from celery import shared_task
from instana.singletons import tracer
from ..helpers import get_first_span_by_filter
# TODO: Refactor to class based tests
@shared_task
def add(x, y):
return x + y
@shared_task
def will_raise_error():
raise Exception('This is a simulated error')
def filter_out_ping_tasks(spans):
filtered_spans = []
for span in spans:
is_ping_task = (span.n == 'celery-worker' and span.data['celery']['task'] == 'celery.ping')
if not is_ping_task:
filtered_spans.append(span)
return filtered_spans
def setup_method():
""" Clear all spans before a test run """
tracer.recorder.clear_spans()
def test_apply_async(celery_app, celery_worker):
result = None
with tracer.start_active_span('test'):
result = add.apply_async(args=(4, 5))
# Wait for jobs to finish
time.sleep(0.5)
spans = filter_out_ping_tasks(tracer.recorder.queued_spans())
assert len(spans) == 3
filter = lambda span: span.n == "sdk"
test_span = get_first_span_by_filter(spans, filter)
assert(test_span)
filter = lambda span: span.n == "celery-client"
client_span = get_first_span_by_filter(spans, filter)
assert(client_span)
filter = lambda span: span.n == "celery-worker"
worker_span = get_first_span_by_filter(spans, filter)
assert(worker_span)
assert(client_span.t == test_span.t)
assert(client_span.t == worker_span.t)
assert(client_span.p == test_span.s)
assert("tests.frameworks.test_celery.add" == client_span.data["celery"]["task"])
assert("redis" == client_span.data["celery"]["scheme"])
assert("localhost" == client_span.data["celery"]["host"])
assert("6379" == client_span.data["celery"]["port"])
assert(client_span.data["celery"]["task_id"])
assert(client_span.data["celery"]["error"] == None)
assert(client_span.ec == None)
assert("tests.frameworks.test_celery.add" == worker_span.data["celery"]["task"])
assert("redis" == worker_span.data["celery"]["scheme"])
assert("localhost" == worker_span.data["celery"]["host"])
assert("6379" == worker_span.data["celery"]["port"])
assert(worker_span.data["celery"]["task_id"])
assert(worker_span.data["celery"]["error"] == None)
assert(worker_span.data["celery"]["retry-reason"] == None)
assert(worker_span.ec == None)
def test_delay(celery_app, celery_worker):
result = None
with tracer.start_active_span('test'):
result = add.delay(4, 5)
# Wait for jobs to finish
time.sleep(0.5)
spans = filter_out_ping_tasks(tracer.recorder.queued_spans())
assert len(spans) == 3
filter = lambda span: span.n == "sdk"
test_span = get_first_span_by_filter(spans, filter)
assert(test_span)
filter = lambda span: span.n == "celery-client"
client_span = get_first_span_by_filter(spans, filter)
assert(client_span)
filter = lambda span: span.n == "celery-worker"
worker_span = get_first_span_by_filter(spans, filter)
assert(worker_span)
assert(client_span.t == test_span.t)
assert(client_span.t == worker_span.t)
assert(client_span.p == test_span.s)
assert("tests.frameworks.test_celery.add" == client_span.data["celery"]["task"])
assert("redis" == client_span.data["celery"]["scheme"])
assert("localhost" == client_span.data["celery"]["host"])
assert("6379" == client_span.data["celery"]["port"])
assert(client_span.data["celery"]["task_id"])
assert(client_span.data["celery"]["error"] == None)
assert(client_span.ec == None)
assert("tests.frameworks.test_celery.add" == worker_span.data["celery"]["task"])
assert("redis" == worker_span.data["celery"]["scheme"])
assert("localhost" == worker_span.data["celery"]["host"])
assert("6379" == worker_span.data["celery"]["port"])
assert(worker_span.data["celery"]["task_id"])
assert(worker_span.data["celery"]["error"] == None)
assert(worker_span.data["celery"]["retry-reason"] == None)
assert(worker_span.ec == None)
def test_send_task(celery_app, celery_worker):
result = None
with tracer.start_active_span('test'):
result = celery_app.send_task('tests.frameworks.test_celery.add', (1, 2))
# Wait for jobs to finish
time.sleep(0.5)
spans = filter_out_ping_tasks(tracer.recorder.queued_spans())
assert len(spans) == 3
filter = lambda span: span.n == "sdk"
test_span = get_first_span_by_filter(spans, filter)
assert(test_span)
filter = lambda span: span.n == "celery-client"
client_span = get_first_span_by_filter(spans, filter)
assert(client_span)
filter = lambda span: span.n == "celery-worker"
worker_span = get_first_span_by_filter(spans, filter)
assert(worker_span)
assert(client_span.t == test_span.t)
assert(client_span.t == worker_span.t)
assert(client_span.p == test_span.s)
assert("tests.frameworks.test_celery.add" == client_span.data["celery"]["task"])
assert("redis" == client_span.data["celery"]["scheme"])
assert("localhost" == client_span.data["celery"]["host"])
assert("6379" == client_span.data["celery"]["port"])
assert(client_span.data["celery"]["task_id"])
assert(client_span.data["celery"]["error"] == None)
assert(client_span.ec == None)
assert("tests.frameworks.test_celery.add" == worker_span.data["celery"]["task"])
assert("redis" == worker_span.data["celery"]["scheme"])
assert("localhost" == worker_span.data["celery"]["host"])
assert("6379" == worker_span.data["celery"]["port"])
assert(worker_span.data["celery"]["task_id"])
assert(worker_span.data["celery"]["error"] == None)
assert(worker_span.data["celery"]["retry-reason"] == None)
assert(worker_span.ec == None)
def test_error_reporting(celery_app, celery_worker):
result = None
with tracer.start_active_span('test'):
result = will_raise_error.apply_async()
# Wait for jobs to finish
time.sleep(0.5)
spans = filter_out_ping_tasks(tracer.recorder.queued_spans())
assert len(spans) == 4
filter = lambda span: span.n == "sdk"
test_span = get_first_span_by_filter(spans, filter)
assert(test_span)
filter = lambda span: span.n == "celery-client"
client_span = get_first_span_by_filter(spans, filter)
assert(client_span)
filter = lambda span: span.n == "log"
log_span = get_first_span_by_filter(spans, filter)
assert(log_span)
filter = lambda span: span.n == "celery-worker"
worker_span = get_first_span_by_filter(spans, filter)
assert(worker_span)
assert(client_span.t == test_span.t)
assert(client_span.t == worker_span.t)
assert(client_span.t == log_span.t)
assert(client_span.p == test_span.s)
assert(worker_span.p == client_span.s)
assert(log_span.p == worker_span.s)
assert("tests.frameworks.test_celery.will_raise_error" == client_span.data["celery"]["task"])
assert("redis" == client_span.data["celery"]["scheme"])
assert("localhost" == client_span.data["celery"]["host"])
assert("6379" == client_span.data["celery"]["port"])
assert(client_span.data["celery"]["task_id"])
assert(client_span.data["celery"]["error"] == None)
assert(client_span.ec == None)
assert("tests.frameworks.test_celery.will_raise_error" == worker_span.data["celery"]["task"])
assert("redis" == worker_span.data["celery"]["scheme"])
assert("localhost" == worker_span.data["celery"]["host"])
assert("6379" == worker_span.data["celery"]["port"])
assert(worker_span.data["celery"]["task_id"])
assert(worker_span.data["celery"]["error"] == 'This is a simulated error')
assert(worker_span.data["celery"]["retry-reason"] == None)
assert(worker_span.ec == 1)
| 35.169643
| 99
| 0.678979
| 1,091
| 7,878
| 4.664528
| 0.098992
| 0.083317
| 0.145805
| 0.110041
| 0.871881
| 0.854195
| 0.853999
| 0.853016
| 0.841423
| 0.833366
| 0
| 0.00891
| 0.159431
| 7,878
| 223
| 100
| 35.327354
| 0.759589
| 0.028941
| 0
| 0.771605
| 0
| 0
| 0.167234
| 0.041121
| 0
| 0
| 0
| 0.004484
| 0.567901
| 1
| 0.049383
| false
| 0
| 0.030864
| 0.006173
| 0.092593
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d08d5f8ae54af49dee5d5d82bb5aedfb597ac0c
| 10,459
|
py
|
Python
|
HoundSploit/searcher/engine/filter_query.py
|
nicolas-carolo/houndsplo
|
a44b02559588ec2ae44af3529cc8a58371fa15c8
|
[
"BSD-3-Clause"
] | 85
|
2019-12-18T08:11:51.000Z
|
2022-02-25T05:45:48.000Z
|
HoundSploit/searcher/engine/filter_query.py
|
juan157/houndsploit
|
12210481d8fa5880265e4b342f816a53d93e4637
|
[
"BSD-3-Clause"
] | 2
|
2020-04-21T13:33:14.000Z
|
2020-04-30T12:39:50.000Z
|
HoundSploit/searcher/engine/filter_query.py
|
juan157/houndsploit
|
12210481d8fa5880265e4b342f816a53d93e4637
|
[
"BSD-3-Clause"
] | 11
|
2020-04-20T09:49:30.000Z
|
2022-02-01T15:29:17.000Z
|
from pkg_resources import parse_version
from HoundSploit.searcher.engine.version_comparator import get_num_version_with_comparator, get_num_version,\
is_in_version_range_with_x, is_equal_with_x, is_in_version_range, is_lte_with_comparator_x
from HoundSploit.searcher.engine.string import str_contains_num_version_range_with_x, str_contains_num_version_range
import datetime
def filter_exploits_without_comparator(exploit, num_version, software_name, final_result_set):
"""
Add the exploit (without comparator) to the final_result_set if respect the condition set by the user.
:param exploit: the exploit we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if not exploit.description.__contains__('.x'):
# exclude the exploit from results table if the number of version is not equal and contains 'x'
try:
if parse_version(num_version) == parse_version(get_num_version(software_name, exploit.description)):
final_result_set.append(exploit)
except TypeError:
pass
else:
# exclude the exploit from results table if the number of version is not equal and not contains 'x'
try:
if is_equal_with_x(num_version, get_num_version(software_name, exploit.description)):
final_result_set.append(exploit)
except TypeError:
pass
return final_result_set
def filter_exploits_with_comparator(exploit, num_version, software_name, final_result_set):
"""
Add the exploit (with comparator) to the final_result_set if respect the condition set by the user.
:param exploit: the exploit we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if not exploit.description.__contains__('.x'):
final_result_set = filter_exploits_with_comparator_and_without_x(exploit, num_version, software_name, final_result_set)
else:
final_result_set = filter_exploits_with_comparator_and_x(exploit, num_version, software_name, final_result_set)
return final_result_set
def filter_exploits_with_comparator_and_without_x(exploit, num_version, software_name, final_result_set):
"""
Add exploit (with comparator and without the x in number version) to the final_result_set if respect the condition set by the user.
:param exploit: the exploit we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if str_contains_num_version_range(str(exploit.description)):
if is_in_version_range(num_version, software_name, exploit.description):
final_result_set.append(exploit)
else:
try:
if parse_version(num_version) <= parse_version(
get_num_version_with_comparator(software_name, exploit.description)):
final_result_set.append(exploit)
except TypeError:
pass
return final_result_set
def filter_exploits_with_comparator_and_x(exploit, num_version, software_name, final_result_set):
"""
Add exploit (with comparator and x in the number version) to the final_result_set if respect the condition set by the user.
:param exploit: the exploit we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if str_contains_num_version_range_with_x(str(exploit.description)):
if is_in_version_range_with_x(num_version, software_name, exploit.description):
final_result_set.append(exploit)
else:
try:
if is_lte_with_comparator_x(num_version, software_name, exploit.description):
final_result_set.append(exploit)
except TypeError:
pass
return final_result_set
def filter_shellcodes_without_comparator(shellcode, num_version, software_name, final_result_set):
"""
Add the shellcode (without comparator) to the final_result_set if respect the condition set by the user.
:param shellcode: the shellcode we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if not shellcode.description.__contains__('.x'):
# exclude the exploit from results table if the number of version is not equal and contains 'x'
try:
if parse_version(num_version) == parse_version(get_num_version(software_name, shellcode.description)):
final_result_set.append(shellcode)
except TypeError:
pass
else:
# exclude the exploit from results table if the number of version is not equal and not contains 'x'
try:
if is_equal_with_x(num_version, get_num_version(software_name, shellcode.description)):
final_result_set.append(shellcode)
except TypeError:
pass
return final_result_set
def filter_shellcodes_with_comparator(shellcode, num_version, software_name, final_result_set):
"""
Add the shellcode (with comparator) to the final_result_set if respect the condition set by the user.
:param shellcode: the shellcode we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if not shellcode.description.__contains__('.x'):
final_result_set = filter_shellcodes_with_comparator_and_without_x(shellcode, num_version, software_name, final_result_set)
else:
final_result_set = filter_shellcodes_with_comparator_and_x(shellcode, num_version, software_name, final_result_set)
return final_result_set
def filter_shellcodes_with_comparator_and_without_x(shellcode, num_version, software_name, final_result_set):
"""
Add the shellcode (with comparator and without x) to the final_result_set if respect the condition set by the user.
:param shellcode: the shellcode we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if str_contains_num_version_range(str(shellcode.description)):
if is_in_version_range(num_version, software_name, shellcode.description):
final_result_set.append(shellcode)
else:
try:
if parse_version(num_version) <= parse_version(
get_num_version_with_comparator(software_name, shellcode.description)):
final_result_set.append(shellcode)
except TypeError:
pass
return final_result_set
def filter_shellcodes_with_comparator_and_x(shellcode, num_version, software_name, final_result_set):
"""
Add the shellcode (with comparator and x) to the final_result_set if respect the condition set by the user.
:param shellcode: the shellcode we have to check if it has a number of version that matches the value passed by
the user.
:param num_version: the number of version searched by the user.
:param software_name: the name of the software searched by the user.
:param final_result_set: the result set that
:return: the result set that
"""
if str_contains_num_version_range_with_x(str(shellcode.description)):
if is_in_version_range_with_x(num_version, software_name, shellcode.description):
final_result_set.append(shellcode)
else:
try:
if is_lte_with_comparator_x(num_version, software_name, shellcode.description):
final_result_set.append(shellcode)
except TypeError:
pass
return final_result_set
def filter_vulnerabilities_for_author(input_list, author_filter):
output_list = []
for vulnerability in input_list:
if vulnerability.author == author_filter:
output_list.append(vulnerability)
return output_list
def filter_vulnerabilities_for_type(input_list, type_filter):
output_list = []
for vulnerability in input_list:
if vulnerability.type == type_filter:
output_list.append(vulnerability)
return output_list
def filter_vulnerabilities_for_platform(input_list, platform_filter):
output_list = []
for vulnerability in input_list:
if vulnerability.platform == platform_filter:
output_list.append(vulnerability)
return output_list
def filter_exploits_for_port(input_list, port_filter):
output_list = []
for vulnerability in input_list:
if vulnerability.port == port_filter:
output_list.append(vulnerability)
return output_list
def filter_vulnerabilities_for_date_range(input_list, date_from, date_to):
output_list = []
for vulnerability in input_list:
if date_from < datetime.datetime.strptime(vulnerability.date, '%Y-%m-%d') < date_to:
output_list.append(vulnerability)
return output_list
| 46.484444
| 135
| 0.721484
| 1,453
| 10,459
| 4.917412
| 0.0585
| 0.085654
| 0.101889
| 0.062701
| 0.93352
| 0.922743
| 0.919804
| 0.913506
| 0.894892
| 0.894892
| 0
| 0
| 0.223731
| 10,459
| 224
| 136
| 46.691964
| 0.880034
| 0.385697
| 0
| 0.644628
| 0
| 0
| 0.00262
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107438
| false
| 0.066116
| 0.033058
| 0
| 0.247934
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
3d097cf3188da140b6fb04036ad7d159230f257a
| 174
|
py
|
Python
|
fzw/news/helpers.py
|
fajnie-ze-wiesz/fzw-backend
|
eb7942bbf884a1269cfe0ad336187ffa979b4d12
|
[
"MIT"
] | 1
|
2018-03-31T14:07:28.000Z
|
2018-03-31T14:07:28.000Z
|
fzw/news/helpers.py
|
fajnie-ze-wiesz/fzw-backend
|
eb7942bbf884a1269cfe0ad336187ffa979b4d12
|
[
"MIT"
] | 2
|
2020-06-06T06:27:44.000Z
|
2020-12-23T14:13:50.000Z
|
fzw/news/helpers.py
|
fajnie-ze-wiesz/fzw-backend
|
eb7942bbf884a1269cfe0ad336187ffa979b4d12
|
[
"MIT"
] | null | null | null |
import markdown # type: ignore
from fzw.news.models import News
def get_answer_explanation_html(news: News) -> str:
return markdown.markdown(news.answer_explanation)
| 21.75
| 53
| 0.781609
| 24
| 174
| 5.5
| 0.625
| 0.257576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 174
| 7
| 54
| 24.857143
| 0.88
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
3d1b36ec24afcf6abe243b5a38edf9692db59bf5
| 171
|
py
|
Python
|
trtools/dumpSTR/__init__.py
|
ileenamitra/TRTools
|
3982185399abe7a6a81a0dd917418bf571562a8e
|
[
"MIT"
] | 14
|
2020-04-20T15:38:52.000Z
|
2022-02-07T11:45:23.000Z
|
trtools/dumpSTR/__init__.py
|
ileenamitra/TRTools
|
3982185399abe7a6a81a0dd917418bf571562a8e
|
[
"MIT"
] | 74
|
2020-03-02T23:34:53.000Z
|
2022-03-21T18:32:10.000Z
|
trtools/dumpSTR/__init__.py
|
ileenamitra/TRTools
|
3982185399abe7a6a81a0dd917418bf571562a8e
|
[
"MIT"
] | 15
|
2018-10-29T19:41:33.000Z
|
2020-02-21T18:41:51.000Z
|
# expose the code in the file dumpSTR/dumpSTR.py
# through the statement import trtools.dumpSTR
# instead of through import trtools.dumpSTR.dumpSTR
from .dumpSTR import *
| 34.2
| 51
| 0.80117
| 25
| 171
| 5.48
| 0.56
| 0.20438
| 0.291971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 171
| 4
| 52
| 42.75
| 0.931973
| 0.824561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3d26f0bb9719aa8d466a7264fa607bedee6c97a2
| 363
|
py
|
Python
|
arginfer/__init__.py
|
JereKoskela/arginfer
|
3dd7a4d8fb22eff20573a312638055dfcda2ff85
|
[
"MIT"
] | 2
|
2022-02-04T07:58:35.000Z
|
2022-03-15T04:46:31.000Z
|
arginfer/__init__.py
|
JereKoskela/arginfer
|
3dd7a4d8fb22eff20573a312638055dfcda2ff85
|
[
"MIT"
] | 2
|
2021-03-17T05:18:14.000Z
|
2021-08-17T17:02:10.000Z
|
arginfer/__init__.py
|
JereKoskela/arginfer
|
3dd7a4d8fb22eff20573a312638055dfcda2ff85
|
[
"MIT"
] | 1
|
2021-11-01T11:20:29.000Z
|
2021-11-01T11:20:29.000Z
|
from arginfer.argbook import * # NOQA: F401, F403
from arginfer.treeSequence import * # NOQA: F401, F403
from arginfer.initialARG import * # NOQA: F401, F403
from arginfer.mcmc import infer_sim # NOQA: F401
from arginfer.mcmc import infer_real # NOQA: F401
from arginfer.plots import * # NOQA: F401, F403
from arginfer.provenance import __version__ # NOQA: F401
| 45.375
| 57
| 0.77135
| 51
| 363
| 5.372549
| 0.313725
| 0.306569
| 0.20438
| 0.262774
| 0.591241
| 0.437956
| 0
| 0
| 0
| 0
| 0
| 0.106796
| 0.14876
| 363
| 7
| 58
| 51.857143
| 0.779935
| 0.278237
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
186af474b1798fc436d49329b1d2f271b342447d
| 92,037
|
py
|
Python
|
applications/inheritance/inheritance_test.py
|
carterej1989/acitoolkit
|
8bc1e462c3bc0b6643004033e353520d438242d6
|
[
"Apache-2.0"
] | null | null | null |
applications/inheritance/inheritance_test.py
|
carterej1989/acitoolkit
|
8bc1e462c3bc0b6643004033e353520d438242d6
|
[
"Apache-2.0"
] | 2
|
2018-05-07T19:40:50.000Z
|
2020-04-02T14:43:15.000Z
|
applications/inheritance/inheritance_test.py
|
carterej1989/acitoolkit
|
8bc1e462c3bc0b6643004033e353520d438242d6
|
[
"Apache-2.0"
] | null | null | null |
"""
Inheritance test suite
"""
import unittest
from inheritance import execute_tool
from acitoolkit import (Tenant, Context, OutsideL3, OutsideEPG, OutsideNetwork,
Contract, FilterEntry, Session, AppProfile, EPG,
ContractInterface, Fabric)
import time
import sys
import logging
from logging.handlers import RotatingFileHandler
import argparse
from os import getpid
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
DEFAULT_INI_FILENAME = 'inheritance_apic_credentials.ini'
class ApicCredentials(object):
"""
Class to collect the APIC credentials from an configuration file
"""
def __init__(self):
self._config = None
self._username = None
self._password = None
self._url = None
self._ip_address = None
def set_config(self, filename):
"""
Set the configuration file name
:param filename: String containing the configuration file name
:return: None
"""
if filename is None:
return
self._config = ConfigParser()
self._config.read(filename)
def _get_attribute(self, attr_name):
"""
Get the requested configuration attribute
:param attr_name: String containing the attribute name
:return: String containing the requested configuration attribute
:raises: ValueError: An error occurred accessing the requested configuration attribute
"""
try:
return self._config.get('Credentials', attr_name)
except AttributeError:
raise ValueError('Credentials configuration file not found')
except(NoSectionError, NoOptionError):
raise ValueError('Requested credential attribute not present')
@property
def username(self):
"""
APIC username
:return: String containing APIC username
"""
return self._get_attribute('Username')
@property
def password(self):
"""
APIC password
:return: String containing APIC password
"""
return self._get_attribute('Password')
@property
def url(self):
"""
APIC URL
:return: String containing APIC URL
"""
return self._get_attribute('URL')
@property
def ip_address(self):
"""
APIC IP address as parsed from the URL
:return: String containing APIC IP address
"""
return self.url.partition('://')[-1].split('/')[0]
class TestArgs(object):
"""
Fake class to mock out Command line arguments
"""
def __init__(self):
self.debug = 'verbose'
self.maxlogfiles = 10
self.generateconfig = False
class FakeStdio(object):
"""
FakeStdio : Class to fake writing to stdio and store it so that it can be verified
"""
def __init__(self):
self.output = []
def write(self, *args, **kwargs):
"""
Mock the write routine
:param args: Args passed to stdio write
:param kwargs: Kwargs passed to stdio write
:return: None
"""
for arg in args:
self.output.append(arg)
def verify_output(self, output):
"""
Verify that the output is the same as generated previously
:param output: Output to test for
:return: True if the same as the stored output. False otherwise
"""
return output == self.output
class BaseTestCase(unittest.TestCase):
"""
Base class for the various test cases
"""
def delete_tenant(self):
"""
Delete the tenant config. Called before and after test
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
tenant.mark_as_deleted()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(4)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
tenants = Tenant.get(apic)
for tenant in tenants:
self.assertTrue(tenant.name != 'inheritanceautomatedtest')
def setUp(self):
self.delete_tenant()
def tearDown(self):
self.delete_tenant()
class TestWithoutApicCommunication(unittest.TestCase):
"""
Tests that do not communicate with the APIC
"""
def test_generate_config(self):
"""
Generate the test configuration
"""
args = TestArgs()
args.generateconfig = True
sample_config = """
{
"apic": {
"user_name": "admin",
"password": "password",
"ip_address": "0.0.0.0",
"use_https": false
},
"inheritance_policies": [
{
"epg": {
"tenant": "tenant-name",
"epg_container": {
"name": "l3out-name",
"container_type": "l3out"
},
"name": "epg-name"
},
"allowed": true,
"enabled": true
},
{
"epg": {
"tenant": "tenant-name",
"epg_container": {
"name": "l3out-name",
"container_type": "l3out"
},
"name": "epg-name"
},
"allowed": true,
"enabled": true
},
]
}
"""
temp = sys.stdout
fake_out = FakeStdio()
sys.stdout = fake_out
tool = execute_tool(args)
sys.stdout = temp
self.assertTrue(fake_out.verify_output([sample_config, '\n']))
class BaseBasicL3Out(BaseTestCase):
"""
Base class for basic Inheritance test cases enabled on OutsideEPGs
"""
def setup_tenant(self, apic):
"""
Setup the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
context = Context('mycontext', tenant)
l3out = OutsideL3('myl3out', tenant)
parent_epg = OutsideEPG('parentepg', l3out)
parent_network = OutsideNetwork('5.1.1.1', parent_epg)
parent_network.ip = '5.1.1.1/8'
child_epg = OutsideEPG('childepg', l3out)
child_network = OutsideNetwork('5.2.1.1', child_epg)
child_network.ip = '5.2.1.1/16'
contract = Contract('mycontract', tenant)
parent_epg.provide(contract)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def verify_inherited(self, apic, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
l3out = tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
childepg = l3out.get_child(OutsideEPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract'))
contract = tenant.get_child(Contract, 'mycontract')
self.assertIsNotNone(contract)
if not_inherited:
self.assertFalse(childepg.does_provide(contract))
else:
self.assertTrue(childepg.does_provide(contract))
def verify_not_inherited(self, apic):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
self.verify_inherited(apic, not_inherited=True)
class TestBasicL3Out(BaseBasicL3Out):
"""
Basic Inheritance test cases enabled on OutsideEPGs
"""
def test_basic_inherit_contract(self):
"""
Basic inherit contract test
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(4)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
tool.exit()
# self.delete_tenant()
def test_basic_inheritance_disallowed(self):
"""
Basic test for when inheritance is disallowed
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": False,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_not_inherited(apic)
# self.delete_tenant()
tool.exit()
def test_basic_inheritance_disabled(self):
"""
Basic test for when inheritance is disabled
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": False
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_not_inherited(apic)
tool.exit()
# self.delete_tenant()
def test_get_config(self):
"""
Basic test for getting the configuration
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": False
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
config = tool.get_config()
# Verify that the contract is now inherited by the child EPG
self.assertEqual(config, config_json)
tool.exit()
class TestBasicL3OutWithInheritFrom(BaseBasicL3Out):
"""
Basic Inheritance test cases enabled on OutsideEPGs that also use the inherit_from clause
"""
def setup_tenant(self, apic):
"""
Setup the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
app = AppProfile('myapp', tenant)
epg = EPG('myepg', app)
contract = Contract('mycontract-app', tenant)
epg.provide(contract)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
super(TestBasicL3OutWithInheritFrom, self).setup_tenant(apic)
def verify_inherited(self, apic, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
l3out = tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
childepg = l3out.get_child(OutsideEPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract'))
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract-app'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract'))
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract-app'))
for contract_name in ['mycontract', 'mycontract-app']:
contract = tenant.get_child(Contract, contract_name)
self.assertIsNotNone(contract)
if not_inherited:
self.assertFalse(childepg.does_provide(contract))
else:
self.assertTrue(childepg.does_provide(contract))
def test_basic_inherit_contract(self):
"""
Basic inherit contract test
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True,
"inherit_from": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "myepg"
}
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(4)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
tool.exit()
class TestContractEvents(BaseTestCase):
"""
Test contract events
"""
def get_config_json(self):
"""
Get the JSON configuration
:return: Dictionary containing the JSON configuration
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
return config_json
def get_contract(self, tenant):
"""
Get a contract
:param tenant: Instance of Tenant class to contain the contract
:return: Instance of Contract class
"""
contract = Contract('mycontract', tenant)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
return contract
def setup_tenant(self, apic):
"""
Setup the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
context = Context('mycontext', tenant)
l3out = OutsideL3('myl3out', tenant)
parent_epg = OutsideEPG('parentepg', l3out)
parent_network = OutsideNetwork('5.1.1.1', parent_epg)
parent_network.ip = '5.1.1.1/8'
child_epg = OutsideEPG('childepg', l3out)
child_network = OutsideNetwork('5.2.1.1', child_epg)
child_network.ip = '5.2.1.1/16'
contract = self.get_contract(tenant)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def setup_tenant_with_2_parent_epgs(self, apic):
"""
Setup the tenant configuration with 2 parent EPGs
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
context = Context('mycontext', tenant)
l3out = OutsideL3('myl3out', tenant)
parent_epg1 = OutsideEPG('parentepg1', l3out)
parent_network = OutsideNetwork('5.1.1.1', parent_epg1)
parent_network.ip = '5.1.1.1/8'
contract = self.get_contract(tenant)
parent_epg1.provide(contract)
parent_epg2 = OutsideEPG('parentepg2', l3out)
parent_epg2.provide(contract)
parent_network = OutsideNetwork('5.3.1.1', parent_epg2)
parent_network.ip = '5.3.1.1/12'
child_epg = OutsideEPG('childepg', l3out)
child_network = OutsideNetwork('5.2.1.1', child_epg)
child_network.ip = '5.2.1.1/16'
contract = self.get_contract(tenant)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def add_contract(self, apic):
"""
Add the contract
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
l3out = OutsideL3('myl3out', tenant)
parent_epg = OutsideEPG('parentepg', l3out)
contract = self.get_contract(tenant)
parent_epg.provide(contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def remove_contract(self, apic):
"""
Remove the contract
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
l3out = OutsideL3('myl3out', tenant)
parent_epg = OutsideEPG('parentepg', l3out)
contract = self.get_contract(tenant)
parent_epg.provide(contract)
parent_epg.dont_provide(contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def verify_inherited(self, apic, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
l3out = tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
childepg = l3out.get_child(OutsideEPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract'))
contract = tenant.get_child(Contract, 'mycontract')
self.assertIsNotNone(contract)
if not_inherited:
self.assertFalse(childepg.does_provide(contract))
else:
self.assertTrue(childepg.does_provide(contract))
def verify_not_inherited(self, apic):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
self.verify_inherited(apic, not_inherited=True)
def test_basic_inherit_contract(self):
"""
Basic test for inheriting contract
"""
self.delete_tenant()
config_json = self.get_config_json()
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic)
time.sleep(2)
# Add the contract
self.add_contract(apic)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
self.delete_tenant()
def test_inherit_contract_and_delete(self):
"""
Test inheriting the contract and delete the contract
"""
self.delete_tenant()
config_json = self.get_config_json()
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic)
time.sleep(2)
# Add the contract
self.add_contract(apic)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
# Remove the contract from the parent EPG
self.remove_contract(apic)
time.sleep(2)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic)
self.delete_tenant()
def test_dual_inheritance_contract(self):
"""
Test for inheriting from 2 EPGs
"""
self.delete_tenant()
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg1"
},
"allowed": True,
"enabled": False
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg2"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant_with_2_parent_epgs(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
self.delete_tenant()
def test_dual_inheritance_contract_delete_one_relation(self):
"""
Test inheriting from 2 parent EPGs where one providing relation is then deleted
"""
self.delete_tenant()
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg1"
},
"allowed": True,
"enabled": False
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg2"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant_with_2_parent_epgs(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
# Remove contract
tenant = Tenant('inheritanceautomatedtest')
l3out = OutsideL3('myl3out', tenant)
parent_epg = OutsideEPG('parentepg1', l3out)
contract = self.get_contract(tenant)
parent_epg.provide(contract)
parent_epg.dont_provide(contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
# Verify that the contract is still inherited by the child EPG
time.sleep(2)
self.verify_inherited(apic)
self.delete_tenant()
def test_dual_inheritance_contract_delete_both_relations(self):
"""
Test inheriting from 2 parent EPGs where both providing relations are then deleted
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg1"
},
"allowed": True,
"enabled": False
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg2"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant_with_2_parent_epgs(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(4)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
# Remove contracts
tenant = Tenant('inheritanceautomatedtest')
l3out = OutsideL3('myl3out', tenant)
contract = self.get_contract(tenant)
parent_epg1 = OutsideEPG('parentepg1', l3out)
parent_epg1.provide(contract)
parent_epg1.dont_provide(contract)
parent_epg2 = OutsideEPG('parentepg2', l3out)
parent_epg2.provide(contract)
parent_epg2.dont_provide(contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
# Verify that the contract is no longer inherited by the child EPG
time.sleep(4)
self.verify_not_inherited(apic)
self.delete_tenant()
# multiple children
# - verify that an inherited relation can go from parent to child to grandchild (see the sketch after this list)
# contract cases
# - add another contract and verify that it gets inherited
# - delete the contract and verify that it gets removed
# subnet cases
# - add subnet and verify that causes to be inherited
# - remove subnet and verify inheritance removed
# - add 2 subnets and verify that causes to be inherited, remove 1 verify still inherited
# - remove inherited relation
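# A minimal sketch of how the "grandchild" case listed above might be verified,
# assuming a third-level EPG named 'grandchildepg' exists under the same L3Out.
# The helper is illustrative only and is not wired into any TestCase; the EPG name
# and the expectation that the same 'inherited:fvRsProv:...' tag propagates another
# level down are assumptions, not taken from the tool's documentation.
def _sketch_verify_grandchild_inherited(apic):
    tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
    assert len(tenants) > 0
    l3out = tenants[0].get_child(OutsideL3, 'myl3out')
    assert l3out is not None
    grandchild_epg = l3out.get_child(OutsideEPG, 'grandchildepg')
    assert grandchild_epg is not None
    # The inherited relation should carry the same tag format on the grandchild
    assert grandchild_epg.has_tag('inherited:fvRsProv:mycontract')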
class TestSubnetEvents(BaseTestCase):
"""
Test subnet events
"""
def setup_tenant(self, apic):
"""
Setup the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
context = Context('mycontext', tenant)
l3out = OutsideL3('myl3out', tenant)
parent_epg = OutsideEPG('parentepg', l3out)
parent_network = OutsideNetwork('5.1.1.1', parent_epg)
parent_network.ip = '5.1.1.1/8'
_ = OutsideEPG('childepg', l3out)
contract = Contract('mycontract', tenant)
parent_epg.provide(contract)
_ = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def add_child_subnet(self, apic):
"""
Add a child subnet
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
l3out = OutsideL3('myl3out', tenant)
child_epg = OutsideEPG('childepg', l3out)
child_network = OutsideNetwork('5.2.1.1', child_epg)
child_network.ip = '5.2.1.1/16'
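# The added /16 (5.2.0.0/16) falls inside the parent EPG's 5.0.0.0/8 network defined
# in setup_tenant; presumably this subnet containment is what makes the child EPG
# eligible to inherit the parent's provided contract (interpretation of the tool's
# behaviour, not taken from its documentation).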
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def verify_inherited(self, apic, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
l3out = tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
childepg = l3out.get_child(OutsideEPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract'))
contract = tenant.get_child(Contract, 'mycontract')
self.assertIsNotNone(contract)
if not_inherited:
self.assertFalse(childepg.does_provide(contract))
else:
self.assertTrue(childepg.does_provide(contract))
def verify_not_inherited(self, apic):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
self.verify_inherited(apic, not_inherited=True)
def test_basic_inherit_add_subnet(self):
"""
Basic test to inherit after adding a subnet
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic)
# Add the child subnet
self.add_child_subnet(apic)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
self.delete_tenant()
class TestMultipleOutsideEPGLevels(BaseTestCase):
"""
Test multiple OutsideEPG levels
"""
def setup_tenant(self, apic):
"""
Setup the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
context = Context('mycontext', tenant)
l3out = OutsideL3('myl3out', tenant)
grandparent_epg = OutsideEPG('grandparentepg', l3out)
grandparent_network = OutsideNetwork('10.0.0.0', grandparent_epg)
grandparent_network.ip = '10.0.0.0/8'
parent_epg = OutsideEPG('parentepg', l3out)
parent_network = OutsideNetwork('10.1.0.0', parent_epg)
parent_network.ip = '10.1.0.0/16'
child_epg = OutsideEPG('childepg', l3out)
child_network = OutsideNetwork('10.1.1.0', child_epg)
child_network.ip = '10.1.1.0/24'
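# The three networks nest (10.1.1.0/24 inside 10.1.0.0/16 inside 10.0.0.0/8), which
# presumably establishes the grandparent/parent/child EPG relationship by subnet
# containment (interpretation).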
contract = Contract('mycontract', tenant)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def verify_inherited(self, apic, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
l3out = tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
childepg = l3out.get_child(OutsideEPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract'))
contract = tenant.get_child(Contract, 'mycontract')
self.assertIsNotNone(contract)
if not_inherited:
self.assertFalse(childepg.does_provide(contract))
else:
self.assertTrue(childepg.does_provide(contract))
def verify_not_inherited(self, apic):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
self.verify_inherited(apic, not_inherited=True)
def test_provide_contract_directly_on_parent_epg(self):
"""
Test providing the contract directly on the parent EPG and verify that it is not inherited by the child EPG while the parent EPG still provides it
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": False,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "grandparentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic)
# Provide the contract from the parent EPG
tenant = Tenant('inheritanceautomatedtest')
l3out = OutsideL3('myl3out', tenant)
parent_epg = OutsideEPG('parentepg', l3out)
parent_network = OutsideNetwork('10.1.0.0', parent_epg)
parent_network.ip = '10.1.0.0/16'
contract = Contract('mycontract', tenant)
parent_epg.provide(contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
# Verify that the contract is still not inherited by the child EPG
self.verify_not_inherited(apic)
time.sleep(2)
# Verify that the parent EPG still provides the contract
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
l3out = tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
parentepg = l3out.get_child(OutsideEPG, 'parentepg')
self.assertIsNotNone(parentepg)
self.assertFalse(parentepg.has_tag('inherited:fvRsProv:mycontract'))
contract = tenant.get_child(Contract, 'mycontract')
self.assertIsNotNone(contract)
self.assertTrue(parentepg.does_provide(contract))
self.delete_tenant()
class BaseImportedContract(unittest.TestCase):
"""
Base class for tests for ContractInterface
"""
def delete_tenants(self, provider_tenant_name, consumer_tenant_name):
"""
Delete the tenants. Called before and after tests automatically
:param provider_tenant_name: String containing the tenant name exporting the contract
:param consumer_tenant_name: String containing the tenant name consuming the imported contract
:return: None
"""
provider_tenant = Tenant(provider_tenant_name)
provider_tenant.mark_as_deleted()
consumer_tenant = Tenant(consumer_tenant_name)
consumer_tenant.mark_as_deleted()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(4)
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
tenants = Tenant.get(apic)
for tenant in tenants:
self.assertTrue(tenant.name != provider_tenant_name)
self.assertTrue(tenant.name != consumer_tenant_name)
def setUp(self):
self.delete_tenants('inheritanceautomatedtest-provider', 'inheritanceautomatedtest-consumer')
def tearDown(self):
self.delete_tenants('inheritanceautomatedtest-provider', 'inheritanceautomatedtest-consumer')
def setup_tenants(self, apic, provider_tenant_name, consumer_tenant_name, use_contract_if=True):
"""
Setup 2 tenants with 1 providing a contract that is consumed by the
other tenant
:param apic: Session instance that is assumed to be logged into the APIC
:param provider_tenant_name: String containing the tenant name exporting the contract
:param consumer_tenant_name: String containing the tenant name consuming the imported contract
:return: None
"""
provider_tenant = Tenant(provider_tenant_name)
app = AppProfile('myinheritanceapp', provider_tenant)
epg = EPG('myepg', app)
contract = Contract('mycontract', provider_tenant)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
epg.provide(contract)
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
consumer_tenant = Tenant(consumer_tenant_name)
context = Context('mycontext', consumer_tenant)
l3out = OutsideL3('myl3out', consumer_tenant)
parent_epg = OutsideEPG('parentepg', l3out)
parent_network = OutsideNetwork('5.1.1.1', parent_epg)
parent_network.ip = '5.1.1.1/8'
child_epg = OutsideEPG('childepg', l3out)
if use_contract_if:
contract_if = ContractInterface('mycontract', consumer_tenant)
contract_if.import_contract(contract)
parent_epg.consume_cif(contract_if)
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
else:
parent_epg.consume(contract)
consumer_tenant_json = consumer_tenant.get_json()
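# The loop below strips any Contract (vzBrCP) children out of the consumer tenant's
# JSON before pushing, presumably so that only the consuming relation is created in
# the consumer tenant and the contract definition itself stays with the provider
# (interpretation of intent).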
for child in consumer_tenant_json['fvTenant']['children']:
if 'vzBrCP' in child:
consumer_tenant_json['fvTenant']['children'].remove(child)
resp = apic.push_to_apic(consumer_tenant.get_url(), consumer_tenant_json)
self.assertTrue(resp.ok)
def add_child_subnet(self, apic, consumer_tenant_name):
"""
Add a child subnet
:param apic: Session instance that is assumed to be logged into the APIC
:param consumer_tenant_name: String containing the tenant name consuming the imported contract
:return: None
"""
tenant = Tenant(consumer_tenant_name)
l3out = OutsideL3('myl3out', tenant)
child_epg = OutsideEPG('childepg', l3out)
child_network = OutsideNetwork('5.2.1.1', child_epg)
child_network.ip = '5.2.1.1/16'
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def verify_inherited(self, apic, provider_tenant_name, consumer_tenant_name,
not_inherited=False, use_contract_if=True):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param provider_tenant_name: String containing the tenant name exporting the contract
:param consumer_tenant_name: String containing the tenant name consuming the imported contract
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
fabric = Fabric()
tenants = Tenant.get_deep(apic, names=[consumer_tenant_name, provider_tenant_name], parent=fabric)
self.assertTrue(len(tenants) > 0)
consumer_tenant = None
provider_tenant = None
for tenant in tenants:
if tenant.name == consumer_tenant_name:
consumer_tenant = tenant
if tenant.name == provider_tenant_name:
provider_tenant = tenant
self.assertIsNotNone(consumer_tenant)
l3out = consumer_tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
childepg = l3out.get_child(OutsideEPG, 'childepg')
self.assertIsNotNone(childepg)
cons_word = 'fvRsCons'
if use_contract_if:
cons_word += 'If'
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:%s:mycontract' % cons_word))
else:
self.assertTrue(childepg.has_tag('inherited:%s:mycontract' % cons_word))
if use_contract_if:
contract_if = consumer_tenant.get_child(ContractInterface, 'mycontract')
else:
contract_if = provider_tenant.get_child(Contract, 'mycontract')
self.assertIsNotNone(contract_if)
if not_inherited:
if use_contract_if:
self.assertFalse(childepg.does_consume_cif(contract_if))
else:
self.assertFalse(childepg.does_consume(contract_if))
else:
if use_contract_if:
self.assertTrue(childepg.does_consume_cif(contract_if))
else:
self.assertTrue(childepg.does_consume(contract_if))
def verify_not_inherited(self, apic, provider_tenant_name, consumer_tenant_name, use_contract_if=True):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:param provider_tenant_name: String containing the tenant name exporting the contract
:param consumer_tenant_name: String containing the tenant name consuming the imported contract
:return: None
"""
self.verify_inherited(apic, provider_tenant_name, consumer_tenant_name,
not_inherited=True, use_contract_if=use_contract_if)
def run_basic_test(self, provider_tenant_name, consumer_tenant_name, use_contract_if=True):
"""
Run the test using the specified tenant names
:param provider_tenant_name: String containing the tenant to export the contract
:param consumer_tenant_name: String containing the tenant to import the contract
:param use_contract_if: Boolean indicating whether to consume via a ContractInterface (True) or consume the Contract directly (False)
:return: None
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "%s" % consumer_tenant_name,
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "%s" % consumer_tenant_name,
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenants(apic, provider_tenant_name, consumer_tenant_name, use_contract_if=use_contract_if)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic, provider_tenant_name, consumer_tenant_name, use_contract_if=use_contract_if)
# Add the child subnet
self.add_child_subnet(apic, consumer_tenant_name)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic, provider_tenant_name, consumer_tenant_name, use_contract_if=use_contract_if)
class TestImportedContract(BaseImportedContract):
"""
Tests for ContractInterface
"""
def test_basic_inherit_add_subnet(self):
"""
Basic test for inheriting after adding a subnet
"""
provider_tenant_name = 'inheritanceautomatedtest-provider'
consumer_tenant_name = 'inheritanceautomatedtest-consumer'
self.run_basic_test(provider_tenant_name, consumer_tenant_name)
class TestImportedContractFromTenantCommon(BaseImportedContract):
"""
Tests for ContractInterface when Contract is imported from Tenant common
"""
def delete_tenants(self, provider_tenant_name, consumer_tenant_name):
"""
Delete the tenants. Called before and after tests automatically
:param provider_tenant_name: String containing the tenant name exporting the contract
:param consumer_tenant_name: String containing the tenant name consuming the imported contract
:return: None
"""
provider_tenant = Tenant(provider_tenant_name)
app = AppProfile('myinheritanceapp', provider_tenant)
app.mark_as_deleted()
contract = Contract('mycontract', provider_tenant)
contract.mark_as_deleted()
consumer_tenant = Tenant(consumer_tenant_name)
consumer_tenant.mark_as_deleted()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(4)
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
tenants = Tenant.get(apic)
for tenant in tenants:
self.assertTrue(tenant.name != consumer_tenant_name)
def setUp(self):
self.delete_tenants('common', 'inheritanceautomatedtest-consumer')
def tearDown(self):
self.delete_tenants('common', 'inheritanceautomatedtest-consumer')
def test_basic_inherit_add_subnet_provided_by_tenant_common(self):
"""
Basic test for ContractInterface when Contract is imported from Tenant common
"""
provider_tenant_name = 'common'
consumer_tenant_name = 'inheritanceautomatedtest-consumer'
self.run_basic_test(provider_tenant_name, consumer_tenant_name)
class TestImportedContractInterfaceFromTenantCommon(unittest.TestCase):
"""
Tests for contract exported from 1 tenant to tenant common and consumed by another tenant
"""
def delete_tenants(self):
"""
Delete the tenants. Called before and after tests automatically
:return: None
"""
# Login to the APIC
apic = Session(credentials.url, credentials.username, credentials.password)
resp = apic.login()
self.assertTrue(resp.ok)
# Delete the tenant common ContractInterface
common_tenant = Tenant('common')
contract_if = ContractInterface('contract-a-exported', common_tenant)
contract_if.mark_as_deleted()
resp = common_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
# Delete the consumer tenant
consumer_tenant = Tenant('inheritanceautomatedtest-consumer')
consumer_tenant.mark_as_deleted()
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
# Delete the provider tenant
provider_tenant = Tenant('inheritanceautomatedtest-provider')
provider_tenant.mark_as_deleted()
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
# Delete the consumer tenant
consumer_tenant = Tenant('inheritanceautomatedtest-consumer')
consumer_tenant.mark_as_deleted()
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
tenants = Tenant.get(apic)
for tenant in tenants:
self.assertTrue(tenant.name != consumer_tenant.name and tenant.name != provider_tenant.name)
def setUp(self):
self.delete_tenants()
def tearDown(self):
self.delete_tenants()
def verify_inherited(self, apic, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
fabric = Fabric()
tenants = Tenant.get_deep(apic,
names=['common',
'inheritanceautomatedtest-provider',
'inheritanceautomatedtest-consumer'],
parent=fabric)
self.assertTrue(len(tenants) > 0)
consumer_tenant = None
provider_tenant = None
common_tenant = None
for tenant in tenants:
if tenant.name == 'inheritanceautomatedtest-consumer':
consumer_tenant = tenant
if tenant.name == 'inheritanceautomatedtest-provider':
provider_tenant = tenant
if tenant.name == 'common':
common_tenant = tenant
self.assertIsNotNone(consumer_tenant)
self.assertIsNotNone(provider_tenant)
self.assertIsNotNone(common_tenant)
l3out = consumer_tenant.get_child(OutsideL3, 'myl3out')
self.assertIsNotNone(l3out)
childepg = l3out.get_child(OutsideEPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsConsIf:contract-a-exported'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsConsIf:contract-a-exported'))
contract_if = consumer_tenant.get_child(ContractInterface, 'contract-a-exported')
self.assertIsNone(contract_if)
contract_if = common_tenant.get_child(ContractInterface, 'contract-a-exported')
self.assertEqual(contract_if.get_parent(), common_tenant)
if not_inherited:
self.assertFalse(childepg.does_consume_cif(contract_if))
else:
self.assertTrue(childepg.does_consume_cif(contract_if))
def verify_not_inherited(self, apic):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
self.verify_inherited(apic, not_inherited=True)
def setup_tenants(self, apic):
"""
Setup 2 tenants with 1 providing a contract that is consumed by the
other tenant
:param apic: Session instance that is assumed to be logged into the APIC
:return: None
"""
provider_tenant = Tenant('inheritanceautomatedtest-provider')
app = AppProfile('myinheritanceapp', provider_tenant)
epg = EPG('myepg', app)
contract = Contract('mycontract', provider_tenant)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
epg.provide(contract)
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
common_tenant = Tenant('common')
contract_if = ContractInterface('contract-a-exported', common_tenant)
contract_if.import_contract(contract)
resp = common_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
consumer_tenant = Tenant('inheritanceautomatedtest-consumer')
context = Context('mycontext', consumer_tenant)
l3out = OutsideL3('myl3out', consumer_tenant)
parent_epg = OutsideEPG('parentepg', l3out)
parent_network = OutsideNetwork('5.1.1.1', parent_epg)
parent_network.ip = '5.1.1.1/8'
child_epg = OutsideEPG('childepg', l3out)
parent_epg.consume_cif(contract_if)
consumer_tenant_json = consumer_tenant.get_json()
for child in consumer_tenant_json['fvTenant']['children']:
if 'vzCPIf' in child:
consumer_tenant_json['fvTenant']['children'].remove(child)
resp = apic.push_to_apic(consumer_tenant.get_url(), consumer_tenant_json)
self.assertTrue(resp.ok)
def test_basic_inherit(self):
"""
Basic test for when ContractInterface is imported from Tenant common
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest-consumer",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest-consumer",
"epg_container": {
"name": "myl3out",
"container_type": "l3out"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenants(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic)
# Add the child subnet
tenant = Tenant('inheritanceautomatedtest-consumer')
l3out = OutsideL3('myl3out', tenant)
child_epg = OutsideEPG('childepg', l3out)
child_network = OutsideNetwork('5.2.1.1', child_epg)
child_network.ip = '5.2.1.1/16'
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
class TestContractFromTenantCommonUsedInTenant(BaseImportedContract):
"""
Tests for when Contract is imported from Tenant common not using ContractInterface
"""
def delete_tenants(self, provider_tenant_name, consumer_tenant_name, use_contract_if=True):
"""
Delete the tenants. Called before and after tests automatically
:param provider_tenant_name: String containing the tenant name exporting the contract
:param consumer_tenant_name: String containing the tenant name consuming the imported contract
:return: None
"""
provider_tenant = Tenant(provider_tenant_name)
app = AppProfile('myinheritanceapp', provider_tenant)
app.mark_as_deleted()
contract = Contract('mycontract', provider_tenant)
contract.mark_as_deleted()
consumer_tenant = Tenant(consumer_tenant_name)
consumer_tenant.mark_as_deleted()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(4)
resp = provider_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
resp = consumer_tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
time.sleep(2)
tenants = Tenant.get(apic)
for tenant in tenants:
self.assertTrue(tenant.name != consumer_tenant_name)
def setUp(self):
self.delete_tenants('common', 'inheritanceautomatedtest-consumer')
def tearDown(self):
self.delete_tenants('common', 'inheritanceautomatedtest-consumer')
def test_basic_inherit_add_subnet_provided_by_tenant_common(self):
"""
Basic test for consuming the Contract directly from Tenant common, without using a ContractInterface
"""
provider_tenant_name = 'common'
consumer_tenant_name = 'inheritanceautomatedtest-consumer'
self.run_basic_test(provider_tenant_name, consumer_tenant_name, use_contract_if=False)
class TestBasicAppProfile(BaseTestCase):
"""
Basic Inheritance test cases enabled on Application Profile EPGs
"""
def setup_tenant(self, apic):
"""
Setup the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
context = Context('mycontext', tenant)
app = AppProfile('myapp', tenant)
parent_epg = EPG('parentepg', app)
child_epg = EPG('childepg', app)
contract = Contract('mycontract', tenant)
parent_epg.provide(contract)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def verify_inherited(self, apic, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
app = tenant.get_child(AppProfile, 'myapp')
self.assertIsNotNone(app)
childepg = app.get_child(EPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract'))
contract = tenant.get_child(Contract, 'mycontract')
self.assertIsNotNone(contract)
if not_inherited:
self.assertFalse(childepg.does_provide(contract))
else:
self.assertTrue(childepg.does_provide(contract))
def verify_not_inherited(self, apic):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
self.verify_inherited(apic, not_inherited=True)
def test_basic_inherit_contract(self):
"""
Basic inherit contract test
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "childepg"
},
"allowed": True,
"enabled": True,
"inherit_from": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "parentepg"
}
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
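# Note: these application-profile policies name the parent EPG explicitly via
# 'inherit_from', which the L3Out-based configurations earlier in the file do not use.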
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(4)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
tool.exit()
# self.delete_tenant()
def test_basic_inheritance_disallowed(self):
"""
Basic test for when inheritance is disallowed
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "childepg"
},
"allowed": True,
"enabled": True
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "parentepg"
},
"allowed": False,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract has not been inherited by the child EPG since inheritance is not allowed
self.verify_not_inherited(apic)
# self.delete_tenant()
tool.exit()
def test_basic_inheritance_disabled(self):
"""
Basic test for when inheritance is disabled
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "childepg"
},
"allowed": True,
"enabled": False
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
# Verify that the contract has not been inherited by the child EPG since inheritance is disabled
self.verify_not_inherited(apic)
tool.exit()
# self.delete_tenant()
def test_get_config(self):
"""
Basic test for getting the configuration
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "childepg"
},
"allowed": True,
"enabled": False
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(2)
config = tool.get_config()
self.assertEqual(config, config_json)
tool.exit()
class TestBasicToolRestart(BaseTestCase):
"""
Basic Inheritance test cases for when the inheritance tool is run and then restarted
"""
def setup_tenant(self, apic, provide_contract=True):
"""
Setup the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:param provide_contract: Boolean indicating whether the parent EPG should provide the contract
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
context = Context('mycontext', tenant)
app = AppProfile('myapp', tenant)
parent_epg = EPG('parentepg', app)
child_epg = EPG('childepg', app)
if provide_contract:
contract = Contract('mycontract', tenant)
parent_epg.provide(contract)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def add_contract_to_parent(self, apic):
"""
Add the contract to the parent EPG
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
app = AppProfile('myapp', tenant)
parent_epg = EPG('parentepg', app)
contract = Contract('mycontract', tenant)
parent_epg.provide(contract)
entry = FilterEntry('webentry1',
applyToFrag='no',
arpOpc='unspecified',
dFromPort='80',
dToPort='80',
etherT='ip',
prot='tcp',
sFromPort='1',
sToPort='65535',
tcpRules='unspecified',
parent=contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def remove_contract_from_parent(self, apic):
"""
Remove the contract previously added in the setup of the tenant configuration
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
tenant = Tenant('inheritanceautomatedtest')
app = AppProfile('myapp', tenant)
parent_epg = EPG('parentepg', app)
contract = Contract('mycontract', tenant)
parent_epg.dont_provide(contract)
resp = tenant.push_to_apic(apic)
self.assertTrue(resp.ok)
def verify_inherited(self, apic, contract_provided=True, not_inherited=False):
"""
Verify that the contracts have properly been inherited (or not inherited)
:param apic: Session instance assumed to be logged into the APIC
:param contract_provided: Boolean indicating whether the contract is expected to be present in the tenant configuration
:param not_inherited: Boolean to indicate whether to verify that the contracts have properly been inherited or not
:return: None
"""
tenants = Tenant.get_deep(apic, names=['inheritanceautomatedtest'])
self.assertTrue(len(tenants) > 0)
tenant = tenants[0]
app = tenant.get_child(AppProfile, 'myapp')
self.assertIsNotNone(app)
childepg = app.get_child(EPG, 'childepg')
self.assertIsNotNone(childepg)
if not_inherited:
self.assertFalse(childepg.has_tag('inherited:fvRsProv:mycontract'))
else:
self.assertTrue(childepg.has_tag('inherited:fvRsProv:mycontract'))
contract = tenant.get_child(Contract, 'mycontract')
if not contract_provided:
self.assertIsNone(contract)
return
self.assertIsNotNone(contract)
if not_inherited:
self.assertFalse(childepg.does_provide(contract))
else:
self.assertTrue(childepg.does_provide(contract))
def verify_not_inherited(self, apic, contract_provided=True):
"""
Verify that the contracts have not been inherited
:param apic: Session instance assumed to be logged into the APIC
:return: None
"""
self.verify_inherited(apic, contract_provided=contract_provided, not_inherited=True)
@staticmethod
def get_config():
"""
Get the configuration
:return: Dictionary containing the JSON configuration
"""
config_json = {
"apic": {
"user_name": credentials.username,
"password": credentials.password,
"ip_address": credentials.ip_address,
"use_https": False
},
"inheritance_policies": [
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "childepg"
},
"allowed": True,
"enabled": True,
"inherit_from": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "parentepg"
}
},
{
"epg": {
"tenant": "inheritanceautomatedtest",
"epg_container": {
"name": "myapp",
"container_type": "app"
},
"name": "parentepg"
},
"allowed": True,
"enabled": False
}
]
}
return config_json
def test_basic_inherit_contract_add_parent_contract_during_outage(self):
"""
Basic inherit contract test where the parent contract is added during the outage
"""
config_json = self.get_config()
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic, provide_contract=False)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(4)
# Verify that the contract is not inherited by the child EPG
self.verify_not_inherited(apic, contract_provided=False)
tool.exit()
time.sleep(4)
# Add the contract to the parent EPG while the tool is stopped
self.add_contract_to_parent(apic)
# Start the tool again
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(4)
# Verify that the contract is now inherited by the child EPG
self.verify_inherited(apic)
tool.exit()
def test_basic_inherit_contract_remove_parent_contract_during_outage(self):
"""
Basic inherit contract test where the parent contract is removed during the outage
"""
config_json = self.get_config()
args = TestArgs()
apic = Session(credentials.url, credentials.username, credentials.password)
apic.login()
self.setup_tenant(apic, provide_contract=True)
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(4)
# Verify that the contract is now inherited by the child EPG
tool.exit()
self.verify_inherited(apic)
time.sleep(4)
# Remove the contract from the parent EPG
self.remove_contract_from_parent(apic)
time.sleep(2)
# Start the tool again
tool = execute_tool(args)
tool.add_config(config_json)
time.sleep(6)
# Verify that the contract is no longer inherited by the child EPG
tool.exit()
self.verify_not_inherited(apic)
credentials = ApicCredentials()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='ACI Inheritance Tool')
parser.add_argument('--config', default=None,
help='.ini file providing APIC credentials')
parser.add_argument('--maxlogfiles', type=int, default=10,
help='Maximum number of log files (default is 10)')
parser.add_argument('--debug', nargs='?',
choices=['verbose', 'warnings', 'critical'],
const='critical',
help='Enable debug messages.')
args, unittest_args = parser.parse_known_args()
# Deal with logging
if args.debug is not None:
if args.debug == 'verbose':
level = logging.DEBUG
elif args.debug == 'warnings':
level = logging.WARNING
else:
level = logging.CRITICAL
else:
level = logging.CRITICAL
format_string = '%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s'
log_formatter = logging.Formatter(format_string)
log_file = 'inheritance_test.%s.log' % str(getpid())
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=5 * 1024 * 1024,
backupCount=args.maxlogfiles,
encoding=None, delay=0)
my_handler.setLevel(level)
my_handler.setFormatter(log_formatter)
logging.getLogger().addHandler(my_handler)
logging.getLogger().setLevel(level)
# Deal with credentials
config_filename = args.config
if config_filename is None:
config_filename = DEFAULT_INI_FILENAME
credentials.set_config(config_filename)
if credentials.ip_address == '0.0.0.0':
print('APIC credentials not given. Please ensure that there is a .ini file present and credentials are filled in.')
sys.exit()
# Run the tests
live = unittest.TestSuite()
live.addTest(unittest.makeSuite(TestWithoutApicCommunication))
live.addTest(unittest.makeSuite(TestBasicL3Out))
live.addTest(unittest.makeSuite(TestContractEvents))
live.addTest(unittest.makeSuite(TestSubnetEvents))
live.addTest(unittest.makeSuite(TestImportedContract))
live.addTest(unittest.makeSuite(TestImportedContractFromTenantCommon))
live.addTest(unittest.makeSuite(TestBasicAppProfile))
unittest.main(defaultTest='live', argv=sys.argv[:1] + unittest_args)
| 37.037022
| 122
| 0.535111
| 8,340
| 92,037
| 5.744484
| 0.047362
| 0.020664
| 0.015738
| 0.017951
| 0.847774
| 0.826463
| 0.813
| 0.803899
| 0.778977
| 0.761214
| 0
| 0.010352
| 0.374447
| 92,037
| 2,484
| 123
| 37.051932
| 0.821777
| 0.031998
| 0
| 0.743059
| 0
| 0.001089
| 0.142089
| 0.040625
| 0
| 0
| 0
| 0
| 0.076211
| 0
| null | null | 0.025585
| 0.010343
| null | null | 0.000544
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
187c66f7b32e549ad6671c64c990b5ae4eee0e54
| 3,936
|
py
|
Python
|
number_dict.py
|
mr-yamraj/Alexa_2048_game
|
a85f43bc46ac53b30a223034a37cbfe54d1703cd
|
[
"MIT"
] | null | null | null |
number_dict.py
|
mr-yamraj/Alexa_2048_game
|
a85f43bc46ac53b30a223034a37cbfe54d1703cd
|
[
"MIT"
] | null | null | null |
number_dict.py
|
mr-yamraj/Alexa_2048_game
|
a85f43bc46ac53b30a223034a37cbfe54d1703cd
|
[
"MIT"
] | null | null | null |
number_dict = {
"0" : {
"color" : (187,173,160),
"font_size" : 45,
"backgroud_color" : (205,193,180),
"coordinate" : [(0,0), (0,0), (0,0), (0,0)]
},
"2" : {
"color" : (119, 110, 101),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (238, 228, 218),
"coordinate" : [(40,10), (30,3), (25,2), (22,3)]
},
"4" : {
"color" : (119, 110, 101),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (237, 224, 200),
"coordinate" : [(40,10), (30,3), (25,2), (22,3)]
},
"8" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (242, 177, 121),
"coordinate" : [(40,10), (30,3), (25,2), (22,3)]
},
"16" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (235, 140, 82),
"coordinate" : [(15,10), (8,3), (6,2), (6,3)]
},
"32" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (245, 124, 95),
"coordinate" : [(20,10), (10,3), (8,2), (9,3)]
},
"64" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (233, 89, 55),
"coordinate" : [(20,10), (10,3), (8,2), (9,3)]
},
"128" : {
"color" : (249, 246, 242),
"font_size" : [50, 40, 30, 25],
"backgroud_color" : (242, 216, 106),
"coordinate" : [(15,25), (10,15), (10,15), (10,15)]
},
"256" : {
"color" : (249, 246, 242),
"font_size" : [50, 40, 30, 25],
"backgroud_color" : (237, 202, 75),
"coordinate" : [(15,25), (10,15), (10,15), (10,15)]
},
"512" : {
"color" : (249, 246, 242),
"font_size" : [50, 40, 30, 25],
"backgroud_color" : (228, 192, 42),
"coordinate" : [(15,25), (10,15), (10,15), (10,15)]
},
"1024" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (237, 195, 20),
"coordinate" : [(11,30), (8,23), (8,20), (8,18)]
},
"2048" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (237, 195, 20),
"coordinate" : [(13,30), (10,23), (10,20), (10,18)]
},
"4096" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(13,30), (10,23), (10,20), (10,18)]
},
"8192" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(13,30), (10,23), (10,20), (10,18)]
},
"16384" : {
"color" : (249, 246, 242),
"font_size" : [32, 24, 19, 16],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,35), (10,28), (10,25), (9,20)]
},
"32768" : {
"color" : (249, 246, 242),
"font_size" : [32, 24, 19, 16],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(12,35), (11,28), (11,25), (10,20)]
},
"65536" : {
"color" : (249, 246, 242),
"font_size" : [32, 24, 19, 16],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(14,35), (12,28), (11,25), (10,20)]
},
"131072" : {
"color" : (249, 246, 242),
"font_size" : [28, 20, 16, 13],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(10,37), (10,32), (9,26), (9,23)]
},
"262144" : {
"color" : (249, 246, 242),
"font_size" : [28, 20, 16, 13],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,39), (11,32), (10,26), (9,23)]
},
"524288" : {
"color" : (249, 246, 242),
"font_size" : [28, 20, 16, 13],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(12,37), (11,32), (11,26), (9,23)]
},
"1048576" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(9,42), (9,33), (8,28), (8,24)]
},
"2097152" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,42), (10,33), (9,28), (9,24)]
},
"4194304" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(10,42), (10,33), (9,28), (9,24)]
},
"8388608" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,42), (11,33), (10,28), (9,24)]
}
}
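# A minimal usage sketch (not part of the original game code): look up the rendering
# attributes for a tile value. Interpreting the per-key lists as one entry per layout
# variant (e.g. per board size) is an assumption; the helper name is hypothetical.
def get_tile_style(value, variant=0):
    attrs = number_dict[str(value)]
    font_size = attrs["font_size"]
    # The "0" tile stores a single font size; all other tiles store one per variant.
    if isinstance(font_size, list):
        font_size = font_size[variant]
    return {
        "text_color": attrs["color"],
        "background": attrs["backgroud_color"],  # key spelled as in the dict above
        "font_size": font_size,
        "offset": attrs["coordinate"][variant],
    }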
| 26.958904
| 52
| 0.495681
| 599
| 3,936
| 3.175292
| 0.161937
| 0.100946
| 0.121451
| 0.154574
| 0.813354
| 0.802839
| 0.802839
| 0.780757
| 0.780757
| 0.672976
| 0
| 0.31652
| 0.194106
| 3,936
| 146
| 53
| 26.958904
| 0.283102
| 0
| 0
| 0.486301
| 0
| 0
| 0.262129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
43ea7669cd6c1a4397f47f103a9131112980bf23
| 3,616
|
py
|
Python
|
tests/python/highgui/seek_test.py
|
qingswu/OpenCV1.2
|
2b57353be30b986c051a6037458d8eb8ee6014e1
|
[
"BSD-3-Clause"
] | 2
|
2018-11-28T08:12:50.000Z
|
2021-05-10T02:15:45.000Z
|
tests/python/highgui/seek_test.py
|
qingswu/OpenCV1.2
|
2b57353be30b986c051a6037458d8eb8ee6014e1
|
[
"BSD-3-Clause"
] | null | null | null |
tests/python/highgui/seek_test.py
|
qingswu/OpenCV1.2
|
2b57353be30b986c051a6037458d8eb8ee6014e1
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This script will test highgui's seek functionality
for different video formats
"""
# import the necessary things for OpenCV and comparson routine
import os
#import python
#from python.highgui import *
#from python.cv import *
import match
from highgui import *
from cv import *
# path to videos and images we need
PREFIX=os.path.join(os.environ["srcdir"],"../../opencv_extra/testdata/python/")
# this is the folder with the videos and images
# and name of output window
IMAGES = PREFIX+"images/"
VIDEOS = PREFIX+"videos/"
show_frames=False
# testing routine, seeks through file and compares read images with frames in frames.QCIF[]
def seek_frame_ok(FILENAME,ERRORS):
# create a video reader using the tiny videofile VIDEOS+FILENAME
video=cvCreateFileCapture(VIDEOS+FILENAME)
if video is None:
# couldn't open video (FAIL)
return 1
if show_frames:
cvNamedWindow("test", CV_WINDOW_AUTOSIZE)
# skip 2 frames and read 3rd frame each until EOF and check if the read image is ok
for k in [0,3,6,9,12,15,18,21,24,27]:
cvSetCaptureProperty(video, CV_CAP_PROP_POS_FRAMES, k)
# try to query frame
image=cvQueryFrame(video)
if image is None:
# returned image is NULL (FAIL)
return 1
compresult = match.match(image,k,ERRORS[k])
if not compresult:
return 1
if show_frames:
cvShowImage("test",image)
cvWaitKey(200)
# same as above, just backwards...
for k in [27,24,21,18,15,12,9,6,3,0]:
cvSetCaptureProperty(video, CV_CAP_PROP_POS_FRAMES, k)
# try to query frame
image=cvQueryFrame(video)
if image is None:
# returned image is NULL (FAIL)
return 1
compresult = match.match(image,k,ERRORS[k])
if not compresult:
return 1
if show_frames:
cvShowImage("test",image)
cvWaitKey(200)
# ATTENTION: We do not release the video reader, window or any image.
# This is bad manners, but Python and OpenCV don't care,
# the whole memory segment will be freed on finish anyway...
del video
# everything is fine (PASS)
return 0
# testing routine, seeks through file and compares read images with frames in frames.QCIF[]
def seek_time_ok(FILENAME,ERRORS):
# create a video reader using the tiny videofile VIDEOS+FILENAME
video=cvCreateFileCapture(VIDEOS+FILENAME)
if video is None:
# couldn't open video (FAIL)
return 1
if show_frames:
cvNamedWindow("test", CV_WINDOW_AUTOSIZE)
# skip 2 frames and read 3rd frame each until EOF and check if the read image is ok
for k in [0,3,6,9,12,15,18,21,24,27]:
cvSetCaptureProperty(video, CV_CAP_PROP_POS_MSEC, k*40)
# try to query frame
image=cvQueryFrame(video)
if image is None:
# returned image is NULL (FAIL)
return 1
compresult = match.match(image,k,ERRORS[k])
if not compresult:
return 1
if show_frames:
cvShowImage("test",image)
cvWaitKey(200)
# same as above, just backwards...
for k in [27,24,21,18,15,12,9,6,3,0]:
cvSetCaptureProperty(video, CV_CAP_PROP_POS_MSEC, k*40)
# try to query frame
image=cvQueryFrame(video)
if image is None:
# returned image is NULL (FAIL)
return 1
compresult = match.match(image,k,ERRORS[k])
if not compresult:
return 1
if show_frames:
cvShowImage("test",image)
cvWaitKey(200)
# ATTENTION: We do not release the video reader, window or any image.
# This is bad manners, but Python and OpenCV don't care,
# the whole memory segment will be freed on finish anyway...
del video
# everything is fine (PASS)
return 0
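# A minimal usage sketch, assuming a sample video name and uniform per-frame
# tolerances (both placeholders -- the real harness supplies its own values):
# both helpers return 0 on PASS and 1 on FAIL, so any non-zero result is a failure.
if __name__ == "__main__":
    errors = [0.02] * 28  # per-frame comparison tolerances, indexed by frame number
    failed = seek_frame_ok("qcif.avi", errors) or seek_time_ok("qcif.avi", errors)
    print("FAIL" if failed else "PASS")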
| 24.598639
| 91
| 0.692754
| 554
| 3,616
| 4.463899
| 0.252708
| 0.028306
| 0.026688
| 0.031541
| 0.829761
| 0.829761
| 0.829761
| 0.829761
| 0.829761
| 0.829761
| 0
| 0.034067
| 0.220686
| 3,616
| 146
| 92
| 24.767123
| 0.843506
| 0.418142
| 0
| 0.852941
| 0
| 0
| 0.038275
| 0.016957
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0
| 0.058824
| 0
| 0.264706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a142869de3f973950ac47e1b91112959d2ce59bd
| 556
|
py
|
Python
|
More On Loops/Rectangular Numbers.py
|
SaiPrasanth212/Coding-ninjas-Introduction-To-Python
|
f6aabc3b7b0f2ae82e2870c8f2bd1f37e3fe3005
|
[
"MIT"
] | 2
|
2021-12-13T19:28:40.000Z
|
2022-03-07T16:36:29.000Z
|
More On Loops/Rectangular Numbers.py
|
SaiPrasanth212/Coding-ninjas-Introduction-To-Python
|
f6aabc3b7b0f2ae82e2870c8f2bd1f37e3fe3005
|
[
"MIT"
] | null | null | null |
More On Loops/Rectangular Numbers.py
|
SaiPrasanth212/Coding-ninjas-Introduction-To-Python
|
f6aabc3b7b0f2ae82e2870c8f2bd1f37e3fe3005
|
[
"MIT"
] | null | null | null |
n = int(input())
for i in range(1,n+1):
temp = n
for j in range(1,i):
print(temp,end="")
temp = temp -1
for j in range(1,(2*n) - (2*i) + 2):
print(n-i+1,end="")
for j in range(1,i):
temp = temp+1
print(temp,end="")
print()
for i in range(n-1,0,-1):
temp = n
for j in range(1,i):
print(temp,end="")
temp = temp - 1
for j in range(1,(2*n) - (2*i) + 2):
print(n-i+1,end="")
for j in range(1,i):
temp = temp+1
print(temp,end="")
print()
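# For example, with input 3 the loops above print the rectangular pattern
#   33333
#   32223
#   32123
#   32223
#   33333
# The first outer loop prints rows 1..n (top half including the middle row) and the
# second prints rows n-1..1, mirroring the top half below it.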
| 20.592593
| 40
| 0.44964
| 102
| 556
| 2.45098
| 0.137255
| 0.224
| 0.224
| 0.264
| 0.856
| 0.856
| 0.856
| 0.856
| 0.856
| 0.856
| 0
| 0.063712
| 0.350719
| 556
| 26
| 41
| 21.384615
| 0.628809
| 0
| 0
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.347826
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a18daf77e6e05a9d2defb68575af11516accf50c
| 3,151
|
py
|
Python
|
ticket/ticket_queries.py
|
pythonkr/pyconkr-api
|
077e122a0af37122c5b424870cf91b8fca91a9f5
|
[
"Apache-2.0"
] | 25
|
2018-12-09T07:56:16.000Z
|
2020-12-24T08:20:41.000Z
|
ticket/ticket_queries.py
|
pythonkr/pyconkr-api
|
077e122a0af37122c5b424870cf91b8fca91a9f5
|
[
"Apache-2.0"
] | 100
|
2018-12-13T02:01:42.000Z
|
2022-03-11T23:40:25.000Z
|
ticket/ticket_queries.py
|
pythonkr/pyconkr-api
|
077e122a0af37122c5b424870cf91b8fca91a9f5
|
[
"Apache-2.0"
] | 8
|
2019-01-05T05:02:27.000Z
|
2019-08-09T08:14:49.000Z
|
TICKET_PRODUCTS = '''
query getTicketProducts {
tutorialProducts {
id
type
name
nameKo
nameEn
desc
descKo
descEn
warning
warningKo
warningEn
startAt
finishAt
total
remainingCount
isSoldOut
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
isPurchased
}
conferenceProducts {
id
type
name
nameKo
nameEn
desc
descKo
descEn
warning
warningKo
warningEn
startAt
finishAt
total
remainingCount
isSoldOut
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
isPurchased
}
}
'''
BUY_TICKET = '''
mutation BuyTicket($productId: ID!, $payment: PaymentInput!, $options: JSONString) {
buyTicket(productId:$productId, payment: $payment, options:$options) {
ticket{
id
amount
merchantUid
impUid
pgTid
receiptUrl
paidAt
status
}
}
}
'''
MY_TICKETS = '''
query getMyTickets {
myTickets {
isDomesticCard
amount
merchantUid
receiptUrl
paidAt
cancelReceiptUrl
cancelledAt
status
product{
id
type
name
nameKo
nameEn
desc
descKo
descEn
startAt
finishAt
total
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
}
options
}
}
'''
TICKET = '''
query getTicket($globalId: ID, $id: Int) {
ticket(globalId: $globalId, id: $id) {
isDomesticCard
amount
merchantUid
receiptUrl
paidAt
cancelReceiptUrl
cancelledAt
status
product{
id
type
name
nameKo
nameEn
desc
descKo
descEn
startAt
finishAt
total
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
}
options
}
}
'''
CANCEL_TICKET = '''
mutation cancelTicket($ticketId: ID!) {
cancelTicket(ticketId:$ticketId) {
ticket{
id
status
impUid
pgTid
receiptUrl
paidAt
cancelReceiptUrl
cancelledAt
}
}
}
'''
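The constants above are plain GraphQL documents kept as Python strings. A minimal sketch of how such a constant could be posted to a GraphQL endpoint follows; the endpoint URL, the use of the requests library, and the auth header are illustrative assumptions, not part of ticket_queries.py:

import requests

def run_query(query, variables=None, endpoint="https://example.org/graphql", token=None):
    # Hypothetical helper: POST the query document and its variables as JSON.
    headers = {"Authorization": "Bearer " + token} if token else {}
    response = requests.post(
        endpoint,
        json={"query": query, "variables": variables or {}},
        headers=headers,
    )
    response.raise_for_status()
    return response.json()

# e.g. run_query(TICKET_PRODUCTS), or
# run_query(BUY_TICKET, {"productId": "...", "payment": {...}, "options": "{}"})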
| 14.520737
| 84
| 0.526817
| 215
| 3,151
| 7.702326
| 0.311628
| 0.048309
| 0.077295
| 0.038647
| 0.704106
| 0.704106
| 0.704106
| 0.704106
| 0.704106
| 0.704106
| 0
| 0
| 0.433196
| 3,151
| 216
| 85
| 14.587963
| 0.927212
| 0
| 0
| 0.814286
| 0
| 0
| 0.965725
| 0.033957
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1990601a445234322c849c04e29628eba4a7fd3
| 58,940
|
py
|
Python
|
infoblox_netmri/api/broker/v3_8_0/end_host_mac_address_broker.py
|
infobloxopen/infoblox_netmri
|
aa1c744df7e439dbe163bb9edd165e4e85a9771b
|
[
"Apache-2.0"
] | 12
|
2016-02-19T12:37:54.000Z
|
2022-03-04T20:11:08.000Z
|
infoblox_netmri/api/broker/v3_8_0/end_host_mac_address_broker.py
|
azinfoblox/infoblox-netmri
|
02372c5231e2677ab6299cb659a73c9a41b4b0f4
|
[
"Apache-2.0"
] | 18
|
2015-11-12T18:37:00.000Z
|
2021-05-19T07:59:55.000Z
|
infoblox_netmri/api/broker/v3_8_0/end_host_mac_address_broker.py
|
azinfoblox/infoblox-netmri
|
02372c5231e2677ab6299cb659a73c9a41b4b0f4
|
[
"Apache-2.0"
] | 18
|
2016-01-07T12:04:34.000Z
|
2022-03-31T11:05:41.000Z
|
from ..broker import Broker
class EndHostMacAddressBroker(Broker):
controller = "end_host_mac_addresses"
def index(self, **kwargs):
"""Lists the available end host mac addresses. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the associated Device record.
:type DeviceID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndHostMACAddressID: The internal NetMRI identifier for the End Host MAC Address entry.
:type EndHostMACAddressID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IPAddress: The IP address of the end host.
:type IPAddress: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InfraDeviceID: The internal NetMRI identifier for the InfraDevice on which the end host was found.
:type InfraDeviceID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InterfaceID: The internal NetMRI identifier for the interface on which the end host was found.
:type InterfaceID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param MACAddress: The MAC address of the end host.
:type MACAddress: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NeighborID: The internal NetMRI identifier for the associated Neighbor record.
:type NeighborID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` EndHostMACAddressID
:param sort: The data field(s) to use for sorting the output. Default is EndHostMACAddressID. Valid values are EndHostMACAddressID, NetworkID, Network, MACAddress, IPAddress, IPAddressNumeric, DataSourceID, DeviceType, DeviceName, DeviceNetBIOSName, DeviceID, ifIndex, InterfaceID, InfraDeviceID, NeighborID, EndHostMACAddressTimestamp, FirstSeenTime, EndHostMACAddressStartTime, EndHostMACAddressEndTime, EndHostMACAddressChangedCols.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each EndHostMacAddress. Valid values are EndHostMACAddressID, NetworkID, Network, MACAddress, IPAddress, IPAddressNumeric, DataSourceID, DeviceType, DeviceName, DeviceNetBIOSName, DeviceID, ifIndex, InterfaceID, InfraDeviceID, NeighborID, EndHostMACAddressTimestamp, FirstSeenTime, EndHostMACAddressStartTime, EndHostMACAddressEndTime, EndHostMACAddressChangedCols. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return end_host_mac_addresses: An array of the EndHostMacAddress objects that match the specified input criteria.
:rtype end_host_mac_addresses: Array of EndHostMacAddress
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
def show(self, **kwargs):
"""Shows the details for the specified end host mac address.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param EndHostMACAddressID: The internal NetMRI identifier for the End Host MAC Address entry.
:type EndHostMACAddressID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return end_host_mac_address: The end host mac address identified by the specified EndHostMACAddressID.
:rtype end_host_mac_address: EndHostMacAddress
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
def search(self, **kwargs):
"""Lists the available end host mac addresses matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the associated Device record.
:type DeviceID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceName: The determined name of the end host.
:type DeviceName: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceNetBIOSName: The NetBIOS name of the end host.
:type DeviceNetBIOSName: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceType: The determined type of the end host.
:type DeviceType: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndHostMACAddressChangedCols: The fields that changed between this revision of the record and the previous revision.
:type EndHostMACAddressChangedCols: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndHostMACAddressEndTime: The ending effective time of this record, or empty if still in effect.
:type EndHostMACAddressEndTime: Array of DateTime
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndHostMACAddressID: The internal NetMRI identifier for the End Host MAC Address entry.
:type EndHostMACAddressID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndHostMACAddressStartTime: The starting effective time of this record.
:type EndHostMACAddressStartTime: Array of DateTime
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndHostMACAddressTimestamp: The date and time this record was collected or calculated.
:type EndHostMACAddressTimestamp: Array of DateTime
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FirstSeenTime: The date and time this record was first seen.
:type FirstSeenTime: Array of DateTime
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IPAddress: The IP address of the end host.
:type IPAddress: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IPAddressNumeric: The IP address of the end host, in numerical form.
:type IPAddressNumeric: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InfraDeviceID: The internal NetMRI identifier for the InfraDevice on which the end host was found.
:type InfraDeviceID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InterfaceID: The internal NetMRI identifier for the interface on which the end host was found.
:type InterfaceID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param MACAddress: The MAC address of the end host.
:type MACAddress: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NeighborID: The internal NetMRI identifier for the associated Neighbor record.
:type NeighborID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param Network: The name of the Network View associated.
:type Network: Array of String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NetworkID: The internal NetMRI identifier of the associated network.
:type NetworkID: Array of Integer
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifIndex: The interface index on which the end host was found.
:type ifIndex: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` EndHostMACAddressID
:param sort: The data field(s) to use for sorting the output. Default is EndHostMACAddressID. Valid values are EndHostMACAddressID, NetworkID, Network, MACAddress, IPAddress, IPAddressNumeric, DataSourceID, DeviceType, DeviceName, DeviceNetBIOSName, DeviceID, ifIndex, InterfaceID, InfraDeviceID, NeighborID, EndHostMACAddressTimestamp, FirstSeenTime, EndHostMACAddressStartTime, EndHostMACAddressEndTime, EndHostMACAddressChangedCols.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each EndHostMacAddress. Valid values are EndHostMACAddressID, NetworkID, Network, MACAddress, IPAddress, IPAddressNumeric, DataSourceID, DeviceType, DeviceName, DeviceNetBIOSName, DeviceID, ifIndex, InterfaceID, InfraDeviceID, NeighborID, EndHostMACAddressTimestamp, FirstSeenTime, EndHostMACAddressStartTime, EndHostMACAddressEndTime, EndHostMACAddressChangedCols. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against end host mac addresses, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, DeviceID, DeviceName, DeviceNetBIOSName, DeviceType, EndHostMACAddressChangedCols, EndHostMACAddressEndTime, EndHostMACAddressID, EndHostMACAddressStartTime, EndHostMACAddressTimestamp, FirstSeenTime, IPAddress, IPAddressNumeric, InfraDeviceID, InterfaceID, MACAddress, NeighborID, Network, NetworkID, ifIndex.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if it is not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return end_host_mac_addresses: An array of the EndHostMacAddress objects that match the specified input criteria.
:rtype end_host_mac_addresses: Array of EndHostMacAddress
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
"""Lists the available end host mac addresses matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, DeviceID, DeviceName, DeviceNetBIOSName, DeviceType, EndHostMACAddressChangedCols, EndHostMACAddressEndTime, EndHostMACAddressID, EndHostMACAddressStartTime, EndHostMACAddressTimestamp, FirstSeenTime, IPAddress, IPAddressNumeric, InfraDeviceID, InterfaceID, MACAddress, NeighborID, Network, NetworkID, ifIndex.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the associated Device record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
:type val_f_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
:type val_c_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceName: The operator to apply to the field DeviceName. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceName: The determined name of the end host. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceName: If op_DeviceName is specified, the field named in this input will be compared to the value in DeviceName using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceName must be specified if op_DeviceName is specified.
:type val_f_DeviceName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceName: If op_DeviceName is specified, this value will be compared to the value in DeviceName using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceName must be specified if op_DeviceName is specified.
:type val_c_DeviceName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceNetBIOSName: The operator to apply to the field DeviceNetBIOSName. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceNetBIOSName: The NetBIOS name of the end host. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceNetBIOSName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceNetBIOSName: If op_DeviceNetBIOSName is specified, the field named in this input will be compared to the value in DeviceNetBIOSName using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceNetBIOSName must be specified if op_DeviceNetBIOSName is specified.
:type val_f_DeviceNetBIOSName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceNetBIOSName: If op_DeviceNetBIOSName is specified, this value will be compared to the value in DeviceNetBIOSName using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceNetBIOSName must be specified if op_DeviceNetBIOSName is specified.
:type val_c_DeviceNetBIOSName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceType: The operator to apply to the field DeviceType. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceType: The determined type of the end host. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceType: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceType: If op_DeviceType is specified, the field named in this input will be compared to the value in DeviceType using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceType must be specified if op_DeviceType is specified.
:type val_f_DeviceType: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceType: If op_DeviceType is specified, this value will be compared to the value in DeviceType using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceType must be specified if op_DeviceType is specified.
:type val_c_DeviceType: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_EndHostMACAddressChangedCols: The operator to apply to the field EndHostMACAddressChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. EndHostMACAddressChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_EndHostMACAddressChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_EndHostMACAddressChangedCols: If op_EndHostMACAddressChangedCols is specified, the field named in this input will be compared to the value in EndHostMACAddressChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_EndHostMACAddressChangedCols must be specified if op_EndHostMACAddressChangedCols is specified.
:type val_f_EndHostMACAddressChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_EndHostMACAddressChangedCols: If op_EndHostMACAddressChangedCols is specified, this value will be compared to the value in EndHostMACAddressChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_EndHostMACAddressChangedCols must be specified if op_EndHostMACAddressChangedCols is specified.
:type val_c_EndHostMACAddressChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_EndHostMACAddressEndTime: The operator to apply to the field EndHostMACAddressEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. EndHostMACAddressEndTime: The ending effective time of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_EndHostMACAddressEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_EndHostMACAddressEndTime: If op_EndHostMACAddressEndTime is specified, the field named in this input will be compared to the value in EndHostMACAddressEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_EndHostMACAddressEndTime must be specified if op_EndHostMACAddressEndTime is specified.
:type val_f_EndHostMACAddressEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_EndHostMACAddressEndTime: If op_EndHostMACAddressEndTime is specified, this value will be compared to the value in EndHostMACAddressEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_EndHostMACAddressEndTime must be specified if op_EndHostMACAddressEndTime is specified.
:type val_c_EndHostMACAddressEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_EndHostMACAddressID: The operator to apply to the field EndHostMACAddressID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. EndHostMACAddressID: The internal NetMRI identifier for the End Host MAC Address entry. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_EndHostMACAddressID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_EndHostMACAddressID: If op_EndHostMACAddressID is specified, the field named in this input will be compared to the value in EndHostMACAddressID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_EndHostMACAddressID must be specified if op_EndHostMACAddressID is specified.
:type val_f_EndHostMACAddressID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_EndHostMACAddressID: If op_EndHostMACAddressID is specified, this value will be compared to the value in EndHostMACAddressID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_EndHostMACAddressID must be specified if op_EndHostMACAddressID is specified.
:type val_c_EndHostMACAddressID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_EndHostMACAddressStartTime: The operator to apply to the field EndHostMACAddressStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. EndHostMACAddressStartTime: The starting effective time of this record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_EndHostMACAddressStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_EndHostMACAddressStartTime: If op_EndHostMACAddressStartTime is specified, the field named in this input will be compared to the value in EndHostMACAddressStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_EndHostMACAddressStartTime must be specified if op_EndHostMACAddressStartTime is specified.
:type val_f_EndHostMACAddressStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_EndHostMACAddressStartTime: If op_EndHostMACAddressStartTime is specified, this value will be compared to the value in EndHostMACAddressStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_EndHostMACAddressStartTime must be specified if op_EndHostMACAddressStartTime is specified.
:type val_c_EndHostMACAddressStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_EndHostMACAddressTimestamp: The operator to apply to the field EndHostMACAddressTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. EndHostMACAddressTimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_EndHostMACAddressTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_EndHostMACAddressTimestamp: If op_EndHostMACAddressTimestamp is specified, the field named in this input will be compared to the value in EndHostMACAddressTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_EndHostMACAddressTimestamp must be specified if op_EndHostMACAddressTimestamp is specified.
:type val_f_EndHostMACAddressTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_EndHostMACAddressTimestamp: If op_EndHostMACAddressTimestamp is specified, this value will be compared to the value in EndHostMACAddressTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_EndHostMACAddressTimestamp must be specified if op_EndHostMACAddressTimestamp is specified.
:type val_c_EndHostMACAddressTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FirstSeenTime: The operator to apply to the field FirstSeenTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FirstSeenTime: The date and time this record was first seen. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FirstSeenTime: If op_FirstSeenTime is specified, the field named in this input will be compared to the value in FirstSeenTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FirstSeenTime must be specified if op_FirstSeenTime is specified.
:type val_f_FirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FirstSeenTime: If op_FirstSeenTime is specified, this value will be compared to the value in FirstSeenTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FirstSeenTime must be specified if op_FirstSeenTime is specified.
:type val_c_FirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_IPAddress: The operator to apply to the field IPAddress. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. IPAddress: The IP address of the end host. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_IPAddress: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_IPAddress: If op_IPAddress is specified, the field named in this input will be compared to the value in IPAddress using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_IPAddress must be specified if op_IPAddress is specified.
:type val_f_IPAddress: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_IPAddress: If op_IPAddress is specified, this value will be compared to the value in IPAddress using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_IPAddress must be specified if op_IPAddress is specified.
:type val_c_IPAddress: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_IPAddressNumeric: The operator to apply to the field IPAddressNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. IPAddressNumeric: The IP address of the end host, in numerical form. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_IPAddressNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_IPAddressNumeric: If op_IPAddressNumeric is specified, the field named in this input will be compared to the value in IPAddressNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_IPAddressNumeric must be specified if op_IPAddressNumeric is specified.
:type val_f_IPAddressNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_IPAddressNumeric: If op_IPAddressNumeric is specified, this value will be compared to the value in IPAddressNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_IPAddressNumeric must be specified if op_IPAddressNumeric is specified.
:type val_c_IPAddressNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_InfraDeviceID: The operator to apply to the field InfraDeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. InfraDeviceID: The internal NetMRI identifier for the InfraDevice on which the end host was found. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_InfraDeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_InfraDeviceID: If op_InfraDeviceID is specified, the field named in this input will be compared to the value in InfraDeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_InfraDeviceID must be specified if op_InfraDeviceID is specified.
:type val_f_InfraDeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_InfraDeviceID: If op_InfraDeviceID is specified, this value will be compared to the value in InfraDeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_InfraDeviceID must be specified if op_InfraDeviceID is specified.
:type val_c_InfraDeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_InterfaceID: The operator to apply to the field InterfaceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. InterfaceID: The internal NetMRI identifier for the interface on which the end host was found. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_InterfaceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_InterfaceID: If op_InterfaceID is specified, the field named in this input will be compared to the value in InterfaceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_InterfaceID must be specified if op_InterfaceID is specified.
:type val_f_InterfaceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_InterfaceID: If op_InterfaceID is specified, this value will be compared to the value in InterfaceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_InterfaceID must be specified if op_InterfaceID is specified.
:type val_c_InterfaceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_MACAddress: The operator to apply to the field MACAddress. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. MACAddress: The MAC address of the end host. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_MACAddress: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_MACAddress: If op_MACAddress is specified, the field named in this input will be compared to the value in MACAddress using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_MACAddress must be specified if op_MACAddress is specified.
:type val_f_MACAddress: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_MACAddress: If op_MACAddress is specified, this value will be compared to the value in MACAddress using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_MACAddress must be specified if op_MACAddress is specified.
:type val_c_MACAddress: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NeighborID: The operator to apply to the field NeighborID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NeighborID: The internal NetMRI identifier for the associated Neighbor record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NeighborID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NeighborID: If op_NeighborID is specified, the field named in this input will be compared to the value in NeighborID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NeighborID must be specified if op_NeighborID is specified.
:type val_f_NeighborID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NeighborID: If op_NeighborID is specified, this value will be compared to the value in NeighborID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NeighborID must be specified if op_NeighborID is specified.
:type val_c_NeighborID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_Network: The operator to apply to the field Network. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. Network: The name of the Network View associated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_Network: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_Network: If op_Network is specified, the field named in this input will be compared to the value in Network using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_Network must be specified if op_Network is specified.
:type val_f_Network: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_Network: If op_Network is specified, this value will be compared to the value in Network using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_Network must be specified if op_Network is specified.
:type val_c_Network: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NetworkID: The operator to apply to the field NetworkID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NetworkID: The internal NetMRI identifier of the associated network. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NetworkID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NetworkID: If op_NetworkID is specified, the field named in this input will be compared to the value in NetworkID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NetworkID must be specified if op_NetworkID is specified.
:type val_f_NetworkID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NetworkID: If op_NetworkID is specified, this value will be compared to the value in NetworkID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NetworkID must be specified if op_NetworkID is specified.
:type val_c_NetworkID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifIndex: The operator to apply to the field ifIndex. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifIndex: The interface index on which the end host was found. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifIndex: If op_ifIndex is specified, the field named in this input will be compared to the value in ifIndex using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifIndex must be specified if op_ifIndex is specified.
:type val_f_ifIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifIndex: If op_ifIndex is specified, this value will be compared to the value in ifIndex using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifIndex must be specified if op_ifIndex is specified.
:type val_c_ifIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` EndHostMACAddressID
:param sort: The data field(s) to use for sorting the output. Default is EndHostMACAddressID. Valid values are EndHostMACAddressID, NetworkID, Network, MACAddress, IPAddress, IPAddressNumeric, DataSourceID, DeviceType, DeviceName, DeviceNetBIOSName, DeviceID, ifIndex, InterfaceID, InfraDeviceID, NeighborID, EndHostMACAddressTimestamp, FirstSeenTime, EndHostMACAddressStartTime, EndHostMACAddressEndTime, EndHostMACAddressChangedCols.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each EndHostMacAddress. Valid values are EndHostMACAddressID, NetworkID, Network, MACAddress, IPAddress, IPAddressNumeric, DataSourceID, DeviceType, DeviceName, DeviceNetBIOSName, DeviceID, ifIndex, InterfaceID, InfraDeviceID, NeighborID, EndHostMACAddressTimestamp, FirstSeenTime, EndHostMACAddressStartTime, EndHostMACAddressEndTime, EndHostMACAddressChangedCols. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if it is not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return end_host_mac_addresses: An array of the EndHostMacAddress objects that match the specified input criteria.
:rtype end_host_mac_addresses: Array of EndHostMacAddress
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
| 58.822355
| 744
| 0.632559
| 7,173
| 58,940
| 5.138436
| 0.040011
| 0.064572
| 0.041972
| 0.054886
| 0.942075
| 0.939579
| 0.904091
| 0.888898
| 0.877232
| 0.872836
| 0
| 0.003176
| 0.289447
| 58,940
| 1,001
| 745
| 58.881119
| 0.87691
| 0.841585
| 0
| 0
| 0
| 0
| 0.068792
| 0.036913
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1b0ee6219afd346e1b3d62914ae7d3665857677
| 2,509
|
py
|
Python
|
tests/test_pages.py
|
calibear20/NHentai-API
|
c543f96f4088dd0f25842e9935f2f6c84317dc55
|
[
"MIT"
] | 33
|
2020-07-12T04:00:05.000Z
|
2022-03-27T12:50:57.000Z
|
tests/test_pages.py
|
calibear20/NHentai-API
|
c543f96f4088dd0f25842e9935f2f6c84317dc55
|
[
"MIT"
] | 16
|
2020-07-24T14:37:11.000Z
|
2022-03-06T01:57:02.000Z
|
tests/test_pages.py
|
calibear20/NHentai-API
|
c543f96f4088dd0f25842e9935f2f6c84317dc55
|
[
"MIT"
] | 14
|
2020-07-09T18:42:13.000Z
|
2022-03-11T13:30:06.000Z
|
import sys
import os
import pytest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from NHentai import NHentai
from NHentai import NHentaiAsync
def test_standard_payload_integrity_home_page():
pages = NHentai().get_pages(page=1)
doujins = pages.doujins
for doujin in doujins:
assert doujin.id is not None
assert doujin.title is not None
assert doujin.languages is not None
assert doujin.cover is not None
assert doujin.tags is not None
def test_standard_payload_integrity_characters_page():
chars = NHentai().get_characters(page=1)
assert chars.page is not None and isinstance(chars.page, int)
assert chars.total_pages is not None and isinstance(chars.total_pages, int)
assert chars.characters is not None and isinstance(chars.characters, list)
for char in chars.characters:
assert char.section is not None and isinstance(char.section, str)
assert char.title is not None and isinstance(char.title, str)
assert char.url is not None and isinstance(char.url, str)
assert char.total_entries is not None and isinstance(char.total_entries, int)
def test_if_all_required_keys_arent_none():
doujins = NHentai().get_pages(1)
for doujin in doujins.doujins:
assert doujin.id is not None
assert doujin.media_id is not None
assert doujin.cover.media_id is not None
@pytest.mark.asyncio
async def test_async_payload_integrity_home_page():
pages = await NHentaiAsync().get_pages(page=1)
doujins = pages.doujins
for doujin in doujins:
assert doujin.id is not None
assert doujin.title is not None
assert doujin.languages is not None
assert doujin.cover is not None
assert doujin.tags is not None
@pytest.mark.asyncio
async def test_async_payload_integrity_characters_page():
chars = await NHentaiAsync().get_characters(1)
assert chars.page is not None and isinstance(chars.page, int)
assert chars.total_pages is not None and isinstance(chars.total_pages, int)
assert chars.characters is not None and isinstance(chars.characters, list)
for char in chars.characters:
assert char.section is not None and isinstance(char.section, str)
assert char.title is not None and isinstance(char.title, str)
assert char.url is not None and isinstance(char.url, str)
assert char.total_entries is not None and isinstance(char.total_entries, int)
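For context, the synchronous call path these tests exercise reduces to the following sketch (the attribute names come from the assertions above; nothing else is added):

from NHentai import NHentai

pages = NHentai().get_pages(page=1)
for doujin in pages.doujins:
    print(doujin.id, doujin.title, doujin.languages)

Note that the async variants rely on the pytest-asyncio plugin, which supplies the @pytest.mark.asyncio marker used above.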
| 38.6
| 85
| 0.726983
| 374
| 2,509
| 4.756684
| 0.157754
| 0.075885
| 0.136594
| 0.094435
| 0.838111
| 0.76054
| 0.7448
| 0.7448
| 0.7448
| 0.721192
| 0
| 0.003006
| 0.204464
| 2,509
| 65
| 86
| 38.6
| 0.888277
| 0
| 0
| 0.634615
| 0
| 0
| 0.000797
| 0
| 0
| 0
| 0
| 0
| 0.519231
| 1
| 0.057692
| false
| 0
| 0.096154
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1c61b1cf8f4c213cd228e70628b93c64038a227
| 2,258
|
py
|
Python
|
hamlpy/test/ext_test.py
|
helmus/HamlPy
|
acb79e14381ce46e6d1cb64e7cb154751ae02dfe
|
[
"MIT"
] | 98
|
2015-01-03T05:43:36.000Z
|
2022-01-29T04:55:56.000Z
|
hamlpy/test/ext_test.py
|
helmus/HamlPy
|
acb79e14381ce46e6d1cb64e7cb154751ae02dfe
|
[
"MIT"
] | 16
|
2015-01-19T16:02:47.000Z
|
2020-10-28T12:07:24.000Z
|
hamlpy/test/ext_test.py
|
helmus/HamlPy
|
acb79e14381ce46e6d1cb64e7cb154751ae02dfe
|
[
"MIT"
] | 32
|
2015-01-13T16:35:44.000Z
|
2021-08-01T20:01:28.000Z
|
import unittest
import os
from hamlpy.ext import has_any_extension
class ExtTest(unittest.TestCase):
"""
Tests for methods found in ../ext.py
"""
def test_has_any_extension(self):
extensions = [
'hamlpy',
'haml',
'.txt'
]
# no directory
self.assertTrue(has_any_extension('dir.hamlpy', extensions))
self.assertTrue(has_any_extension('dir.haml', extensions))
self.assertTrue(has_any_extension('dir.txt', extensions))
self.assertFalse(has_any_extension('dir.html', extensions))
# with dot in filename
self.assertTrue(has_any_extension('dir.dot.hamlpy', extensions))
self.assertTrue(has_any_extension('dir.dot.haml', extensions))
self.assertTrue(has_any_extension('dir.dot.txt', extensions))
self.assertFalse(has_any_extension('dir.dot.html', extensions))
# relative path
self.assertTrue(has_any_extension('../dir.hamlpy', extensions))
self.assertTrue(has_any_extension('../dir.haml', extensions))
self.assertTrue(has_any_extension('../dir.txt', extensions))
self.assertFalse(has_any_extension('../dir.html', extensions))
# with dot in filename
self.assertTrue(has_any_extension('../dir.dot.hamlpy', extensions))
self.assertTrue(has_any_extension('../dir.dot.haml', extensions))
self.assertTrue(has_any_extension('../dir.dot.txt', extensions))
self.assertFalse(has_any_extension('../dir.dot.html', extensions))
# absolute paths
self.assertTrue(has_any_extension('/home/user/dir.hamlpy', extensions))
self.assertTrue(has_any_extension('/home/user/dir.haml', extensions))
self.assertTrue(has_any_extension('/home/user/dir.txt', extensions))
self.assertFalse(has_any_extension('/home/user/dir.html', extensions))
# with dot in filename
self.assertTrue(has_any_extension('/home/user/dir.dot.hamlpy', extensions))
self.assertTrue(has_any_extension('/home/user/dir.dot.haml', extensions))
self.assertTrue(has_any_extension('/home/user/dir.dot.txt', extensions))
self.assertFalse(has_any_extension('/home/user/dir.dot.html', extensions))
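# Illustrative sketch only (not part of the original hamlpy module): under the
# assumption that has_any_extension simply matches the file name's suffix
# against the configured extensions, tolerating entries written with or
# without a leading dot, an equivalent helper could look like this:
#
#     def _has_any_extension_sketch(file_path, extensions):
#         name = os.path.basename(file_path)
#         return any(name.endswith('.' + ext.lstrip('.')) for ext in extensions)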
| 48.042553
| 83
| 0.671391
| 270
| 2,258
| 5.418519
| 0.144444
| 0.10663
| 0.266576
| 0.24607
| 0.856459
| 0.856459
| 0.856459
| 0.854409
| 0.827068
| 0.817498
| 0
| 0
| 0.190434
| 2,258
| 47
| 84
| 48.042553
| 0.800328
| 0.062888
| 0
| 0
| 0
| 0
| 0.177481
| 0.054389
| 0
| 0
| 0
| 0
| 0.705882
| 1
| 0.029412
| false
| 0
| 0.088235
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| a1da870d6adb2818d78866e0921f50d41053d9cc
| 13,243
| py
| Python
| main/modeles/repositories/vmTaxonsRepository.py
| Splendens/atlas_biodiv_pdl
| eff4bcc9193b76462ede0365b9faec3e0706d5d8
| ["BSD-2-Clause"]
| 3
| 2018-07-31T14:30:18.000Z
| 2020-11-21T06:43:18.000Z
| main/modeles/repositories/vmTaxonsRepository.py
| Splendens/atlas_biodiv_pdl
| eff4bcc9193b76462ede0365b9faec3e0706d5d8
| ["BSD-2-Clause"]
| null
| null
| null
| main/modeles/repositories/vmTaxonsRepository.py
| Splendens/atlas_biodiv_pdl
| eff4bcc9193b76462ede0365b9faec3e0706d5d8
| ["BSD-2-Clause"]
| 2
| 2018-11-23T10:00:30.000Z
| 2018-11-23T22:33:11.000Z
|
# -*- coding:utf-8 -*-
import unicodedata
from ...configuration import config
from sqlalchemy.sql import text
from .. import utils
def deleteAccent(string):
return unicodedata.normalize('NFD', string).encode('ascii', 'ignore')
# With DISTINCT the result is an array, not an object: 0: lb_nom, 1: nom_vern
def getTaxonsCommunes(connection, insee):
sql = """
SELECT DISTINCT
o.cd_ref, max(date_part('year'::text, o.dateobs)) as last_obs,
COUNT(o.id_observation) AS nb_obs, t.nom_complet_html, t.nom_vern,
t.group2_inpn, t.patrimonial, t.protection_stricte,
m.url, m.chemin, m.id_media
FROM atlas.vm_observations o
JOIN atlas.vm_taxons t ON t.cd_ref=o.cd_ref
LEFT JOIN atlas.vm_medias m ON m.cd_ref=o.cd_ref AND m.id_type={}
WHERE o.insee = :thisInsee
GROUP BY o.cd_ref, t.nom_vern, t.nom_complet_html, t.group2_inpn,
t.patrimonial, t.protection_stricte, m.url, m.chemin, m.id_media
ORDER BY group2_inpn, nom_complet_html ASC
""".format(config.ATTR_MAIN_PHOTO)
req = connection.execute(text(sql), thisInsee=insee)
taxonCommunesList = list()
nbObsTotal = 0
for r in req:
temp = {
'nom_complet_html': r.nom_complet_html,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.last_obs,
'group2_inpn': deleteAccent(r.group2_inpn),
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte,
'path': utils.findPath(r),
'id_media': r.id_media
}
taxonCommunesList.append(temp)
nbObsTotal = nbObsTotal + r.nb_obs
return {'taxons': taxonCommunesList, 'nbObsTotal': nbObsTotal}
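# Hypothetical usage sketch (not part of the original repository module); it
# assumes the atlas settings expose a database URI, here written as
# config.SQLALCHEMY_DATABASE_URI, and uses a made-up INSEE code:
#
#     from sqlalchemy import create_engine
#     engine = create_engine(config.SQLALCHEMY_DATABASE_URI)
#     with engine.connect() as connection:
#         result = getTaxonsCommunes(connection, '44109')
#         print(result['nbObsTotal'], 'observations for the commune')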
# With DISTINCT the result is an array, not an object: 0: lb_nom, 1: nom_vern
def getTaxonsEpci(connection, nom_epci_simple):
sql = """
with taxonepci AS (
SELECT DISTINCT
o.cd_ref, max(date_part('year'::text, o.dateobs)) as last_obs,
COUNT(o.id_observation) AS nb_obs, t.nom_complet_html, t.nom_vern,
t.group2_inpn, t.patrimonial, t.protection_stricte, o.insee,
m.url, m.chemin, m.id_media
FROM atlas.vm_observations o
JOIN atlas.vm_taxons t ON t.cd_ref=o.cd_ref
JOIN atlas.l_communes_epci ec ON ec.insee = o.insee
JOIN atlas.vm_epci e ON ec.id = e.id
LEFT JOIN atlas.vm_medias m ON m.cd_ref=o.cd_ref AND id_type={}
WHERE e.nom_epci_simple = :thisNomEpciSimple
GROUP BY o.cd_ref, t.nom_vern, t.nom_complet_html, t.group2_inpn,
t.patrimonial, t.protection_stricte, o.insee, m.url, m.chemin, m.id_media
ORDER BY o.cd_ref DESC
)
select DISTINCT
cd_ref, max(last_obs) as last_obs,
SUM(nb_obs) AS nb_obs, nom_complet_html, nom_vern,
group2_inpn, patrimonial, protection_stricte,
url, chemin, id_media
from taxonepci
GROUP BY cd_ref, nom_vern, nom_complet_html, group2_inpn,
patrimonial, protection_stricte, url, chemin, id_media
ORDER BY group2_inpn, nom_complet_html ASC
""".format(config.ATTR_MAIN_PHOTO)
req = connection.execute(text(sql), thisNomEpciSimple=nom_epci_simple)
taxonEpciList = list()
nbObsTotal = 0
for r in req:
temp = {
'nom_complet_html': r.nom_complet_html,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.last_obs,
'group2_inpn': deleteAccent(r.group2_inpn),
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte,
'path': utils.findPath(r),
'id_media': r.id_media
}
taxonEpciList.append(temp)
nbObsTotal = nbObsTotal + r.nb_obs
return {'taxons': taxonEpciList, 'nbObsTotal': nbObsTotal}
# With DISTINCT the result is an array, not an object: 0: lb_nom, 1: nom_vern
def getTaxonsDpt(connection, num_dpt):
sql = """
SELECT *
FROM atlas.vm_synthese_obs_taxons_dpt
WHERE num_dpt = :thisNumdpt
ORDER BY group2_inpn, nom_complet_html ASC
""".format(config.ATTR_MAIN_PHOTO)
req = connection.execute(text(sql), thisNumdpt=num_dpt)
taxonDptList = list()
nbObsTotal = 0
for r in req:
temp = {
'nom_complet_html': r.nom_complet_html,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.last_obs,
'group2_inpn': deleteAccent(r.group2_inpn),
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte,
'path': utils.findPath(r),
'id_media': r.id_media
}
taxonDptList.append(temp)
nbObsTotal = nbObsTotal + r.nb_obs
return {'taxons': taxonDptList, 'nbObsTotal': nbObsTotal}
# With DISTINCT the result is an array, not an object: 0: lb_nom, 1: nom_vern
def getListeTaxonsCommunes(connection, insee):
sql = """
SELECT DISTINCT
o.cd_ref, max(date_part('year'::text, o.dateobs)) as last_obs,
COUNT(o.id_observation) AS nb_obs, replace(replace(t.nom_complet_html, '<i>', ''), '</i>', '') as nom_complet, t.nom_vern,
t.group2_inpn, t.patrimonial, t.protection_stricte
FROM atlas.vm_observations o
JOIN atlas.vm_taxons t ON t.cd_ref=o.cd_ref
WHERE o.insee = :thisInsee
GROUP BY o.cd_ref, t.nom_vern, t.nom_complet_html, t.group2_inpn,
t.patrimonial, t.protection_stricte
ORDER BY group2_inpn, nom_complet ASC
"""
req = connection.execute(text(sql), thisInsee=insee)
taxonCommunesList = list()
nbObsTotal = 0
for r in req:
temp = {
'nom_complet': r.nom_complet,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.last_obs,
'group2_inpn': r.group2_inpn,
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte,
}
taxonCommunesList.append(temp)
nbObsTotal = nbObsTotal + r.nb_obs
return {'taxons': taxonCommunesList, 'nbObsTotal': nbObsTotal}
# With DISTINCT the result is an array, not an object: 0: lb_nom, 1: nom_vern
def getListeTaxonsEpci(connection, nom_epci_simple):
sql = """
with taxonepci AS (
SELECT DISTINCT
o.cd_ref, max(date_part('year'::text, o.dateobs)) as last_obs,
COUNT(o.id_observation) AS nb_obs, t.nom_complet_html, t.nom_vern,
t.group2_inpn, t.patrimonial, t.protection_stricte, o.insee
FROM atlas.vm_observations o
JOIN atlas.vm_taxons t ON t.cd_ref=o.cd_ref
JOIN atlas.l_communes_epci ec ON ec.insee = o.insee
JOIN atlas.vm_epci e ON ec.id = e.id
WHERE e.nom_epci_simple = :thisNomEpciSimple
GROUP BY o.cd_ref, t.nom_vern, t.nom_complet_html, t.group2_inpn,
t.patrimonial, t.protection_stricte, o.insee
ORDER BY o.cd_ref DESC
)
select DISTINCT
cd_ref, max(last_obs) as last_obs,
SUM(nb_obs)::int AS nb_obs, replace(replace(nom_complet_html, '<i>', ''), '</i>', '') as nom_complet, nom_vern,
group2_inpn, patrimonial, protection_stricte
from taxonepci
GROUP BY cd_ref, nom_vern, nom_complet, group2_inpn,
patrimonial, protection_stricte
ORDER BY group2_inpn, nom_complet ASC
"""
req = connection.execute(text(sql), thisNomEpciSimple=nom_epci_simple)
taxonEpciList = list()
nbObsTotal = 0
for r in req:
temp = {
'nom_complet': r.nom_complet,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.last_obs,
'group2_inpn': r.group2_inpn,
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte
}
taxonEpciList.append(temp)
nbObsTotal = nbObsTotal + r.nb_obs
return {'taxons': taxonEpciList, 'nbObsTotal': nbObsTotal}
# With DISTINCT the result is an array, not an object: 0: lb_nom, 1: nom_vern
def getListeTaxonsDpt(connection, num_dpt):
sql = """
SELECT *
FROM atlas.vm_synthese_liste_taxons_dpt
WHERE num_dpt = :thisNumdpt
"""
req = connection.execute(text(sql), thisNumdpt=num_dpt)
taxonDptList = list()
nbObsTotal = 0
for r in req:
temp = {
'nom_complet': r.nom_complet,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.last_obs,
'group2_inpn': r.group2_inpn,
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte
}
taxonDptList.append(temp)
nbObsTotal = nbObsTotal + r.nb_obs
return {'taxons': taxonDptList, 'nbObsTotal': nbObsTotal}
def getTaxonsChildsList(connection, cd_ref):
sql = """
SELECT DISTINCT nom_complet_html, nb_obs, nom_vern, tax.cd_ref,
yearmax, group2_inpn, patrimonial, protection_stricte,
chemin, url, m.id_media
FROM atlas.vm_taxons tax
JOIN atlas.bib_taxref_rangs bib_rang
ON trim(tax.id_rang)= trim(bib_rang.id_rang)
LEFT JOIN atlas.vm_medias m
ON m.cd_ref = tax.cd_ref AND m.id_type={}
WHERE tax.cd_ref IN (
SELECT * FROM atlas.find_all_taxons_childs(:thiscdref)
) """.format(str(config.ATTR_MAIN_PHOTO)).encode('UTF-8')
req = connection.execute(text(sql), thiscdref=cd_ref)
taxonRankList = list()
nbObsTotal = 0
for r in req:
temp = {
'nom_complet_html': r.nom_complet_html,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.yearmax,
'group2_inpn': deleteAccent(r.group2_inpn),
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte,
'path': utils.findPath(r),
'id_media': r.id_media
}
taxonRankList.append(temp)
nbObsTotal = nbObsTotal + r.nb_obs
return {'taxons': taxonRankList, 'nbObsTotal': nbObsTotal}
def getINPNgroupPhotos(connection):
"""
Get list of INPN groups with at least one photo
"""
sql = """
SELECT DISTINCT count(*) AS nb_photos, group2_inpn
FROM atlas.vm_taxons T
JOIN atlas.vm_medias M on M.cd_ref = T.cd_ref
GROUP BY group2_inpn
ORDER BY nb_photos DESC
"""
req = connection.execute(text(sql))
groupList = list()
for r in req:
temp = {
'group': utils.deleteAccent(r.group2_inpn),
'groupAccent': r.group2_inpn
}
groupList.append(temp)
return groupList
def getTaxonsGroup(connection, groupe):
sql = """
SELECT t.cd_ref, t.nom_complet_html, t.nom_vern, t.nb_obs,
t.group2_inpn, t.protection_stricte, t.patrimonial, t.yearmax,
m.chemin, m.url, m.id_media,
t.nb_obs
FROM atlas.vm_taxons t
LEFT JOIN atlas.vm_medias m
ON m.cd_ref = t.cd_ref AND m.id_type={}
WHERE t.group2_inpn = :thisGroupe
GROUP BY t.cd_ref, t.nom_complet_html, t.nom_vern, t.nb_obs,
t.group2_inpn, t.protection_stricte, t.patrimonial, t.yearmax,
m.chemin, m.url, m.id_media
""".format(config.ATTR_MAIN_PHOTO)
req = connection.execute(text(sql), thisGroupe=groupe)
tabTaxons = list()
nbObsTotal = 0
for r in req:
nbObsTotal = nbObsTotal+r.nb_obs
temp = {
'nom_complet_html': r.nom_complet_html,
'nb_obs': r.nb_obs,
'nom_vern': r.nom_vern,
'cd_ref': r.cd_ref,
'last_obs': r.yearmax,
'group2_inpn': deleteAccent(r.group2_inpn),
'patrimonial': r.patrimonial,
'protection_stricte': r.protection_stricte,
'id_media': r.id_media,
'path': utils.findPath(r)
}
tabTaxons.append(temp)
return {'taxons': tabTaxons, 'nbObsTotal': nbObsTotal}
# get all groupINPN
def getAllINPNgroup(connection):
sql = """
SELECT SUM(nb_obs) AS som_obs, group2_inpn
FROM atlas.vm_taxons
GROUP BY group2_inpn
ORDER by som_obs DESC
"""
req = connection.execute(text(sql))
groupList = list()
for r in req:
temp = {
'group': utils.deleteAccent(r.group2_inpn),
'groupAccent': r.group2_inpn
}
groupList.append(temp)
return groupList
| 37.622159
| 134
| 0.59488
| 1,713
| 13,243
| 4.365441
| 0.089901
| 0.036106
| 0.050548
| 0.020059
| 0.866542
| 0.839797
| 0.815592
| 0.799813
| 0.792324
| 0.760631
| 0
| 0.007267
| 0.303783
| 13,243
| 351
| 135
| 37.729345
| 0.803796
| 0.040097
| 0
| 0.722222
| 0
| 0.009804
| 0.524166
| 0.056138
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035948
| false
| 0
| 0.013072
| 0.003268
| 0.084967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| a1e3bb2c1361f71809c7e5bb07fe62db3351cafb
| 4,500
| py
| Python
| tests/test_api.py
| barslmn/django-phenotype-ontologies
| 24a6ddd9c448c816398b33d74e03530d84f7a97f
| ["MIT"]
| 7
| 2018-04-10T00:37:26.000Z
| 2020-11-30T15:50:11.000Z
| tests/test_api.py
| barslmn/django-phenotype-ontologies
| 24a6ddd9c448c816398b33d74e03530d84f7a97f
| ["MIT"]
| 181
| 2018-04-09T23:55:30.000Z
| 2022-03-28T14:47:21.000Z
| tests/test_api.py
| barslmn/django-phenotype-ontologies
| 24a6ddd9c448c816398b33d74e03530d84f7a97f
| ["MIT"]
| 1
| 2021-01-18T18:57:48.000Z
| 2021-01-18T18:57:48.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test django-phenotype-ontologies
------------
Tests for `django-phenotype-ontologies` API.
"""
try:
from django.urls import reverse
except Exception:
from django.core.urlresolvers import reverse
import pytest
from rest_framework import status
from rest_framework.test import APIClient
from .fixtures import * # NOQA
@pytest.mark.django_db
def setup_client(user=None):
client = APIClient()
if user:
client.force_authenticate(user=user)
return client
def test_api_permissions():
client = setup_client()
response = client.post(reverse('phenotype_ontologies:term-list'), {})
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
response = client.post(reverse('phenotype_ontologies:crossreference-list'), {})
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
response = client.put(reverse('phenotype_ontologies:term-detail', kwargs={'pk': 1}), {})
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
response = client.put(reverse('phenotype_ontologies:crossreference-detail', kwargs={'pk': 1}), {})
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
response = client.patch(reverse('phenotype_ontologies:term-detail', kwargs={'pk': 1}), {})
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
response = client.patch(reverse('phenotype_ontologies:crossreference-detail', kwargs={'pk': 1}), {})
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
response = client.delete(reverse('phenotype_ontologies:term-detail', kwargs={'pk': 1}))
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
response = client.delete(reverse('phenotype_ontologies:crossreference-detail', kwargs={'pk': 1}))
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
@pytest.mark.django_db
def test_get_terms_list(Term):
Term(pk=999)
client = setup_client()
response = client.get(reverse('phenotype_ontologies:term-list'), format='json')
assert response.status_code == status.HTTP_200_OK
assert len(response.json().get('results', [])) == 1
observed_keys = list(response.json()['results'][0].keys())
expected_keys = [
'id',
'ontology',
'term',
'label',
'description',
'url',
'synonyms',
'xrefs',
'relationships',
'created_by',
'created',
'modified',
]
difference = set(observed_keys).difference(set(expected_keys))
assert len(difference) == 0
@pytest.mark.django_db
def test_get_terms_detail(Term):
Term(pk=999)
client = setup_client()
response = client.get(reverse('phenotype_ontologies:term-detail', kwargs={'pk': 999}), format='json')
assert response.status_code == status.HTTP_200_OK
observed_keys = list(response.json().keys())
expected_keys = [
'id',
'ontology',
'term',
'label',
'description',
'url',
'synonyms',
'xrefs',
'relationships',
'created_by',
'created',
'modified',
]
difference = set(observed_keys).difference(set(expected_keys))
assert len(difference) == 0
@pytest.mark.django_db
def test_get_xrefs_list(CrossReference):
CrossReference(pk=999)
client = setup_client()
response = client.get(reverse('phenotype_ontologies:crossreference-list'), format='json')
assert response.status_code == status.HTTP_200_OK
assert len(response.json().get('results', [])) == 1
observed_keys = list(response.json()['results'][0].keys())
expected_keys = [
'id',
'term',
'source',
'source_value',
'created',
'modified',
]
difference = set(observed_keys).difference(set(expected_keys))
assert len(difference) == 0
@pytest.mark.django_db
def test_get_xrefs_detail(CrossReference):
CrossReference(pk=999)
client = setup_client()
response = client.get(reverse('phenotype_ontologies:crossreference-detail', kwargs={'pk': 999}), format='json')
assert response.status_code == status.HTTP_200_OK
observed_keys = list(response.json().keys())
expected_keys = [
'id',
'term',
'source',
'source_value',
'created',
'modified',
]
difference = set(observed_keys).difference(set(expected_keys))
assert len(difference) == 0
| 29.605263
| 115
| 0.669556
| 517
| 4,500
| 5.611219
| 0.172147
| 0.091693
| 0.107549
| 0.099276
| 0.863151
| 0.843847
| 0.818339
| 0.818339
| 0.80524
| 0.80524
| 0
| 0.018987
| 0.192444
| 4,500
| 151
| 116
| 29.801325
| 0.779307
| 0.030667
| 0
| 0.701754
| 0
| 0
| 0.170496
| 0.100184
| 0
| 0
| 0
| 0
| 0.157895
| 1
| 0.052632
| false
| 0
| 0.052632
| 0
| 0.114035
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| b81b2dc3586b69fff201df88cf4a7bb3789190fc
| 905
| py
| Python
| minimal/cli/cmd_run.py
| drstarry/minimal
| 8c3eac110505d68dabde4d014cd0968392b640f9
| ["MIT"]
| 2
| 2015-09-22T00:57:17.000Z
| 2016-12-07T02:18:33.000Z
| minimal/cli/cmd_run.py
| drstarry/minimal
| 8c3eac110505d68dabde4d014cd0968392b640f9
| ["MIT"]
| null
| null
| null
| minimal/cli/cmd_run.py
| drstarry/minimal
| 8c3eac110505d68dabde4d014cd0968392b640f9
| ["MIT"]
| null
| null
| null
|
# coding: utf-8
import click
import os
from .cli import pass_context
@click.command('dtree', short_help='Decision Tree Classifier')
@click.option('-m', '--mode',
default='train',
type=click.Choice(['train', 'test']),
help='train or test your decision tree model')
@click.option('-f', '--file',
type=str,
help='file path of your data set')
@pass_context
def cli(ctx, mode, file):
    # The options above are named 'mode' and 'file', so the callback must accept
    # those parameters; the original signature took 'port' and used a Python 2
    # print statement, which would fail under click and Python 3.
    print('hello %s' % mode)
# @click.command('knn', short_help='K Nearest Neighbors Classifier')
# @click.option('-m', '--mode',
# default='train',
# type=click.Choice(['train', 'test']),
# help='train or test your knn model')
# @click.option('-f', '--file',
# type=str,
# help='file path of your data set')
# @pass_context
# def cli(ctx, mode, port):
# print 'hello %s' % mode
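# Hypothetical invocation sketch (the console-script name below is assumed and
# not taken from this file):
#   $ minimal dtree --mode train --file data/train.csv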
| 29.193548
| 68
| 0.559116
| 114
| 905
| 4.394737
| 0.394737
| 0.087824
| 0.083832
| 0.087824
| 0.706587
| 0.706587
| 0.706587
| 0.706587
| 0.706587
| 0.706587
| 0
| 0.001513
| 0.269613
| 905
| 30
| 69
| 30.166667
| 0.75643
| 0.458564
| 0
| 0
| 0
| 0
| 0.274633
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.142857
| 0.214286
| null | null | 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 7
| 62a179034a6fb8efedfb94551af379af8bd80673
| 21,579
| py
| Python
| ufcnn-keras/models/convolutional_transpose.py
| mikimaus78/ml_monorepo
| b2c2627ff0e86e27f6829170d0dac168d8e5783b
| ["BSD-3-Clause"]
| 51
| 2019-02-01T19:43:37.000Z
| 2022-03-16T09:07:03.000Z
| ufcnn-keras/models/convolutional_transpose.py
| mikimaus78/ml_monorepo
| b2c2627ff0e86e27f6829170d0dac168d8e5783b
| ["BSD-3-Clause"]
| 2
| 2019-02-23T18:54:22.000Z
| 2019-11-09T01:30:32.000Z
| ufcnn-keras/models/convolutional_transpose.py
| mikimaus78/ml_monorepo
| b2c2627ff0e86e27f6829170d0dac168d8e5783b
| ["BSD-3-Clause"]
| 35
| 2019-02-08T02:00:31.000Z
| 2022-03-01T23:17:00.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from keras import backend as K
from keras import activations, initializations, regularizers, constraints
from keras.engine import Layer, InputSpec
import tensorflow as tf
def deconv_output_length(input_length, filter_size, border_mode, stride):
print("input_lenght: {}, filter_size: {}, border_mode: {}, stride: {}".format(
input_length, filter_size, border_mode, stride
))
if input_length is None:
return None
assert border_mode in {'same', 'valid'}
if border_mode == 'same':
output_length = input_length * stride
elif border_mode == 'valid':
# output_length = input_length * stride - filter_size + 1
output_length = (input_length - 1) * stride + filter_size
return output_length
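# Worked example (added for illustration, not in the original source): with
# border_mode='valid', an input length of 10, filter_size 4 and stride 2,
# deconv_output_length returns (10 - 1) * 2 + 4 = 22; with border_mode='same'
# the same input gives 10 * 2 = 20.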
class Convolution2D_Transpose(Layer):
"""
Creates a 2D Convolution Transpose layer (sometimes called "Deconvolution").
Based on code by Xiaomin Wu in "[fchollet/keras] Anyone implemented a Deconvolutional
layer combined the Keras and Tensorflow? (#2106)"
Must be placed in the keras/layers/ directory.
W_shape --- shape of the weights - should be calculated internally
[filter_dim_y, filter_dim_x, self.nb_filter(=number of channels in output), number_of_channels_in_input]
b_shape ... shape of the biases - should be calculated internally
[0] ... nb_filter
strides
Strides of the filters
[stride_in_batch_size(must be 1), stride_y, stride_x, stride_in_depth (must be 1)]
deconv_shape
this is output_shape of TF conv2d_transpose
deconv_shape = [batch_size, output_size_y, output_size_x, number_of_filters]
padding: valid|same (small caps)
input_dim, input_length ... Keras input parameters
You can also set the output_shape (deconv_shape) according to:
def conv_transpose_out_length(input_size, filter_size, border_mode, stride):
if input_size is None:
return None
if border_mode == 'valid':
output_size = (input_size - 1) * stride + filter_size
elif border_mode == 'same':
output_size = input_size
return output_size
"""
input_ndim = 4
def __init__(self,
init='glorot_uniform', activation='linear', weights=None,
padding='valid', strides=[1,1,1,1], deconv_shape=[], W_shape = [],b_shape=[],
W_regularizer=None, b_regularizer=None, activity_regularizer=None,
W_constraint=None, b_constraint=None, input_dim=None, input_length=None, **kwargs):
if padding not in {'valid','same'}:
raise Exception('Invalid border mode for Convolution2D:', padding)
self.deconv_shape = deconv_shape
self.init = initializations.get(init)
self.activation = activations.get(activation)
assert padding in {'valid', 'same'}, 'border_mode must be in {valid, same}'
self.padding = padding
self.strides = strides
self.W_regularizer = regularizers.get(W_regularizer)
self.b_regularizer = regularizers.get(b_regularizer)
self.activity_regularizer = regularizers.get(activity_regularizer)
self.W_shape = W_shape
self.b_shape = b_shape
self.W_constraint = constraints.get(W_constraint)
self.b_constraint = constraints.get(b_constraint)
self.constraints = [self.W_constraint, self.b_constraint]
self.initial_weights = weights
#self.input = K.placeholder(ndim=4) # old keras 0.3.x
# Keras 1.0:
self.input_spec = [InputSpec(ndim=4)]
self.initial_weights = weights
self.input_dim = input_dim
self.input_length = input_length
if self.input_dim:
kwargs['input_shape'] = (self.input_length, self.input_dim)
super(Convolution2D_Transpose, self).__init__(**kwargs)
def build(self, input_shape):
input_dim = input_shape[2]
#self.W_shape = (self.nb_filter, input_dim, self.filter_length, 1) # given from outside
self.W = self.init(self.W_shape, name='{}_W'.format(self.name))
self.b = K.zeros((self.b_shape), name='{}_b'.format(self.name))
self.trainable_weights = [self.W, self.b]
self.regularizers = []
if self.W_regularizer:
self.W_regularizer.set_param(self.W)
self.regularizers.append(self.W_regularizer)
if self.b_regularizer:
self.b_regularizer.set_param(self.b)
self.regularizers.append(self.b_regularizer)
if self.activity_regularizer:
self.activity_regularizer.set_layer(self)
self.regularizers.append(self.activity_regularizer)
self.constraints = {}
if self.W_constraint:
self.constraints[self.W] = self.W_constraint
if self.b_constraint:
self.constraints[self.b] = self.b_constraint
if self.initial_weights is not None:
self.set_weights(self.initial_weights)
del self.initial_weights
@property
def get_output_shape(self, input_shape):
return self.deconv_shape
def call(self, X, mask=None):
#X = self.get_input(train)
X = K.permute_dimensions(X, (0, 2, 3, 1))
conv_out = tf.nn.conv2d_transpose(X, self.W, strides=self.strides,
padding=self.padding.upper(),
output_shape=self.deconv_shape)
output = conv_out + K.reshape(self.b, (1, 1, 1, self.W_shape[2]))
return K.permute_dimensions(output, (0, 3, 1, 2))
def get_config(self):
config = {
'init': self.init.__name__,
'activation': self.activation.__name__,
'padding': self.padding,
'strides': self.strides,
'W_regularizer': self.W_regularizer.get_config() if self.W_regularizer else None,
'b_regularizer': self.b_regularizer.get_config() if self.b_regularizer else None,
'activity_regularizer': self.activity_regularizer.get_config() if self.activity_regularizer else None,
'W_constraint': self.W_constraint.get_config() if self.W_constraint else None,
'b_constraint': self.b_constraint.get_config() if self.b_constraint else None,
'W_shape': self.W_shape,
'b_shape': self.b_shape,
'deconv_shape': self.deconv_shape }
base_config = super(Convolution2D_Transpose, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def get_output_shape_for(self, input_shape):
return (self.deconv_shape[0],self.deconv_shape[1],self.deconv_shape[2],self.deconv_shape[3])
class Convolution1D_Transpose(Layer):
"""
Creates a 1D Convolution Transpose layer (sometimes called "Deconvolution").
Based on code by Xiaomin Wu in "[fchollet/keras] Anyone implemented a Deconvolutional
layer combined the Keras and Tensorflow? (#2106)"
Must be placed in the keras/layers/ directory.
W_shape --- shape of the weights - should be calculated internally
[filter_dim_x, self.nb_filter(=number of channels in output), number_of_channels_in_input]
b_shape ... shape of the biases - should be calculated internally
[0] ... nb_filter
strides
Strides of the filters
[stride_in_batch_size(must be 1), stride_x, stride_in_depth (must be 1)]
deconv_shape
this is output_shape of TF conv2d_transpose
deconv_shape = [batch_size, output_size_x, number_of_filters]
padding: valid|same (small caps)
input_dim, input_length ... Keras input parameters
You can also set the output_shape (deconv_shape) according to:
def conv_transpose_out_length(input_size, filter_size, border_mode, stride):
if input_size is None:
return None
if border_mode == 'valid':
output_size = (input_size - 1) * stride + filter_size
elif border_mode == 'same':
output_size = input_size
return output_size
"""
input_ndim = 3
def __init__(self,
init='glorot_uniform', activation='linear', weights=None,
padding='valid', strides=[1,1,1], deconv_shape=[], W_shape = [],b_shape=[],
W_regularizer=None, b_regularizer=None, activity_regularizer=None,
W_constraint=None, b_constraint=None, input_dim=None, input_length=None, **kwargs):
if padding not in {'valid','same'}:
raise Exception('Invalid border mode for Convolution1D:', padding)
#self.deconv_shape = deconv_shape
# transform 1 D in 2D
#deconv_shape = [batch_size, output_size_y, output_size_x, number_of_filters]
self.deconv_shape = [deconv_shape[0],1,deconv_shape[1],deconv_shape[2]]
self.init = initializations.get(init)
self.activation = activations.get(activation)
assert padding in {'valid', 'same'}, 'border_mode must be in {valid, same}'
self.padding = padding
self.strides = [strides[0],1,strides[1],strides[2]]
self.W_regularizer = regularizers.get(W_regularizer)
self.b_regularizer = regularizers.get(b_regularizer)
self.activity_regularizer = regularizers.get(activity_regularizer)
self.W_shape = [1, W_shape[0], W_shape[1], W_shape[2]]
self.b_shape = b_shape
self.W_constraint = constraints.get(W_constraint)
self.b_constraint = constraints.get(b_constraint)
self.constraints = [self.W_constraint, self.b_constraint]
self.initial_weights = weights
#self.input = K.placeholder(ndim=4) # old keras 0.3.x
# Keras 1.0:
self.input_spec = [InputSpec(ndim=3)]
self.initial_weights = weights
self.input_dim = input_dim
self.input_length = input_length
if self.input_dim:
kwargs['input_shape'] = (self.input_length, self.input_dim)
super(Convolution1D_Transpose, self).__init__(**kwargs)
def build(self, input_shape):
input_dim = input_shape[2]
# self.W_shape = (self.nb_filter, input_dim, self.filter_length, 1)
self.W = self.init(self.W_shape, name='{}_W'.format(self.name))
self.b = K.zeros((self.b_shape), name='{}_b'.format(self.name))
self.trainable_weights = [self.W, self.b]
self.regularizers = []
if self.W_regularizer:
self.W_regularizer.set_param(self.W)
self.regularizers.append(self.W_regularizer)
if self.b_regularizer:
self.b_regularizer.set_param(self.b)
self.regularizers.append(self.b_regularizer)
if self.activity_regularizer:
self.activity_regularizer.set_layer(self)
self.regularizers.append(self.activity_regularizer)
self.constraints = {}
if self.W_constraint:
self.constraints[self.W] = self.W_constraint
if self.b_constraint:
self.constraints[self.b] = self.b_constraint
if self.initial_weights is not None:
self.set_weights(self.initial_weights)
del self.initial_weights
@property
def get_output_shape(self, input_shape):
return self.deconv_shape
def call(self, X, mask=None):
# 1D -> 2D
X = K.expand_dims(X,2)
X = K.permute_dimensions(X, (0, 2, 3, 1))
conv_out = tf.nn.conv2d_transpose(X, self.W, strides=self.strides,
padding=self.padding.upper(),
output_shape=self.deconv_shape)
output = conv_out + K.reshape(self.b, (1, 1, 1, self.W_shape[2]))
output = K.permute_dimensions(output, (0, 3, 1, 2))
# 2D -> 1D
output = K.squeeze(output,2)
return output
def get_config(self):
config = {
'init': self.init.__name__,
'activation': self.activation.__name__,
'padding': self.padding,
'strides': self.strides,
'W_regularizer': self.W_regularizer.get_config() if self.W_regularizer else None,
'b_regularizer': self.b_regularizer.get_config() if self.b_regularizer else None,
'activity_regularizer': self.activity_regularizer.get_config() if self.activity_regularizer else None,
'W_constraint': self.W_constraint.get_config() if self.W_constraint else None,
'b_constraint': self.b_constraint.get_config() if self.b_constraint else None,
'W_shape': self.W_shape,
'b_shape': self.b_shape,
'deconv_shape': self.deconv_shape }
base_config = super(Convolution1D_Transpose, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def get_output_shape_for(self, input_shape):
#return (self.deconv_shape[0],self.deconv_shape[1],self.deconv_shape[2],self.deconv_shape[3])
return (self.deconv_shape[0],self.deconv_shape[1],self.deconv_shape[2])
class Convolution1D_Transpose_Arbitrary(Layer):
"""
Creates a 1D Convolution Transpose layer (sometimes called "Deconvolution").
Based on code by Xiaomin Wu in "[fchollet/keras] Anyone implemented a Deconvolutional
layer combined the Keras and Tensorflow? (#2106)"
Must be placed in the keras/layers/ directory.
W_shape --- shape of the weights - should be calculated internally
[filter_dim_x, self.nb_filter(=number of channels in output), number_of_channels_in_input]
b_shape ... shape of the biases - should be calculated internally
[0] ... nb_filter
strides
Strides of the filters
[stride_in_batch_size(must be 1), stride_x, stride_in_depth (must be 1)]
deconv_shape
this is output_shape of TF conv2d_transpose
deconv_shape = [batch_size, output_size_x, number_of_filters]
padding: valid|same (small caps)
input_dim, input_length ... Keras input parameters
You can also set the output_shape (deconv_shape) according to:
def conv_transpose_out_length(input_size, filter_size, border_mode, stride):
if input_size is None:
return None
if border_mode == 'valid':
output_size = (input_size - 1) * stride + filter_size
elif border_mode == 'same':
output_size = input_size
return output_size
Convolution1D_Transpose_Arbitrary
"""
input_ndim = 3
def __init__(self, nb_filter, filter_length,
init='glorot_uniform', activation='linear', weights=None,
padding='valid', strides=[1,1,1],
W_regularizer=None, b_regularizer=None, activity_regularizer=None,
W_constraint=None, b_constraint=None, input_dim=None, input_length=None, **kwargs):
if padding not in {'valid','same'}:
raise Exception('Invalid border mode for Convolution1D:', padding)
#self.deconv_shape = deconv_shape
# transform 1 D in 2D
#deconv_shape = [batch_size, output_size_y, output_size_x, number_of_filters]
# self.deconv_shape = [deconv_shape[0],1,deconv_shape[1],deconv_shape[2]]
self.nb_filter = nb_filter
self.filter_length = filter_length
self.init = initializations.get(init)
self.activation = activations.get(activation)
assert padding in {'valid', 'same'}, 'border_mode must be in {valid, same}'
self.padding = padding
# necessary for loading, since a 4 dim. stride will be saved
if len(strides) == 3:
self.strides = [strides[0], 1, strides[1], strides[2]]
else:
self.strides = strides
self.W_regularizer = regularizers.get(W_regularizer)
self.b_regularizer = regularizers.get(b_regularizer)
self.activity_regularizer = regularizers.get(activity_regularizer)
# self.W_shape = [1, W_shape[0], W_shape[1], W_shape[2]]
# self.b_shape = b_shape
self.W_constraint = constraints.get(W_constraint)
self.b_constraint = constraints.get(b_constraint)
self.constraints = [self.W_constraint, self.b_constraint]
self.initial_weights = weights
#self.input = K.placeholder(ndim=4) # old keras 0.3.x
# Keras 1.0:
self.input_spec = [InputSpec(ndim=3)]
self.initial_weights = weights
self.input_dim = input_dim
self.input_length = input_length
if self.input_dim:
kwargs['input_shape'] = (self.input_length, self.input_dim)
super(Convolution1D_Transpose_Arbitrary, self).__init__(**kwargs)
def build(self, input_shape):
input_dim = input_shape[2]
# self.W_shape = (self.nb_filter, input_dim, self.filter_length, 1)
self.W_shape = (1, self.filter_length, self.nb_filter, input_dim)
print("Weights shape (filter_height, filter_width, nb_filter, input_dim): ", self.W_shape)
self.W = self.init(self.W_shape, name='{}_W'.format(self.name))
self.b = K.zeros((self.nb_filter), name='{}_b'.format(self.name))
self.trainable_weights = [self.W, self.b]
self.regularizers = []
if self.W_regularizer:
self.W_regularizer.set_param(self.W)
self.regularizers.append(self.W_regularizer)
if self.b_regularizer:
self.b_regularizer.set_param(self.b)
self.regularizers.append(self.b_regularizer)
if self.activity_regularizer:
self.activity_regularizer.set_layer(self)
self.regularizers.append(self.activity_regularizer)
self.constraints = {}
if self.W_constraint:
self.constraints[self.W] = self.W_constraint
if self.b_constraint:
self.constraints[self.b] = self.b_constraint
if self.initial_weights is not None:
self.set_weights(self.initial_weights)
del self.initial_weights
def get_output_shape_for(self, input_shape=None):
length = deconv_output_length(input_shape[1],
self.filter_length,
self.padding,
self.strides[2])
print("Output length: ", length)
return (input_shape[0], length, self.nb_filter)
def call(self, X, mask=None):
# 1D -> 2D
batch = K.shape(X)[0]
width = deconv_output_length(K.shape(X)[1],
self.filter_length,
self.padding,
self.strides[2])
print("Output width: ", width)
print("Input shape: ", K.shape(X))
X = K.expand_dims(X,2)
print("Input shape after expand: ", K.shape(X))
# X = K.permute_dimensions(X, (0, 2, 3, 1))
X = K.permute_dimensions(X, (0, 2, 1, 3))
print("Input shape after permute: ", K.shape(X))
deconv_shape = tf.pack([batch, 1, width, self.nb_filter])
print("Deconv shape: ", deconv_shape)
conv_out = tf.nn.conv2d_transpose(X, self.W, strides=self.strides,
padding=self.padding.upper(),
output_shape=deconv_shape)
output = conv_out + K.reshape(self.b, (1, 1, 1, self.W_shape[2]))
print("Output shape: ", K.shape(output))
# output = K.permute_dimensions(output, (0, 3, 1, 2))
output = K.permute_dimensions(output, (0, 2, 1, 3))
print("Output shape after permute: ", K.shape(output))
# 2D -> 1D
output = K.squeeze(output,2)
print("Output shape after squeeze: ", K.shape(output))
return output
def get_config(self):
config = {
'init': self.init.__name__,
'activation': self.activation.__name__,
'padding': self.padding,
'strides': self.strides,
'W_regularizer': self.W_regularizer.get_config() if self.W_regularizer else None,
'b_regularizer': self.b_regularizer.get_config() if self.b_regularizer else None,
'activity_regularizer': self.activity_regularizer.get_config() if self.activity_regularizer else None,
'W_constraint': self.W_constraint.get_config() if self.W_constraint else None,
'b_constraint': self.b_constraint.get_config() if self.b_constraint else None,
'filter_length': self.filter_length,
'nb_filter': self.nb_filter,
'input_length': self.input_length,
'input_dim': self.input_dim
}
base_config = super(Convolution1D_Transpose_Arbitrary, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@property
def get_output_shape(self, input_shape):
#return (self.deconv_shape[0],self.deconv_shape[1],self.deconv_shape[2],self.deconv_shape[3])
return self.get_output_shape_for(input_shape=input_shape)
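# Hypothetical usage under the old Keras 1.x Sequential API that this file
# targets; the layer sizes and input shape below are made up for illustration:
#
#     from keras.models import Sequential
#     model = Sequential()
#     model.add(Convolution1D_Transpose_Arbitrary(nb_filter=32, filter_length=5,
#                                                 padding='same',
#                                                 input_shape=(100, 16)))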
| 41.658301
| 120
| 0.624496
| 2,697
| 21,579
| 4.749722
| 0.064145
| 0.026151
| 0.030445
| 0.017564
| 0.902888
| 0.882358
| 0.873458
| 0.859094
| 0.852069
| 0.840671
| 0
| 0.012454
| 0.274387
| 21,579
| 517
| 121
| 41.738878
| 0.805658
| 0.239909
| 0
| 0.755102
| 0
| 0
| 0.070491
| 0
| 0
| 0
| 0
| 0
| 0.013605
| 1
| 0.064626
| false
| 0
| 0.017007
| 0.017007
| 0.14966
| 0.037415
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 62cdf78168136abc2f375f53a986bc097dac1a1a
| 4,439
| py
| Python
| ckanext/canada/tests/test_functional.py
| HoussamBedja/ckanext-canada
| 9099223beb088c65262cab403be10774e29e06b8
| ["MIT"]
| 31
| 2015-04-19T16:14:55.000Z
| 2021-08-20T13:18:44.000Z
| ckanext/canada/tests/test_functional.py
| HoussamBedja/ckanext-canada
| 9099223beb088c65262cab403be10774e29e06b8
| ["MIT"]
| 214
| 2015-01-20T20:43:26.000Z
| 2022-03-29T20:36:01.000Z
| ckanext/canada/tests/test_functional.py
| HoussamBedja/ckanext-canada
| 9099223beb088c65262cab403be10774e29e06b8
| ["MIT"]
| 46
| 2015-02-18T17:11:06.000Z
| 2022-01-17T17:05:09.000Z
|
import cgi
import datetime
from nose.tools import assert_equal
from nose.plugins.skip import SkipTest
from ckan import plugins
from ckan.tests import *
import ckan.model as model
from ckan.lib.create_test_data import CreateTestData
from ckan.tests.helpers import FunctionalTestBase
class TestNew(FunctionalTestBase):
pkg_names = []
def test_new_required_fields(self):
raise SkipTest('XXX: need to update for new forms')
offset = url_for(controller='package', action='new')
res = self.app.get(offset, extra_environ=self.extra_environ_tester)
assert 'Create dataset' in res
fv = res.forms['dataset-form']
fv['owner_org'] = '9391E0A2-9717-4755-B548-4499C21F917B' # nrcan
fv['title'] = 'english title'
fv['title_fra'] = 'french title'
fv['notes'] = 'english description'
fv['notes_fra'] = 'french description'
fv.set('subject', True, index=1)
fv['keywords'] = 'english keywords'
fv['keywords_fra'] = 'french keywords'
fv['date_published'] = '2000-01-01'
fv['maintenance_and_update_frequency'] = 'As Needed | Au besoin'
# Submit
res = fv.submit('save', extra_environ=self.extra_environ_tester)
# Check dataset page
assert 'Error' not in res, res
res = self.app.get(res.header('Location'),
extra_environ=self.extra_environ_tester)
fv = res.forms['dataset-form']
fv['name'] = 'english resource name'
fv['name_fra'] = 'french resource name'
fv['resource_type'] = 'file'
fv['url'] = 'somewhere'
fv['format'] = 'TXT'
fv['language'] = 'zxx; CAN'
# Submit
res = fv.submit('save', 2,
extra_environ=self.extra_environ_tester)
# Check resource page
assert 'Error' not in res, res
def test_new_missing_fields(self):
raise SkipTest('XXX: need to update for new forms')
offset = url_for(controller='package', action='new')
res = self.app.get(offset, extra_environ=self.extra_environ_tester)
assert 'Create dataset' in res
fv = res.forms['dataset-form']
fv['owner_org'] = '9391E0A2-9717-4755-B548-4499C21F917B' # nrcan
# Submit
res = fv.submit('save', extra_environ=self.extra_environ_tester)
assert 'Error' in res, res
assert 'Title French:Missing value' in res, res
assert 'Subject:Missing value' in res, res
assert 'Title English:Missing value' in res, res
assert 'Description English:Missing value' in res, res
assert 'Description French:Missing value' in res, res
assert 'Tags English:Missing value' in res, res
assert 'Tags French:Missing value' in res, res
assert 'Date Published:Missing value' in res, res
assert 'Frequency:Missing value' in res, res
fv = res.forms['dataset-form']
fv['title'] = 'english title'
fv['title_fra'] = 'french title'
fv['notes'] = 'english description'
fv['notes_fra'] = 'french description'
fv.set('subject', True, index=1)
fv['keywords'] = 'english keywords'
fv['keywords_fra'] = 'french keywords'
fv['date_published'] = '2000-01-01'
fv['maintenance_and_update_frequency'] = 'As Needed | Au besoin'
# Submit
res = fv.submit('save', extra_environ=self.extra_environ_tester)
# Check dataset page
assert 'Error' not in res, res
res = self.app.get(res.header('Location'),
extra_environ=self.extra_environ_tester)
fv = res.forms['dataset-form']
fv['url'] = 'somewhere'
# Submit
res = fv.submit('save', 2,
extra_environ=self.extra_environ_tester)
assert 'Error' in res, res
assert 'Title English:Missing value' in res, res
assert 'Title French:Missing value' in res, res
assert 'Format:Missing value' in res, res
assert 'Language:Missing value' in res, res
fv = res.forms['dataset-form']
fv['name'] = 'english resource name'
fv['name_fra'] = 'french resource name'
fv['resource_type'] = 'file'
fv['format'] = 'TXT'
fv['language'] = 'zxx; CAN'
# Submit
res = fv.submit('save', 2,
extra_environ=self.extra_environ_tester)
# Check resource page
assert 'Error' not in res, res
| 37.302521
| 75
| 0.619509
| 559
| 4,439
| 4.808587
| 0.187835
| 0.039063
| 0.056548
| 0.067708
| 0.856771
| 0.856771
| 0.837426
| 0.790923
| 0.770461
| 0.770461
| 0
| 0.022317
| 0.263122
| 4,439
| 118
| 76
| 37.618644
| 0.79945
| 0.029511
| 0
| 0.747253
| 0
| 0
| 0.313388
| 0.031665
| 0
| 0
| 0
| 0
| 0.241758
| 1
| 0.021978
| false
| 0
| 0.098901
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 1a2dd183bda396e3c6b60dbc0094e69fd4afdade
| 27,518
| py
| Python
| sdk/python/pulumi_ovh/ip_loadbalancing_tcp_frontend.py
| tumblewader/pulumi-ovh
| fd484de69a247cf4f05c22cf73f1c57b973a1dab
| ["ECL-2.0", "Apache-2.0"]
| null
| null
| null
| sdk/python/pulumi_ovh/ip_loadbalancing_tcp_frontend.py
| tumblewader/pulumi-ovh
| fd484de69a247cf4f05c22cf73f1c57b973a1dab
| ["ECL-2.0", "Apache-2.0"]
| null
| null
| null
| sdk/python/pulumi_ovh/ip_loadbalancing_tcp_frontend.py
| tumblewader/pulumi-ovh
| fd484de69a247cf4f05c22cf73f1c57b973a1dab
| ["ECL-2.0", "Apache-2.0"]
| null
| null
| null
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['IPLoadbalancingTCPFrontendArgs', 'IPLoadbalancingTCPFrontend']
@pulumi.input_type
class IPLoadbalancingTCPFrontendArgs:
def __init__(__self__, *,
port: pulumi.Input[str],
service_name: pulumi.Input[str],
zone: pulumi.Input[str],
allowed_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dedicated_ipfos: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
default_farm_id: Optional[pulumi.Input[int]] = None,
default_ssl_id: Optional[pulumi.Input[int]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
display_name: Optional[pulumi.Input[str]] = None,
ssl: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a IPLoadbalancingTCPFrontend resource.
:param pulumi.Input[str] port: Port(s) attached to your frontend. Supports single port (numerical value),
range (2 dash-delimited increasing ports) and comma-separated list of 'single port'
and/or 'range'. Each port must be in the [1;49151] range
:param pulumi.Input[str] service_name: The internal name of your IP load balancing
:param pulumi.Input[str] zone: Zone where the frontend will be defined (ie. `gra`, `bhs` also supports `all`)
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_sources: Restrict IP Load Balancing access to these ip block. No restriction if null. List of IP blocks.
:param pulumi.Input[Sequence[pulumi.Input[str]]] dedicated_ipfos: Only attach frontend on these ip. No restriction if null. List of Ip blocks.
:param pulumi.Input[int] default_farm_id: Default TCP Farm of your frontend
:param pulumi.Input[int] default_ssl_id: Default ssl served to your customer
:param pulumi.Input[bool] disabled: Disable your frontend. Default: 'false'
:param pulumi.Input[str] display_name: Human readable name for your frontend, this field is for you
:param pulumi.Input[bool] ssl: SSL deciphering. Default: 'false'
"""
pulumi.set(__self__, "port", port)
pulumi.set(__self__, "service_name", service_name)
pulumi.set(__self__, "zone", zone)
if allowed_sources is not None:
pulumi.set(__self__, "allowed_sources", allowed_sources)
if dedicated_ipfos is not None:
pulumi.set(__self__, "dedicated_ipfos", dedicated_ipfos)
if default_farm_id is not None:
pulumi.set(__self__, "default_farm_id", default_farm_id)
if default_ssl_id is not None:
pulumi.set(__self__, "default_ssl_id", default_ssl_id)
if disabled is not None:
pulumi.set(__self__, "disabled", disabled)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if ssl is not None:
pulumi.set(__self__, "ssl", ssl)
@property
@pulumi.getter
def port(self) -> pulumi.Input[str]:
"""
Port(s) attached to your frontend. Supports single port (numerical value),
range (2 dash-delimited increasing ports) and comma-separated list of 'single port'
and/or 'range'. Each port must be in the [1;49151] range
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: pulumi.Input[str]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Input[str]:
"""
The internal name of your IP load balancing
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: pulumi.Input[str]):
pulumi.set(self, "service_name", value)
@property
@pulumi.getter
def zone(self) -> pulumi.Input[str]:
"""
Zone where the frontend will be defined (ie. `gra`, `bhs` also supports `all`)
"""
return pulumi.get(self, "zone")
@zone.setter
def zone(self, value: pulumi.Input[str]):
pulumi.set(self, "zone", value)
@property
@pulumi.getter(name="allowedSources")
def allowed_sources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Restrict IP Load Balancing access to these ip block. No restriction if null. List of IP blocks.
"""
return pulumi.get(self, "allowed_sources")
@allowed_sources.setter
def allowed_sources(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "allowed_sources", value)
@property
@pulumi.getter(name="dedicatedIpfos")
def dedicated_ipfos(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Only attach frontend on these ip. No restriction if null. List of Ip blocks.
"""
return pulumi.get(self, "dedicated_ipfos")
@dedicated_ipfos.setter
def dedicated_ipfos(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "dedicated_ipfos", value)
@property
@pulumi.getter(name="defaultFarmId")
def default_farm_id(self) -> Optional[pulumi.Input[int]]:
"""
Default TCP Farm of your frontend
"""
return pulumi.get(self, "default_farm_id")
@default_farm_id.setter
def default_farm_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_farm_id", value)
@property
@pulumi.getter(name="defaultSslId")
def default_ssl_id(self) -> Optional[pulumi.Input[int]]:
"""
Default ssl served to your customer
"""
return pulumi.get(self, "default_ssl_id")
@default_ssl_id.setter
def default_ssl_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_ssl_id", value)
@property
@pulumi.getter
def disabled(self) -> Optional[pulumi.Input[bool]]:
"""
Disable your frontend. Default: 'false'
"""
return pulumi.get(self, "disabled")
@disabled.setter
def disabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disabled", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
Human readable name for your frontend, this field is for you
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter
def ssl(self) -> Optional[pulumi.Input[bool]]:
"""
SSL deciphering. Default: 'false'
"""
return pulumi.get(self, "ssl")
@ssl.setter
def ssl(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ssl", value)
@pulumi.input_type
class _IPLoadbalancingTCPFrontendState:
def __init__(__self__, *,
allowed_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dedicated_ipfos: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
default_farm_id: Optional[pulumi.Input[int]] = None,
default_ssl_id: Optional[pulumi.Input[int]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
display_name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
ssl: Optional[pulumi.Input[bool]] = None,
zone: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering IPLoadbalancingTCPFrontend resources.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_sources: Restrict IP Load Balancing access to these ip block. No restriction if null. List of IP blocks.
:param pulumi.Input[Sequence[pulumi.Input[str]]] dedicated_ipfos: Only attach frontend on these ip. No restriction if null. List of Ip blocks.
:param pulumi.Input[int] default_farm_id: Default TCP Farm of your frontend
:param pulumi.Input[int] default_ssl_id: Default ssl served to your customer
:param pulumi.Input[bool] disabled: Disable your frontend. Default: 'false'
:param pulumi.Input[str] display_name: Human readable name for your frontend, this field is for you
:param pulumi.Input[str] port: Port(s) attached to your frontend. Supports single port (numerical value),
range (2 dash-delimited increasing ports) and comma-separated list of 'single port'
and/or 'range'. Each port must be in the [1;49151] range
:param pulumi.Input[str] service_name: The internal name of your IP load balancing
:param pulumi.Input[bool] ssl: SSL deciphering. Default: 'false'
:param pulumi.Input[str] zone: Zone where the frontend will be defined (ie. `gra`, `bhs` also supports `all`)
"""
if allowed_sources is not None:
pulumi.set(__self__, "allowed_sources", allowed_sources)
if dedicated_ipfos is not None:
pulumi.set(__self__, "dedicated_ipfos", dedicated_ipfos)
if default_farm_id is not None:
pulumi.set(__self__, "default_farm_id", default_farm_id)
if default_ssl_id is not None:
pulumi.set(__self__, "default_ssl_id", default_ssl_id)
if disabled is not None:
pulumi.set(__self__, "disabled", disabled)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if port is not None:
pulumi.set(__self__, "port", port)
if service_name is not None:
pulumi.set(__self__, "service_name", service_name)
if ssl is not None:
pulumi.set(__self__, "ssl", ssl)
if zone is not None:
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="allowedSources")
def allowed_sources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Restrict IP Load Balancing access to these ip block. No restriction if null. List of IP blocks.
"""
return pulumi.get(self, "allowed_sources")
@allowed_sources.setter
def allowed_sources(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "allowed_sources", value)
@property
@pulumi.getter(name="dedicatedIpfos")
def dedicated_ipfos(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Only attach frontend on these ip. No restriction if null. List of Ip blocks.
"""
return pulumi.get(self, "dedicated_ipfos")
@dedicated_ipfos.setter
def dedicated_ipfos(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "dedicated_ipfos", value)
@property
@pulumi.getter(name="defaultFarmId")
def default_farm_id(self) -> Optional[pulumi.Input[int]]:
"""
Default TCP Farm of your frontend
"""
return pulumi.get(self, "default_farm_id")
@default_farm_id.setter
def default_farm_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_farm_id", value)
@property
@pulumi.getter(name="defaultSslId")
def default_ssl_id(self) -> Optional[pulumi.Input[int]]:
"""
Default ssl served to your customer
"""
return pulumi.get(self, "default_ssl_id")
@default_ssl_id.setter
def default_ssl_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_ssl_id", value)
@property
@pulumi.getter
def disabled(self) -> Optional[pulumi.Input[bool]]:
"""
Disable your frontend. Default: 'false'
"""
return pulumi.get(self, "disabled")
@disabled.setter
def disabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disabled", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
Human readable name for your frontend, this field is for you
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[str]]:
"""
Port(s) attached to your frontend. Supports single port (numerical value),
range (2 dash-delimited increasing ports) and comma-separated list of 'single port'
and/or 'range'. Each port must be in the [1;49151] range
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> Optional[pulumi.Input[str]]:
"""
The internal name of your IP load balancing
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_name", value)
@property
@pulumi.getter
def ssl(self) -> Optional[pulumi.Input[bool]]:
"""
SSL deciphering. Default: 'false'
"""
return pulumi.get(self, "ssl")
@ssl.setter
def ssl(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ssl", value)
@property
@pulumi.getter
def zone(self) -> Optional[pulumi.Input[str]]:
"""
Zone where the frontend will be defined (ie. `gra`, `bhs` also supports `all`)
"""
return pulumi.get(self, "zone")
@zone.setter
def zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone", value)
class IPLoadbalancingTCPFrontend(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allowed_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dedicated_ipfos: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
default_farm_id: Optional[pulumi.Input[int]] = None,
default_ssl_id: Optional[pulumi.Input[int]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
display_name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
ssl: Optional[pulumi.Input[bool]] = None,
zone: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates a backend server group (frontend) to be used by loadbalancing frontend(s)
## Example Usage
```python
import pulumi
import pulumi_ovh as ovh
lb = ovh.get_ip_loadbalancing(service_name="ip-1.2.3.4",
state="ok")
farm80 = ovh.IPLoadbalancingTCPFarm("farm80",
display_name="ingress-8080-gra",
port=80,
service_name=lb.service_name,
zone="all")
testfrontend = ovh.IPLoadbalancingTCPFrontend("testfrontend",
default_farm_id=farm80.id,
display_name="ingress-8080-gra",
port="80,443",
service_name=lb.service_name,
zone="all")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_sources: Restrict IP Load Balancing access to these IP blocks. No restriction if null.
:param pulumi.Input[Sequence[pulumi.Input[str]]] dedicated_ipfos: Only attach the frontend on these IPs. No restriction if null. List of IP blocks.
:param pulumi.Input[int] default_farm_id: Default TCP Farm of your frontend
:param pulumi.Input[int] default_ssl_id: Default SSL certificate served to your customers
:param pulumi.Input[bool] disabled: Disable your frontend. Default: 'false'
:param pulumi.Input[str] display_name: Human-readable name for your frontend; this field is for your own use
:param pulumi.Input[str] port: Port(s) attached to your frontend. Supports a single port (numerical value),
a range (two dash-delimited, increasing ports), or a comma-separated list of
single ports and/or ranges. Each port must be in the [1;49151] range
:param pulumi.Input[str] service_name: The internal name of your IP load balancing service
:param pulumi.Input[bool] ssl: Enable SSL deciphering on this frontend. Default: 'false'
:param pulumi.Input[str] zone: Zone where the frontend will be defined (e.g. `gra` or `bhs`; `all` is also supported)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: IPLoadbalancingTCPFrontendArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a TCP frontend to be attached to your IP load balancing service
## Example Usage
```python
import pulumi
import pulumi_ovh as ovh
lb = ovh.get_ip_loadbalancing(service_name="ip-1.2.3.4",
state="ok")
farm80 = ovh.IPLoadbalancingTCPFarm("farm80",
display_name="ingress-8080-gra",
port=80,
service_name=lb.service_name,
zone="all")
testfrontend = ovh.IPLoadbalancingTCPFrontend("testfrontend",
default_farm_id=farm80.id,
display_name="ingress-8080-gra",
port="80,443",
service_name=lb.service_name,
zone="all")
```
:param str resource_name: The name of the resource.
:param IPLoadbalancingTCPFrontendArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(IPLoadbalancingTCPFrontendArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
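# Both overloads dispatch through this constructor; the two call forms below are equivalent
# (values are illustrative assumptions, not taken from a real service):
#   IPLoadbalancingTCPFrontend("fe", IPLoadbalancingTCPFrontendArgs(port="80", service_name="ip-1.2.3.4", zone="all"))
#   IPLoadbalancingTCPFrontend("fe", port="80", service_name="ip-1.2.3.4", zone="all")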
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allowed_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dedicated_ipfos: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
default_farm_id: Optional[pulumi.Input[int]] = None,
default_ssl_id: Optional[pulumi.Input[int]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
display_name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
ssl: Optional[pulumi.Input[bool]] = None,
zone: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = IPLoadbalancingTCPFrontendArgs.__new__(IPLoadbalancingTCPFrontendArgs)
__props__.__dict__["allowed_sources"] = allowed_sources
__props__.__dict__["dedicated_ipfos"] = dedicated_ipfos
__props__.__dict__["default_farm_id"] = default_farm_id
__props__.__dict__["default_ssl_id"] = default_ssl_id
__props__.__dict__["disabled"] = disabled
__props__.__dict__["display_name"] = display_name
if port is None and not opts.urn:
raise TypeError("Missing required property 'port'")
__props__.__dict__["port"] = port
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__.__dict__["service_name"] = service_name
__props__.__dict__["ssl"] = ssl
if zone is None and not opts.urn:
raise TypeError("Missing required property 'zone'")
__props__.__dict__["zone"] = zone
super(IPLoadbalancingTCPFrontend, __self__).__init__(
'ovh:index/iPLoadbalancingTCPFrontend:IPLoadbalancingTCPFrontend',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
allowed_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dedicated_ipfos: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
default_farm_id: Optional[pulumi.Input[int]] = None,
default_ssl_id: Optional[pulumi.Input[int]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
display_name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
ssl: Optional[pulumi.Input[bool]] = None,
zone: Optional[pulumi.Input[str]] = None) -> 'IPLoadbalancingTCPFrontend':
"""
Get an existing IPLoadbalancingTCPFrontend resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_sources: Restrict IP Load Balancing access to these IP blocks. No restriction if null.
:param pulumi.Input[Sequence[pulumi.Input[str]]] dedicated_ipfos: Only attach the frontend on these IPs. No restriction if null. List of IP blocks.
:param pulumi.Input[int] default_farm_id: Default TCP Farm of your frontend
:param pulumi.Input[int] default_ssl_id: Default SSL certificate served to your customers
:param pulumi.Input[bool] disabled: Disable your frontend. Default: 'false'
:param pulumi.Input[str] display_name: Human-readable name for your frontend; this field is for your own use
:param pulumi.Input[str] port: Port(s) attached to your frontend. Supports a single port (numerical value),
a range (two dash-delimited, increasing ports), or a comma-separated list of
single ports and/or ranges. Each port must be in the [1;49151] range
:param pulumi.Input[str] service_name: The internal name of your IP load balancing service
:param pulumi.Input[bool] ssl: Enable SSL deciphering on this frontend. Default: 'false'
:param pulumi.Input[str] zone: Zone where the frontend will be defined (e.g. `gra` or `bhs`; `all` is also supported)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _IPLoadbalancingTCPFrontendState.__new__(_IPLoadbalancingTCPFrontendState)
__props__.__dict__["allowed_sources"] = allowed_sources
__props__.__dict__["dedicated_ipfos"] = dedicated_ipfos
__props__.__dict__["default_farm_id"] = default_farm_id
__props__.__dict__["default_ssl_id"] = default_ssl_id
__props__.__dict__["disabled"] = disabled
__props__.__dict__["display_name"] = display_name
__props__.__dict__["port"] = port
__props__.__dict__["service_name"] = service_name
__props__.__dict__["ssl"] = ssl
__props__.__dict__["zone"] = zone
return IPLoadbalancingTCPFrontend(resource_name, opts=opts, __props__=__props__)
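# Hypothetical lookup of an already-provisioned frontend; the resource name and ID below are
# assumptions, use the identifier reported by your Pulumi state or the OVH API:
#   existing = IPLoadbalancingTCPFrontend.get("existing-frontend", id="<frontend-id>")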
@property
@pulumi.getter(name="allowedSources")
def allowed_sources(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Restrict IP Load Balancing access to these IP blocks. No restriction if null.
"""
return pulumi.get(self, "allowed_sources")
@property
@pulumi.getter(name="dedicatedIpfos")
def dedicated_ipfos(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Only attach the frontend on these IPs. No restriction if null. List of IP blocks.
"""
return pulumi.get(self, "dedicated_ipfos")
@property
@pulumi.getter(name="defaultFarmId")
def default_farm_id(self) -> pulumi.Output[int]:
"""
Default TCP Farm of your frontend
"""
return pulumi.get(self, "default_farm_id")
@property
@pulumi.getter(name="defaultSslId")
def default_ssl_id(self) -> pulumi.Output[int]:
"""
Default SSL certificate served to your customers
"""
return pulumi.get(self, "default_ssl_id")
@property
@pulumi.getter
def disabled(self) -> pulumi.Output[Optional[bool]]:
"""
Disable your frontend. Default: 'false'
"""
return pulumi.get(self, "disabled")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[Optional[str]]:
"""
Human-readable name for your frontend; this field is for your own use
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def port(self) -> pulumi.Output[str]:
"""
Port(s) attached to your frontend. Supports a single port (numerical value),
a range (two dash-delimited, increasing ports), or a comma-separated list of
single ports and/or ranges. Each port must be in the [1;49151] range
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Output[str]:
"""
The internal name of your IP load balancing service
"""
return pulumi.get(self, "service_name")
@property
@pulumi.getter
def ssl(self) -> pulumi.Output[Optional[bool]]:
"""
Enable SSL deciphering on this frontend. Default: 'false'
"""
return pulumi.get(self, "ssl")
@property
@pulumi.getter
def zone(self) -> pulumi.Output[str]:
"""
Zone where the frontend will be defined (e.g. `gra` or `bhs`; `all` is also supported)
"""
return pulumi.get(self, "zone")
| 43.679365
| 169
| 0.639036
| 3,297
| 27,518
| 5.134061
| 0.064604
| 0.103976
| 0.09092
| 0.033674
| 0.882023
| 0.864063
| 0.849826
| 0.831275
| 0.825604
| 0.811012
| 0
| 0.004862
| 0.252635
| 27,518
| 629
| 170
| 43.748808
| 0.818195
| 0.325205
| 0
| 0.782857
| 1
| 0
| 0.091289
| 0.008546
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162857
| false
| 0.002857
| 0.014286
| 0
| 0.274286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
hexsha: a7f4f683c1851eb9b46d003963ac4e23db9b3c1f | size: 135 | ext: py | lang: Python
max_stars_repo_path: rlpy/Tools/__init__.py | max_stars_repo_name: imanolarrieta/RL | max_stars_repo_head_hexsha: 072a8c328652f45e053baecd640f04adf7f84b49 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2019-12-07T13:47:43.000Z | max_stars_repo_stars_event_max_datetime: 2019-12-07T13:47:43.000Z
max_issues_repo_path: rlpy/Tools/__init__.py | max_issues_repo_name: imanolarrieta/RL | max_issues_repo_head_hexsha: 072a8c328652f45e053baecd640f04adf7f84b49 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: rlpy/Tools/__init__.py | max_forks_repo_name: imanolarrieta/RL | max_forks_repo_head_hexsha: 072a8c328652f45e053baecd640f04adf7f84b49 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
from .GeneralTools import *
from .PriorityQueueWithNovelty import PriorityQueueWithNovelty
from .GeneralTools import __rlpy_location__
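# With these re-exports in place, downstream code can import directly from the package, e.g.:
#   from rlpy.Tools import PriorityQueueWithNovelty, __rlpy_location__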
| 33.75
| 62
| 0.881481
| 12
| 135
| 9.5
| 0.5
| 0.280702
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 135
| 3
| 63
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
hexsha: c52e2b8e61ebd8060f22ae93d5d1bad37b5b6c17 | size: 11,809 | ext: py | lang: Python
max_stars_repo_path: core/number_objects/lattice.py | max_stars_repo_name: mike006322/PolynomialCalculator | max_stars_repo_head_hexsha: bf56b0e773a3461ab2aa958d0d90e08f80a4d201 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: core/number_objects/lattice.py | max_issues_repo_name: mike006322/PolynomialCalculator | max_issues_repo_head_hexsha: bf56b0e773a3461ab2aa958d0d90e08f80a4d201 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: core/number_objects/lattice.py | max_forks_repo_name: mike006322/PolynomialCalculator | max_forks_repo_head_hexsha: bf56b0e773a3461ab2aa958d0d90e08f80a4d201 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
from number_objects.primitives.matrix import *
from core.lll import lll_reduction
from core.norms import euclidean_norm
class Lattice:
def __init__(self, matrix):
self.matrix = Matrix(matrix)
@property
def center_density(self):
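# Center density is a sphere-packing quality measure: delta = r**d / sqrt(det(B * B^T)),
# where B is an LLL-reduced basis of the lattice, r is half the Euclidean norm of its first
# vector (the packing radius) and d is the dimension; the code below computes exactly this.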
# logging.info('Calculating center density of \n' + str(self))
b = Matrix(lll_reduction(self.matrix, .75)) # LLL basis reduction
# logging.debug('LLL reduced matrix: \n' + str(b))
# b = [[2261337070362461927454409267102922016, 2786201470971518444733667209602978520, -4063137902647177185063144484695148536, -2671959977702871335511367439824858800, 531672722663545453899956709733136784, -11066285142314948047878309605319720, 27810585035274635766780802193736, 80477592476538094337833811509248000, -1086760859784102657122293398528000, 9752254479710833049833929818112000, 9747833549555398956714000626688000, -51587604063166080587210730086400000, -60524907661568022921499268505600000, -492200319343401593562465996005376000, 583462545454901183949295133442048000], [-899494188371811636420443028386860751295, -1108434625918842986975056473301497593175, 1615769677917180582267507871877690824470, 1063379506967919084269378312198014464750, -211564555755008400078692722238064433455, 4403463552145413177657728884859128425, -11066285142314948047878309605319720, -32003324972857913192572676601692160000, 418985478692327218883000558146560000, -3840815241769921138963494739653120000, -3839262916845861056057412820515840000, 20325827699389244764010216109312000000, 23839618579450476114878719213455360000, 196079568186737020563995814905333760000, -232024106053219870646151905362268160000], [43100299379683388889025306183843867615329, 53169099307165431861888597272242091941905, -77270457311786540396305318689886221189234, -51146681589945024535858232882889736308450, 10165649632268733478209358027269080213121, -211564555755008400078692722238064433455, 531672722663545453899956709733136784, 1530560239010093486735522955513741312000, -15851279489922542611520014844178432000, 170593780024180617058405787354500608000, 170581396709595165194156236181655552000, -905426580732699462975835833557145600000, -1059633711003548915688215664111605760000, -9499470687531140177304292383099869184000, 11096561732823177778832541427474624512000], [-210846862850656692460971283486127288794050, -262755519394180862834491203449237567567250, 370245689432997527217142649947144030761300, 259921835205938196981636999480683713102500, -51146681589945024535858232882889736308450, 1063379506967919084269378312198014464750, -2671959977702871335511367439824858800, -7343390920768507626233985969264230400000, 15433380772089999495294136579430400000, -166762717036890406752492126544665600000, -166751881626298859537055537361766400000, 885152340637373384292412828177920000000, 1035854617331760007596258444166656000000, 51678346405920014039159918494110412800000, -53239584175571680290196398277165670400000], [-368106481519924773904534414350886532772066, -446691286081632524832614881103596134385770, 723129597447356174884835729247351174805836, 370245689432997527217142649947144030761300, -77270457311786540396305318689886221189234, 1615769677917180582267507871877690824470, -4063137902647177185063144484695148536, -13994397828070648025765945501897883648000, -5829319061605845730998746597667843072000, -6011525998430165632182239817015155712000, -6011515158598643929532710107903065088000, 37414913485358908516602361642287206400000, 37565624699356893541848541546814361600000, 45834085077100561926156783103565746176000, -101459384253512198186803104888759656448000], [200016301500560260910831484790674354345345, 338734329471008175012037829217871929160425, -446691286081632524832614881103596134385770, -262755519394180862834491203449237567567250, 53169099307165431861888597272242091941905, -1108434625918842986975056473301497593175, 2786201470971518444733667209602978520, 8181567146312233886267482488147394560000, 5828901162888013187882520719403095040000, 6015357061417455842488153477824990720000, 
6015344673681940235189810806722954240000, -37435187725454234595285784647666432000000, 4785059293227585030885560693160560640000, -46029672444967955545127216452475074560000, 59316361810763695675439248039068610560000], [6619003582329282486339386856031961088000, 8181567146312233886267482488147394560000, -13994397828070648025765945501897883648000, -7343390920768507626233985969264230400000, 1530560239010093486735522955513741312000, -32003324972857913192572676601692160000, 80477592476538094337833811509248000, 347488319211581183983156828569600000000, -403086450285434173420461921140736000000, -403086450285434173420461921140736000000, -403086450285434173420461921140736000000, -403086450285434173420461921140736000000, -403086450285434173420461921140736000000, -403086450285434173420461921140736000000, 2519290314283963583877887007129600000000], [240758424397864669688220786069151921658721, 200016301500560260910831484790674354345345, -368106481519924773904534414350886532772066, -210846862850656692460971283486127288794050, 43100299379683388889025306183843867615329, -899494188371811636420443028386860751295, 2261337070362461927454409267102922016, 6619003582329282486339386856031961088000, 5845171427856628057712923734805420032000, 5840922466151505304290784195730836992000, 5840924014055499208939597157720782848000, 5865027769234121593280120861919897600000, -36505930463445683058137404383434250240000, -36334122189250078347258928254469871616000, 47987775971887298025960554706231717888000], [5845171427856628057712923734805420032000, 5828901162888013187882520719403095040000, -5829319061605845730998746597667843072000, 15433380772089999495294136579430400000, -15851279489922542611520014844178432000, 418985478692327218883000558146560000, -1086760859784102657122293398528000, -403086450285434173420461921140736000000, 18264854778558735983114680801689600000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000], [5840922466151505304290784195730836992000, 6015357061417455842488153477824990720000, -6011525998430165632182239817015155712000, -166762717036890406752492126544665600000, 170593780024180617058405787354500608000, -3840815241769921138963494739653120000, 9752254479710833049833929818112000, -403086450285434173420461921140736000000, -2922376764569397757298348928270336000000, 18264854778558735983114680801689600000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000], [5840924014055499208939597157720782848000, 6015344673681940235189810806722954240000, -6011515158598643929532710107903065088000, -166751881626298859537055537361766400000, 170581396709595165194156236181655552000, -3839262916845861056057412820515840000, 9747833549555398956714000626688000, -403086450285434173420461921140736000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, 18264854778558735983114680801689600000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000], [5865027769234121593280120861919897600000, -37435187725454234595285784647666432000000, 37414913485358908516602361642287206400000, 885152340637373384292412828177920000000, 
-905426580732699462975835833557145600000, 20325827699389244764010216109312000000, -51587604063166080587210730086400000, -403086450285434173420461921140736000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, 18264854778558735983114680801689600000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000], [-36505930463445683058137404383434250240000, 4785059293227585030885560693160560640000, 37565624699356893541848541546814361600000, 1035854617331760007596258444166656000000, -1059633711003548915688215664111605760000, 23839618579450476114878719213455360000, -60524907661568022921499268505600000, -403086450285434173420461921140736000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, 18264854778558735983114680801689600000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000], [-36334122189250078347258928254469871616000, -46029672444967955545127216452475074560000, 45834085077100561926156783103565746176000, 51678346405920014039159918494110412800000, -9499470687531140177304292383099869184000, 196079568186737020563995814905333760000, -492200319343401593562465996005376000, -403086450285434173420461921140736000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, 18264854778558735983114680801689600000000, -2922376764569397757298348928270336000000], [47987775971887298025960554706231717888000, 59316361810763695675439248039068610560000, -101459384253512198186803104888759656448000, -53239584175571680290196398277165670400000, 11096561732823177778832541427474624512000, -232024106053219870646151905362268160000, 583462545454901183949295133442048000, 2519290314283963583877887007129600000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, -2922376764569397757298348928270336000000, 18264854778558735983114680801689600000000]]
# b = [list(map(Rational, x)) for x in b]
b = Matrix(b)
r = euclidean_norm(b[0]) / 2 # radius
d = len(self.matrix[0]) # dimension
bb = b * b.transpose()
# logging.debug('Calculating determinant to get center density.')
det_bb = bb.determinant()
# logging.debug('determinant: ' + str(det_bb))
# print(det_bb)
det_bb **= .5
# logging.debug('sqr root of det:' + str(det_bb) + str(type(det_bb)))
# logging.debug('type r**d: ' + str(type(r**d)))
# logging.debug(str(r**d / det_bb))
return float(r ** d / det_bb)
def __repr__(self):
return str(self.matrix)
def __str__(self):
return str(self.matrix)
if __name__ == '__main__':
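# 1/(4*sqrt(2)) ~= 0.17678 is the center density of the densest 3-D lattice packing (FCC),
# presumably printed as a reference value for the 3-D examples below.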
print(1/(4*(2**.5)))
L = Lattice([[1, 1, 1], [-1, 0, 2], [3, 5, 6]])
print(float(L.center_density))
# R_i = D * 1.1
L = Lattice([[-0.0433884297520686, 0.9566115702479883, 0.0216942148760343], [0.9783057851239667, -0.02169421487603307, 0.9132231404958682], [-0.9783057851239669, 0.02169421487603307, 1.8915289256198347]])
# R_i = D * 2
print(float(L.center_density))
L = Lattice([[-0.18749999999999992, 0.875, 0.09374999999999983], [0.8229166666666899, -0.09375000000000266, 0.8541666666666909], [-0.9270833333333333, 0.09375, 1.5729166666666665]])
print(float(L.center_density))
# R_i = D * 5
L = Lattice([[-0.13499999999999995, 0.875, -0.09000000000000008], [0.8333333333333451, -0.1200000000000017, 0.8333333333333451], [-0.9166666666666666, 0.12, 1.5833333333333333]])
print(float(L.center_density))
# R_i = D * 8
L = Lattice([[-0.12890624999999994, 0.875, -0.11132812500000011], [0.8333333333333214, -0.12304687499999825, 0.8333333333333214], [-0.9166666666666666, 0.123046875, 1.5833333333333333]])
print(float(L.center_density))
| 210.875
| 9,492
| 0.876789
| 529
| 11,809
| 19.491493
| 0.351607
| 0.232761
| 0.232761
| 0.186209
| 0.241296
| 0.204248
| 0.013481
| 0.007856
| 0
| 0
| 0
| 0.850594
| 0.0597
| 11,809
| 55
| 9,493
| 214.709091
| 0.07799
| 0.84605
| 0
| 0.21875
| 0
| 0
| 0.004447
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.09375
| 0.0625
| 0.34375
| 0.1875
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
hexsha: c53c4465e7403529ced263ea85a13229b5e71a33 | size: 145 | ext: py | lang: Python
max_stars_repo_path: privacy_evaluator/datasets/__init__.py | max_stars_repo_name: mariesig/privacy-evaluator | max_stars_repo_head_hexsha: 4e6ced65cc71bb661aef4518192517e23e22595e | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: privacy_evaluator/datasets/__init__.py | max_issues_repo_name: mariesig/privacy-evaluator | max_issues_repo_head_hexsha: 4e6ced65cc71bb661aef4518192517e23e22595e | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: privacy_evaluator/datasets/__init__.py | max_forks_repo_name: mariesig/privacy-evaluator | max_forks_repo_head_hexsha: 4e6ced65cc71bb661aef4518192517e23e22595e | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
"""
Module providing datasets.
"""
from privacy_evaluator.datasets.dataset import Dataset
from privacy_evaluator.datasets.cifar10 import CIFAR10
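# With this module in place, consumers can import the dataset classes from the package, e.g.:
#   from privacy_evaluator.datasets import Dataset, CIFAR10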
| 24.166667
| 54
| 0.834483
| 17
| 145
| 7
| 0.529412
| 0.184874
| 0.336134
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.089655
| 145
| 5
| 55
| 29
| 0.871212
| 0.17931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|