hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8ab4162b6bc2480734a53698529a09dede205349 | 275 | py | Python | csrank/dataset_reader/choicefunctions/__init__.py | helegraf/cs-ranking | a635d59a254e7d4cad06c3e04f7593392e0b9cec | [
"Apache-2.0"
] | null | null | null | csrank/dataset_reader/choicefunctions/__init__.py | helegraf/cs-ranking | a635d59a254e7d4cad06c3e04f7593392e0b9cec | [
"Apache-2.0"
] | null | null | null | csrank/dataset_reader/choicefunctions/__init__.py | helegraf/cs-ranking | a635d59a254e7d4cad06c3e04f7593392e0b9cec | [
"Apache-2.0"
] | null | null | null | from .choice_data_generator import ChoiceDatasetGenerator
from .expedia_choice_dataset_reader import ExpediaChoiceDatasetReader
from .letor_ranking_choice_dataset_reader import LetorRankingChoiceDatasetReader
from .mnist_choice_dataset_reader import MNISTChoiceDatasetReader
| 55 | 80 | 0.927273 | 28 | 275 | 8.678571 | 0.535714 | 0.160494 | 0.234568 | 0.308642 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.058182 | 275 | 4 | 81 | 68.75 | 0.938224 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
8abee21a5a2c56285da002c9483142103075e834 | 65 | py | Python | collection/31.py | nemero/py_neural | 87f151097f8c331a06f13b96c4cec9a1ee663abf | [
"MIT"
] | null | null | null | collection/31.py | nemero/py_neural | 87f151097f8c331a06f13b96c4cec9a1ee663abf | [
"MIT"
] | 1 | 2017-01-18T18:35:03.000Z | 2017-01-25T08:55:49.000Z | collection/31.py | nemero/py_neural | 87f151097f8c331a06f13b96c4cec9a1ee663abf | [
"MIT"
] | null | null | null | 0 0 0 1 1
0 0 0 0
0 0 0 0
0 1 1 1
0 0 0 1
0 1 1 1
0 0 0 1
0 1 1 1 | 8.125 | 9 | 0.507692 | 33 | 65 | 1 | 0.060606 | 0.848485 | 0.909091 | 0.727273 | 1 | 0.848485 | 0.848485 | 0.848485 | 0.606061 | 0.606061 | 0 | 1 | 0.492308 | 65 | 8 | 10 | 8.125 | 0 | 0 | 0 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 |
76e17bde513b694b8bf99be67f6e652b56a97168 | 37,340 | py | Python | sdk/tables/azure-data-tables/tests/test_table_batch.py | annatisch/azure-sdk-for-python | 593881a216c6c6092df081ac902eafe6d93187c6 | [
"MIT"
] | null | null | null | sdk/tables/azure-data-tables/tests/test_table_batch.py | annatisch/azure-sdk-for-python | 593881a216c6c6092df081ac902eafe6d93187c6 | [
"MIT"
] | 1 | 2019-08-05T19:14:28.000Z | 2019-08-05T19:30:05.000Z | sdk/tables/azure-data-tables/tests/test_table_batch.py | annatisch/azure-sdk-for-python | 593881a216c6c6092df081ac902eafe6d93187c6 | [
"MIT"
] | 1 | 2016-04-19T22:15:47.000Z | 2016-04-19T22:15:47.000Z | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
from datetime import datetime, timedelta
import os
import sys
from devtools_testutils import AzureTestCase
from azure.core import MatchConditions
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
from azure.core.exceptions import (
ResourceNotFoundError,
ClientAuthenticationError,
HttpResponseError
)
from azure.data.tables import (
EdmType,
TableEntity,
EntityProperty,
UpdateMode,
TableTransactionError,
TableServiceClient,
TableEntity,
UpdateMode,
generate_table_sas,
TableSasPermissions,
RequestTooLargeError,
TransactionOperation,
TableErrorCode
)
from _shared.testcase import TableTestCase
from preparers import tables_decorator
#------------------------------------------------------------------------------
TEST_TABLE_PREFIX = 'table'
#------------------------------------------------------------------------------
class StorageTableBatchTest(AzureTestCase, TableTestCase):
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_single_insert(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = '001'
entity['RowKey'] = 'batch_insert'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
batch = [('create', entity)]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' in transaction_result[0]
e = self.table.get_entity(row_key=entity['RowKey'], partition_key=entity['PartitionKey'])
assert e['test'] == entity['test'].value
assert e['test2'] == entity['test2']
assert e['test3'] == entity['test3']
assert e['test4'] == entity['test4'].value
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_single_update(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = '001'
entity['RowKey'] = 'batch_insert'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
resp = self.table.create_entity(entity)
assert resp is not None
entity['test3'] = 5
entity['test5'] = datetime.utcnow()
batch = [('update', entity, {'mode':UpdateMode.MERGE})]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' in transaction_result[0]
result = self.table.get_entity(row_key=entity['RowKey'], partition_key=entity['PartitionKey'])
assert result['PartitionKey'] == u'001'
assert result['RowKey'] == u'batch_insert'
assert result['test3'] == 5
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_update(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = u'001'
entity['RowKey'] = u'batch_update'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = u'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
entity['test6'] = (2 ** 40, "Edm.Int64")
self.table.create_entity(entity)
entity = self.table.get_entity(u'001', u'batch_update')
assert 3 == entity['test3']
entity['test2'] = u'value1'
batch = [('update', entity)]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' in transaction_result[0]
result = self.table.get_entity('001', 'batch_update')
assert 'value1' == result['test2']
assert entity['PartitionKey'] == u'001'
assert entity['RowKey'] == u'batch_update'
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_merge(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = u'001'
entity['RowKey'] = u'batch_merge'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = u'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
self.table.create_entity(entity)
resp_entity = self.table.get_entity(partition_key=u'001', row_key=u'batch_merge')
assert 3 == entity['test3']
entity = TableEntity()
entity['PartitionKey'] = u'001'
entity['RowKey'] = u'batch_merge'
entity['test2'] = u'value1'
batch = [('update', entity, {'mode': UpdateMode.MERGE})]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' in transaction_result[0]
resp_entity = self.table.get_entity(partition_key=u'001', row_key=u'batch_merge')
assert entity['test2'] == resp_entity['test2']
assert 1234567890 == resp_entity['test4']
assert entity['PartitionKey'] == resp_entity['PartitionKey']
assert entity['RowKey'] == resp_entity['RowKey']
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_update_if_match(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict()
resp = self.table.create_entity(entity=entity)
etag = resp['etag']
# Act
sent_entity = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
batch = [(
'update',
sent_entity,
{'etag': etag, 'match_condition':MatchConditions.IfNotModified, 'mode':UpdateMode.REPLACE}
)]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' in transaction_result[0]
entity = self.table.get_entity(partition_key=entity['PartitionKey'], row_key=entity['RowKey'])
self._assert_updated_entity(entity)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_update_if_doesnt_match(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict()
self.table.create_entity(entity)
# Act
sent_entity1 = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
batch = [(
'update',
sent_entity1,
{'etag': u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"', 'match_condition':MatchConditions.IfNotModified}
)]
with pytest.raises(TableTransactionError) as error:
self.table.submit_transaction(batch)
assert error.value.status_code == 412
assert error.value.error_code == TableErrorCode.update_condition_not_satisfied
# Assert
received_entity = self.table.get_entity(entity['PartitionKey'], entity['RowKey'])
self._assert_default_entity(received_entity)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_single_op_if_doesnt_match(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = 'batch_inserts'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
batch = []
transaction_count = 0
for i in range(10):
entity['RowKey'] = str(i)
batch.append(('create', entity.copy()))
transaction_count += 1
entity = self._create_random_entity_dict()
self.table.create_entity(entity)
# Act
sent_entity1 = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
batch = [(
'update',
sent_entity1,
{'etag':u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"', 'match_condition': MatchConditions.IfNotModified}
)]
with pytest.raises(TableTransactionError) as error:
self.table.submit_transaction(batch)
assert error.value.status_code == 412
assert error.value.error_code == TableErrorCode.update_condition_not_satisfied
# Assert
received_entity = self.table.get_entity(entity['PartitionKey'], entity['RowKey'])
self._assert_default_entity(received_entity)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_insert_replace(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = '001'
entity['RowKey'] = 'batch_insert_replace'
entity['test'] = True
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
batch = [('upsert', entity, {'mode': UpdateMode.REPLACE})]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' in transaction_result[0]
entity = self.table.get_entity('001', 'batch_insert_replace')
assert entity is not None
assert 'value' == entity['test2']
assert 1234567890 == entity['test4']
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_insert_merge(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = '001'
entity['RowKey'] = 'batch_insert_merge'
entity['test'] = True
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
batch = [('upsert', entity, {'mode': UpdateMode.MERGE})]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' in transaction_result[0]
entity = self.table.get_entity('001', 'batch_insert_merge')
assert entity is not None
assert 'value' == entity['test2']
assert 1234567890 == entity['test4']
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_delete(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = u'001'
entity['RowKey'] = u'batch_delete'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = u'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
self.table.create_entity(entity)
entity = self.table.get_entity(partition_key=u'001', row_key=u'batch_delete')
assert 3 == entity['test3']
batch = [('delete', entity)]
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, 1)
assert 'etag' not in transaction_result[0]
with pytest.raises(ResourceNotFoundError):
entity = self.table.get_entity(partition_key=entity['PartitionKey'], row_key=entity['RowKey'])
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_inserts(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = 'batch_inserts'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
transaction_count = 0
batch = []
for i in range(100):
entity['RowKey'] = str(i)
batch.append(('create', entity.copy()))
transaction_count += 1
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, transaction_count)
assert 'etag' in transaction_result[0]
entities = list(self.table.query_entities("PartitionKey eq 'batch_inserts'"))
# Assert
assert entities is not None
assert transaction_count == len(entities)
e = self.table.get_entity('batch_inserts', '1')
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_all_operations_together(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
# Act
entity = TableEntity()
entity['PartitionKey'] = '003'
entity['RowKey'] = 'batch_all_operations_together-1'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
self.table.create_entity(entity)
entity['RowKey'] = 'batch_all_operations_together-2'
self.table.create_entity(entity)
entity['RowKey'] = 'batch_all_operations_together-3'
self.table.create_entity(entity)
entity['RowKey'] = 'batch_all_operations_together-4'
self.table.create_entity(entity)
transaction_count = 0
batch = []
entity['RowKey'] = 'batch_all_operations_together'
batch.append((TransactionOperation.CREATE, entity.copy()))
transaction_count += 1
entity['RowKey'] = 'batch_all_operations_together-1'
batch.append((TransactionOperation.DELETE, entity.copy()))
transaction_count += 1
entity['RowKey'] = 'batch_all_operations_together-2'
entity['test3'] = 10
batch.append((TransactionOperation.UPDATE, entity.copy()))
transaction_count += 1
entity['RowKey'] = 'batch_all_operations_together-3'
entity['test3'] = 100
batch.append((TransactionOperation.UPDATE, entity.copy(), {'mode': UpdateMode.REPLACE}))
transaction_count += 1
entity['RowKey'] = 'batch_all_operations_together-4'
entity['test3'] = 10
batch.append((TransactionOperation.UPSERT, entity.copy()))
transaction_count += 1
entity['RowKey'] = 'batch_all_operations_together-5'
batch.append((TransactionOperation.UPSERT, entity.copy(), {'mode': UpdateMode.REPLACE}))
transaction_count += 1
transaction_result = self.table.submit_transaction(batch)
# Assert
self._assert_valid_batch_transaction(transaction_result, transaction_count)
assert 'etag' in transaction_result[0]
assert 'etag' not in transaction_result[1]
assert 'etag' in transaction_result[2]
assert 'etag' in transaction_result[3]
assert 'etag' in transaction_result[4]
assert 'etag' in transaction_result[5]
# Assert
entities = list(self.table.query_entities("PartitionKey eq '003'"))
assert 5 == len(entities)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_reuse(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
table2_name = self._get_table_reference('table2')
table2 = self.ts.get_table_client(table2_name)
table2.create_table()
# Act
entity = TableEntity()
entity['PartitionKey'] = '003'
entity['RowKey'] = 'batch_all_operations_together-1'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
entity['test5'] = datetime.utcnow()
batch = []
batch.append(('upsert', entity.copy()))
entity['RowKey'] = 'batch_all_operations_together-2'
batch.append(('upsert', entity.copy()))
entity['RowKey'] = 'batch_all_operations_together-3'
batch.append(('upsert', entity.copy()))
entity['RowKey'] = 'batch_all_operations_together-4'
batch.append(('upsert', entity.copy()))
resp1 = self.table.submit_transaction(batch)
resp2 = table2.submit_transaction(batch)
entities = list(self.table.query_entities("PartitionKey eq '003'"))
assert 4 == len(entities)
table2 = list(table2.query_entities("PartitionKey eq '003'"))
assert 4 == len(entities)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_same_row_operations_fail(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict('001', 'batch_negative_1')
self.table.create_entity(entity)
# Act
batch = []
entity = self._create_updated_entity_dict(
'001', 'batch_negative_1')
batch.append(('update', entity.copy()))
entity = self._create_random_entity_dict(
'001', 'batch_negative_1')
batch.append(('update', entity.copy(), {'mode': UpdateMode.REPLACE}))
# Assert
with pytest.raises(TableTransactionError):
self.table.submit_transaction(batch)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_different_partition_operations_fail(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict('001', 'batch_negative_1')
self.table.create_entity(entity)
# Act
batch = []
entity = self._create_updated_entity_dict(
'001', 'batch_negative_1')
batch.append(('update', entity.copy()))
entity = self._create_random_entity_dict(
'002', 'batch_negative_1')
batch.append(('update', entity.copy()))
with pytest.raises(ValueError):
self.table.submit_transaction(batch)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_too_many_ops(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict('001', 'batch_negative_1')
self.table.create_entity(entity)
# Act
with pytest.raises(TableTransactionError):
batch = []
for i in range(0, 101):
entity = TableEntity()
entity['PartitionKey'] = 'large'
entity['RowKey'] = 'item{0}'.format(i)
batch.append(('create', entity.copy()))
self.table.submit_transaction(batch)
# Assert
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_different_partition_keys(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict('001', 'batch_negative_1')
entity2 = self._create_random_entity_dict('002', 'batch_negative_1')
batch = [('create', entity), ('create', entity2)]
with pytest.raises(ValueError):
self.table.submit_transaction(batch)
# Assert
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_new_non_existent_table(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict('001', 'batch_negative_1')
tc = self.ts.get_table_client("doesntexist")
batch = [('create', entity)]
with pytest.raises(TableTransactionError):
resp = tc.submit_transaction(batch)
# Assert
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_new_invalid_key(self, tables_storage_account_name, tables_primary_storage_account_key):
invalid_key = tables_primary_storage_account_key.named_key.key[0:-6] + "==" # cut off a bit from the end to invalidate
tables_primary_storage_account_key = AzureNamedKeyCredential(tables_storage_account_name, invalid_key)
credential = AzureNamedKeyCredential(name=tables_storage_account_name, key=tables_primary_storage_account_key.named_key.key)
self.ts = TableServiceClient(self.account_url(tables_storage_account_name, "table"), credential=credential)
self.table_name = self.get_resource_name('uttable')
self.table = self.ts.get_table_client(self.table_name)
entity = self._create_random_entity_dict('001', 'batch_negative_1')
batch = [('create', entity)]
with pytest.raises(ClientAuthenticationError):
resp = self.table.submit_transaction(batch)
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_new_delete_nonexistent_entity(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict('001', 'batch_negative_1')
batch = [('delete', entity)]
with pytest.raises(TableTransactionError):
resp = self.table.submit_transaction(batch)
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_delete_batch_with_bad_kwarg(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
entity = self._create_random_entity_dict('001', 'batch_negative_1')
self.table.create_entity(entity)
received = self.table.get_entity(entity["PartitionKey"], entity["RowKey"])
good_etag = received.metadata["etag"]
received.metadata["etag"] = u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"'
batch = [('delete', received, {"match_condition": MatchConditions.IfNotModified})]
with pytest.raises(TableTransactionError) as error:
self.table.submit_transaction(batch)
assert error.value.status_code == 412
assert error.value.error_code == TableErrorCode.update_condition_not_satisfied
received.metadata["etag"] = good_etag
batch = [('delete', received, {"match_condition": MatchConditions.IfNotModified})]
resp = self.table.submit_transaction(batch)
assert resp is not None
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@pytest.mark.live_test_only
@tables_decorator
def test_batch_sas_auth(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:
token = self.generate_sas(
generate_table_sas,
tables_primary_storage_account_key,
self.table_name,
permission=TableSasPermissions(add=True, read=True, update=True, delete=True),
expiry=datetime.utcnow() + timedelta(hours=1),
start=datetime.utcnow() - timedelta(minutes=1),
)
token = AzureSasCredential(token)
# Act
service = TableServiceClient(
self.account_url(tables_storage_account_name, "table"),
credential=token,
)
table = service.get_table_client(self.table_name)
entity = TableEntity()
entity['PartitionKey'] = 'batch_inserts'
entity['test'] = EntityProperty(True, EdmType.BOOLEAN)
entity['test2'] = 'value'
entity['test3'] = 3
entity['test4'] = EntityProperty(1234567890, EdmType.INT32)
batch = []
transaction_count = 0
for i in range(10):
entity['RowKey'] = str(i)
batch.append(('create', entity.copy()))
transaction_count += 1
transaction_result = table.submit_transaction(batch)
assert transaction_result
total_entities = 0
for e in table.list_entities():
total_entities += 1
assert total_entities == transaction_count
finally:
self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@pytest.mark.live_test_only  # Request bodies are very large
@tables_decorator
def test_batch_request_too_large(self, tables_storage_account_name, tables_primary_storage_account_key):
    """An oversized transaction payload is rejected with RequestTooLargeError."""
    # Arrange
    self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
    try:
        # Each entity carries 3 x 64KB of random bytes; 50 of them make the
        # batch large enough for the service to reject it.
        base_entity = {
            'PartitionKey': 'pk001',
            'Foo': os.urandom(1024*64),
            'Bar': os.urandom(1024*64),
            'Baz': os.urandom(1024*64)
        }
        batch = [('create', dict(base_entity, RowKey=str(index))) for index in range(50)]
        with pytest.raises(RequestTooLargeError):
            self.table.submit_transaction(batch)
    finally:
        self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_with_mode(self, tables_storage_account_name, tables_primary_storage_account_key):
    """Valid upsert modes succeed; unknown modes make submit_transaction raise ValueError."""
    # Arrange
    self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
    try:
        table2_name = self._get_table_reference('table2')
        table2 = self.ts.get_table_client(table2_name)
        table2.create_table()

        # Act
        def _make_entity(row_key):
            # The two test entities are identical except for their RowKey.
            return {
                "PartitionKey": "pk001",
                "RowKey": row_key,
                "Value": 1,
                "day": "Monday",
                "float": 1.001
            }

        entity1 = _make_entity("rk001")
        entity2 = _make_entity("rk002")

        resp = self.table.submit_transaction([
            ("upsert", entity1, {"mode": "merge"}),
            ("upsert", entity2, {"mode": "replace"})
        ])
        assert len(resp) == 2

        # Modes other than merge/replace are rejected with ValueError.
        with pytest.raises(ValueError):
            self.table.submit_transaction([
                ("upsert", entity1, {"mode": "foo"}),
                ("upsert", entity2, {"mode": "bar"})
            ])
    finally:
        self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_with_specialchar_partitionkey(self, tables_storage_account_name, tables_primary_storage_account_key):
    """Entities whose keys contain quote characters round-trip through every batch operation."""
    # Arrange
    self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
    try:
        table2_name = self._get_table_reference('table2')
        table2 = self.ts.get_table_client(table2_name)
        table2.create_table()

        # Act
        entity1 = {
            'PartitionKey': "A'aaa\"_bbbb2",
            'RowKey': '"A\'aaa"_bbbb2',
            'test': '"A\'aaa"_bbbb2'
        }

        # Submit each operation as its own single-item transaction, then read
        # the entity back to prove the special characters survived intact.
        operations = [
            ("create", entity1),
            ("upsert", entity1, {'mode': 'merge'}),
            ("upsert", entity1, {'mode': 'replace'}),
            ("update", entity1, {'mode': 'merge'}),
            ("update", entity1, {'mode': 'replace'}),
        ]
        for operation in operations:
            self.table.submit_transaction([operation])
            fetched = self.table.get_entity(
                partition_key=entity1['PartitionKey'],
                row_key=entity1['RowKey'])
            assert fetched == entity1

        entity_results = list(self.table.list_entities())
        assert entity_results[0] == entity1
        for listed in entity_results:
            fetched = self.table.get_entity(
                partition_key=listed['PartitionKey'],
                row_key=listed['RowKey'])
            assert fetched == entity1

        self.table.submit_transaction([("delete", entity1)])
    finally:
        self._tear_down()
@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@tables_decorator
def test_batch_with_specialchar_partitionkey_optout(self, tables_storage_account_name, tables_primary_storage_account_key):
    """With key escaping opted out, quote-bearing keys fail until the caller escapes them."""
    # Arrange
    self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
    try:
        # Opt out of the client's key escaping by making prepare_key a no-op.
        self.table.prepare_key = lambda k: k

        # Act
        entity1 = {
            'PartitionKey': "A'aaa\"_bbbb2",
            'RowKey': '"A\'aaa"_bbbb2',
            'test': '"A\'aaa"_bbbb2'
        }
        self.table.submit_transaction([("create", entity1)])

        # Without escaping, these operations fail against the service.
        with pytest.raises(HttpResponseError):
            self.table.get_entity(
                partition_key=entity1['PartitionKey'],
                row_key=entity1['RowKey'])
        with pytest.raises(TableTransactionError):
            self.table.submit_transaction([("upsert", entity1, {'mode': 'merge'})])
        with pytest.raises(TableTransactionError):
            self.table.submit_transaction([("update", entity1, {'mode': 'replace'})])

        # Doubling the single quotes by hand is now the caller's responsibility.
        for listed in list(self.table.list_entities()):
            fetched = self.table.get_entity(
                partition_key=listed['PartitionKey'].replace("'", "''"),
                row_key=listed['RowKey'].replace("'", "''"))
            assert fetched == listed == entity1

        with pytest.raises(TableTransactionError):
            self.table.submit_transaction([("delete", entity1)])
    finally:
        self._tear_down()
0a0e643e3f48f6fc458be3375ed09d7479d57e9a | 53,021 | py | Python | msgraph/cli/command_modules/devicescorpmgt/azext_devicescorpmgt/generated/action.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | null | null | null | msgraph/cli/command_modules/devicescorpmgt/azext_devicescorpmgt/generated/action.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | 22 | 2022-03-29T22:54:37.000Z | 2022-03-29T22:55:27.000Z | msgraph/cli/command_modules/devicescorpmgt/azext_devicescorpmgt/generated/action.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=protected-access
# pylint: disable=no-self-use
import argparse
from collections import defaultdict
from knack.util import CLIError
class AddMobileAppCategories(argparse._AppendAction):
    """Append one mobileAppCategory dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddMobileAppCategories, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map ['display-name=x', ...] onto {'display_name': 'x', ...}; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset(['display-name', 'last-modified-date-time', 'id'])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter mobile-app-categories. All possible keys are:'
                    ' display-name, last-modified-date-time, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddVppTokens(argparse._AppendAction):
    """Append one vppToken dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddVppTokens, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case vppToken properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset([
            'apple-id', 'automatically-update-apps', 'country-or-region', 'expiration-date-time',
            'last-modified-date-time', 'last-sync-date-time', 'last-sync-status', 'organization-name',
            'state', 'token', 'vpp-token-account-type', 'id',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter vpp-tokens. All possible keys are: apple-id,'
                    ' automatically-update-apps, country-or-region, expiration-date-time, last-modified-date-time,'
                    ' last-sync-date-time, last-sync-status, organization-name, state, token,'
                    ' vpp-token-account-type, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddManagedAppPolicies(argparse._AppendAction):
    """Append one managedAppPolicy dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddManagedAppPolicies, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case policy properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset([
            'created-date-time', 'description', 'display-name', 'last-modified-date-time', 'version', 'id',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter managed-app-policies. All possible keys are:'
                    ' created-date-time, description, display-name, last-modified-date-time, version, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddManagedAppStatuses(argparse._AppendAction):
    """Append one managedAppStatus dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddManagedAppStatuses, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case status properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset(['display-name', 'version', 'id'])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter managed-app-statuses. All possible keys are:'
                    ' display-name, version, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddMdmWindowsInformationProtectionPolicies(argparse._AppendAction):
    """Append one mdmWindowsInformationProtectionPolicy dict parsed from KEY=VALUE tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddMdmWindowsInformationProtectionPolicies, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case policy properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Keys whose value is a single scalar (first occurrence wins).
        single_valued = frozenset([
            'azure-rights-management-services-allowed', 'data-recovery-certificate', 'enforcement-level',
            'enterprise-domain', 'enterprise-ip-ranges-are-authoritative',
            'enterprise-proxy-servers-are-authoritative', 'icons-visible',
            'indexing-encrypted-stores-or-items-blocked', 'is-assigned', 'protection-under-lock-config-required',
            'revoke-on-unenroll-disabled', 'rights-management-services-template-id', 'created-date-time',
            'description', 'display-name', 'last-modified-date-time', 'version', 'id',
        ])
        # Keys that collect every supplied value into a list.
        multi_valued = frozenset([
            'enterprise-internal-proxy-servers', 'enterprise-ip-ranges', 'enterprise-network-domain-names',
            'enterprise-protected-domain-names', 'enterprise-proxied-domains', 'enterprise-proxy-servers',
            'exempt-apps', 'neutral-domain-resources', 'protected-apps', 'smb-auto-encrypted-file-extensions',
            'assignments', 'exempt-app-locker-files', 'protected-app-locker-files',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered in multi_valued:
                parsed[lowered.replace('-', '_')] = key_values
            elif lowered in single_valued:
                parsed[lowered.replace('-', '_')] = key_values[0]
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter mdm-windows-information-protection-policies. All'
                    ' possible keys are: azure-rights-management-services-allowed, data-recovery-certificate,'
                    ' enforcement-level, enterprise-domain, enterprise-internal-proxy-servers, enterprise-ip-ranges,'
                    ' enterprise-ip-ranges-are-authoritative, enterprise-network-domain-names,'
                    ' enterprise-protected-domain-names, enterprise-proxied-domains, enterprise-proxy-servers,'
                    ' enterprise-proxy-servers-are-authoritative, exempt-apps, icons-visible,'
                    ' indexing-encrypted-stores-or-items-blocked, is-assigned, neutral-domain-resources,'
                    ' protected-apps, protection-under-lock-config-required, revoke-on-unenroll-disabled,'
                    ' rights-management-services-template-id, smb-auto-encrypted-file-extensions, assignments,'
                    ' exempt-app-locker-files, protected-app-locker-files, created-date-time, description,'
                    ' display-name, last-modified-date-time, version, id'.format(key)
                )
        return parsed
class AddWindowsInformationProtectionPolicies(argparse._AppendAction):
    """Append one windowsInformationProtectionPolicy dict parsed from KEY=VALUE tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddWindowsInformationProtectionPolicies, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case policy properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Keys whose value is a single scalar (first occurrence wins).
        single_valued = frozenset([
            'days-without-contact-before-unenroll', 'mdm-enrollment-url',
            'minutes-of-inactivity-before-device-lock', 'number-of-past-pins-remembered',
            'password-maximum-attempt-count', 'pin-expiration-days', 'pin-lowercase-letters',
            'pin-minimum-length', 'pin-special-characters', 'pin-uppercase-letters',
            'revoke-on-mdm-handoff-disabled', 'windows-hello-for-business-blocked',
            'azure-rights-management-services-allowed', 'data-recovery-certificate', 'enforcement-level',
            'enterprise-domain', 'enterprise-ip-ranges-are-authoritative',
            'enterprise-proxy-servers-are-authoritative', 'icons-visible',
            'indexing-encrypted-stores-or-items-blocked', 'is-assigned', 'protection-under-lock-config-required',
            'revoke-on-unenroll-disabled', 'rights-management-services-template-id', 'created-date-time',
            'description', 'display-name', 'last-modified-date-time', 'version', 'id',
        ])
        # Keys that collect every supplied value into a list.
        multi_valued = frozenset([
            'enterprise-internal-proxy-servers', 'enterprise-ip-ranges', 'enterprise-network-domain-names',
            'enterprise-protected-domain-names', 'enterprise-proxied-domains', 'enterprise-proxy-servers',
            'exempt-apps', 'neutral-domain-resources', 'protected-apps', 'smb-auto-encrypted-file-extensions',
            'assignments', 'exempt-app-locker-files', 'protected-app-locker-files',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered in multi_valued:
                parsed[lowered.replace('-', '_')] = key_values
            elif lowered in single_valued:
                parsed[lowered.replace('-', '_')] = key_values[0]
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter windows-information-protection-policies. All possible'
                    ' keys are: days-without-contact-before-unenroll, mdm-enrollment-url,'
                    ' minutes-of-inactivity-before-device-lock, number-of-past-pins-remembered,'
                    ' password-maximum-attempt-count, pin-expiration-days, pin-lowercase-letters, pin-minimum-length,'
                    ' pin-special-characters, pin-uppercase-letters, revoke-on-mdm-handoff-disabled,'
                    ' windows-hello-for-business-blocked, azure-rights-management-services-allowed,'
                    ' data-recovery-certificate, enforcement-level, enterprise-domain,'
                    ' enterprise-internal-proxy-servers, enterprise-ip-ranges, enterprise-ip-ranges-are-authoritative,'
                    ' enterprise-network-domain-names, enterprise-protected-domain-names, enterprise-proxied-domains,'
                    ' enterprise-proxy-servers, enterprise-proxy-servers-are-authoritative, exempt-apps, icons-visible,'
                    ' indexing-encrypted-stores-or-items-blocked, is-assigned, neutral-domain-resources,'
                    ' protected-apps, protection-under-lock-config-required, revoke-on-unenroll-disabled,'
                    ' rights-management-services-template-id, smb-auto-encrypted-file-extensions, assignments,'
                    ' exempt-app-locker-files, protected-app-locker-files, created-date-time, description,'
                    ' display-name, last-modified-date-time, version, id'.format(key)
                )
        return parsed
class AddAppliedPolicies(argparse._AppendAction):
    """Append one applied-policy dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddAppliedPolicies, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case policy properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset([
            'created-date-time', 'description', 'display-name', 'last-modified-date-time', 'version', 'id',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter applied-policies. All possible keys are:'
                    ' created-date-time, description, display-name, last-modified-date-time, version, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddIntendedPolicies(argparse._AppendAction):
    """Append one intended-policy dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddIntendedPolicies, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case policy properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset([
            'created-date-time', 'description', 'display-name', 'last-modified-date-time', 'version', 'id',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter intended-policies. All possible keys are:'
                    ' created-date-time, description, display-name, last-modified-date-time, version, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddOperations(argparse._AppendAction):
    """Append one operation dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddOperations, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case operation properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset(['display-name', 'last-modified-date-time', 'state', 'version', 'id'])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter operations. All possible keys are: display-name,'
                    ' last-modified-date-time, state, version, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddLargeCover(argparse.Action):
    """Parse KEY=VALUE tokens into a single dict stored on ``namespace.large_cover``."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Single-use action: store the parsed dict directly (no appending).
        namespace.large_cover = self.get_action(values, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto the large-cover properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # Both supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset(['type', 'value'])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter large-cover. All possible keys are: type, value'
                    .format(key)
                )
            parsed[lowered] = key_values[0]
        return parsed
class AddDeviceappmanagementDeviceStates(argparse._AppendAction):
    """Append one deviceInstallState dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddDeviceappmanagementDeviceStates, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case device-state properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset([
            'device-id', 'device-name', 'error-code', 'install-state', 'last-sync-date-time',
            'os-description', 'os-version', 'user-name', 'id',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter device-states. All possible keys are: device-id,'
                    ' device-name, error-code, install-state, last-sync-date-time, os-description, os-version,'
                    ' user-name, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddInstallSummary(argparse.Action):
    """Parse KEY=VALUE tokens into a single dict stored on ``namespace.install_summary``."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Single-use action: store the parsed dict directly (no appending).
        namespace.install_summary = self.get_action(values, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case install-summary properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset([
            'failed-device-count', 'failed-user-count', 'installed-device-count', 'installed-user-count',
            'not-installed-device-count', 'not-installed-user-count', 'id',
        ])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter install-summary. All possible keys are:'
                    ' failed-device-count, failed-user-count, installed-device-count, installed-user-count,'
                    ' not-installed-device-count, not-installed-user-count, id'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddDataRecoveryCertificate(argparse.Action):
    """Parse KEY=VALUE tokens into a single dict stored on ``namespace.data_recovery_certificate``."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Single-use action: store the parsed dict directly (no appending).
        namespace.data_recovery_certificate = self.get_action(values, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case certificate properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset(['certificate', 'description', 'expiration-date-time', 'subject-name'])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter data-recovery-certificate. All possible keys are:'
                    ' certificate, description, expiration-date-time, subject-name'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddEnterpriseInternalProxyServers(argparse._AppendAction):
    """Append one proxied-resource dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddEnterpriseInternalProxyServers, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse display-name (scalar) and resources (repeatable) keys; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered == 'display-name':
                parsed['display_name'] = key_values[0]    # single-valued; first occurrence wins
            elif lowered == 'resources':
                parsed['resources'] = key_values          # repeatable; keep all values
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter enterprise-internal-proxy-servers. All possible keys'
                    ' are: display-name, resources'.format(key)
                )
        return parsed
class AddEnterpriseNetworkDomainNames(argparse._AppendAction):
    """Append one proxied-resource dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddEnterpriseNetworkDomainNames, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse display-name (scalar) and resources (repeatable) keys; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered == 'display-name':
                parsed['display_name'] = key_values[0]    # single-valued; first occurrence wins
            elif lowered == 'resources':
                parsed['resources'] = key_values          # repeatable; keep all values
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter enterprise-network-domain-names. All possible keys'
                    ' are: display-name, resources'.format(key)
                )
        return parsed
class AddEnterpriseProtectedDomainNames(argparse._AppendAction):
    """Append one proxied-resource dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddEnterpriseProtectedDomainNames, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse display-name (scalar) and resources (repeatable) keys; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered == 'display-name':
                parsed['display_name'] = key_values[0]    # single-valued; first occurrence wins
            elif lowered == 'resources':
                parsed['resources'] = key_values          # repeatable; keep all values
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter enterprise-protected-domain-names. All possible keys'
                    ' are: display-name, resources'.format(key)
                )
        return parsed
class AddEnterpriseProxyServers(argparse._AppendAction):
    """Append one proxied-resource dict parsed from KEY=VALUE command-line tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddEnterpriseProxyServers, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse display-name (scalar) and resources (repeatable) keys; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered == 'display-name':
                parsed['display_name'] = key_values[0]    # single-valued; first occurrence wins
            elif lowered == 'resources':
                parsed['resources'] = key_values          # repeatable; keep all values
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter enterprise-proxy-servers. All possible keys are:'
                    ' display-name, resources'.format(key)
                )
        return parsed
class AddExemptApps(argparse._AppendAction):
    """Append one windowsInformationProtectionApp dict parsed from KEY=VALUE tokens."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddExemptApps, self).__call__(parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Map KEY=VALUE tokens onto snake_case app properties; CLIError on bad input."""
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # All supported keys are single-valued; the first value supplied wins.
        single_valued = frozenset(['denied', 'description', 'display-name', 'product-name', 'publisher-name'])
        parsed = {}
        for key, key_values in grouped.items():
            lowered = key.lower()
            if lowered not in single_valued:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter exempt-apps. All possible keys are: denied,'
                    ' description, display-name, product-name, publisher-name'.format(key)
                )
            parsed[lowered.replace('-', '_')] = key_values[0]
        return parsed
class AddNeutralDomainResources(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for neutral-domain-resources."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddNeutralDomainResources, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        `display-name` keeps the first value; `resources` keeps all values.
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        result = {}
        for key, collected in grouped.items():
            lowered = key.lower()
            if lowered == 'display-name':
                result['display_name'] = collected[0]
            elif lowered == 'resources':
                result['resources'] = collected
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter neutral-domain-resources. All possible keys are:'
                    ' display-name, resources'.format(key)
                )
        return result
class AddProtectedApps(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for protected-apps."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddProtectedApps, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'denied': 'denied',
            'description': 'description',
            'display-name': 'display_name',
            'product-name': 'product_name',
            'publisher-name': 'publisher_name',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter protected-apps. All possible keys are: denied,'
                    ' description, display-name, product-name, publisher-name'.format(key)
                )
            result[target] = collected[0]
        return result
class AddSmbAutoEncryptedFileExtensions(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for smb-auto-encrypted-file-extensions."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddSmbAutoEncryptedFileExtensions, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        `display-name` keeps the first value; `resources` keeps all values.
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        result = {}
        for key, collected in grouped.items():
            lowered = key.lower()
            if lowered == 'display-name':
                result['display_name'] = collected[0]
            elif lowered == 'resources':
                result['resources'] = collected
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter smb-auto-encrypted-file-extensions. All possible keys'
                    ' are: display-name, resources'.format(key)
                )
        return result
class AddExemptAppLockerFiles(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for exempt-app-locker-files."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddExemptAppLockerFiles, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'display-name': 'display_name',
            'file': 'file',
            'file-hash': 'file_hash',
            'version': 'version',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter exempt-app-locker-files. All possible keys are:'
                    ' display-name, file, file-hash, version, id'.format(key)
                )
            result[target] = collected[0]
        return result
class AddProtectedAppLockerFiles(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for protected-app-locker-files."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddProtectedAppLockerFiles, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'display-name': 'display_name',
            'file': 'file',
            'file-hash': 'file_hash',
            'version': 'version',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter protected-app-locker-files. All possible keys are:'
                    ' display-name, file, file-hash, version, id'.format(key)
                )
            result[target] = collected[0]
        return result
class AddDeviceStatuses(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for device-statuses."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddDeviceStatuses, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'compliance-grace-period-expiration-date-time':
                'compliance_grace_period_expiration_date_time',
            'device-display-name': 'device_display_name',
            'device-model': 'device_model',
            'last-reported-date-time': 'last_reported_date_time',
            'status': 'status',
            'user-name': 'user_name',
            'user-principal-name': 'user_principal_name',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter device-statuses. All possible keys are:'
                    ' compliance-grace-period-expiration-date-time, device-display-name, device-model,'
                    ' last-reported-date-time, status, user-name, user-principal-name, id'.format(key)
                )
            result[target] = collected[0]
        return result
class AddDeviceStatusSummary(argparse.Action):
    """Single-value action parsing KEY=VALUE pairs for device-status-summary."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Unlike the append actions, this stores one dict directly.
        namespace.device_status_summary = self.get_action(values, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'configuration-version': 'configuration_version',
            'error-count': 'error_count',
            'failed-count': 'failed_count',
            'last-update-date-time': 'last_update_date_time',
            'not-applicable-count': 'not_applicable_count',
            'pending-count': 'pending_count',
            'success-count': 'success_count',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter device-status-summary. All possible keys are:'
                    ' configuration-version, error-count, failed-count, last-update-date-time, not-applicable-count,'
                    ' pending-count, success-count, id'.format(key)
                )
            result[target] = collected[0]
        return result
class AddUserStatuses(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for user-statuses."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddUserStatuses, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'devices-count': 'devices_count',
            'last-reported-date-time': 'last_reported_date_time',
            'status': 'status',
            'user-display-name': 'user_display_name',
            'user-principal-name': 'user_principal_name',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter user-statuses. All possible keys are: devices-count,'
                    ' last-reported-date-time, status, user-display-name, user-principal-name, id'.format(key)
                )
            result[target] = collected[0]
        return result
class AddUserStatusSummary(argparse.Action):
    """Single-value action parsing KEY=VALUE pairs for user-status-summary."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Unlike the append actions, this stores one dict directly.
        namespace.user_status_summary = self.get_action(values, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'configuration-version': 'configuration_version',
            'error-count': 'error_count',
            'failed-count': 'failed_count',
            'last-update-date-time': 'last_update_date_time',
            'not-applicable-count': 'not_applicable_count',
            'pending-count': 'pending_count',
            'success-count': 'success_count',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter user-status-summary. All possible keys are:'
                    ' configuration-version, error-count, failed-count, last-update-date-time, not-applicable-count,'
                    ' pending-count, success-count, id'.format(key)
                )
            result[target] = collected[0]
        return result
class AddCategories(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for categories."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddCategories, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'display-name': 'display_name',
            'last-modified-date-time': 'last_modified_date_time',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter categories. All possible keys are: display-name,'
                    ' last-modified-date-time, id'.format(key)
                )
            result[target] = collected[0]
        return result
class AddCustomSettings(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for custom-settings."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddCustomSettings, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Recognized keys `name` and `value` each take a single value.
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        result = {}
        for key, collected in grouped.items():
            lowered = key.lower()
            if lowered in ('name', 'value'):
                result[lowered] = collected[0]
            else:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter custom-settings. All possible keys are: name, value'
                    .format(key)
                )
        return result
class AddDeviceappmanagementManagedebooksDeviceStates(argparse._AppendAction):
    """Append action parsing KEY=VALUE pairs for device-states."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = self.get_action(values, option_string)
        super(AddDeviceappmanagementManagedebooksDeviceStates, self).__call__(
            parser, namespace, parsed, option_string)

    def get_action(self, values, option_string):
        """Parse KEY=VALUE tokens into the request property dict.

        Every recognized key takes a single value (the first one given).
        Raises CLIError on malformed tokens or unknown keys.
        """
        try:
            grouped = defaultdict(list)
            for token in values:
                key, value = token.split('=', 1)
                grouped[key].append(value)
            grouped = dict(grouped)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        # CLI key -> request-body property name.
        key_map = {
            'device-id': 'device_id',
            'device-name': 'device_name',
            'error-code': 'error_code',
            'install-state': 'install_state',
            'last-sync-date-time': 'last_sync_date_time',
            'os-description': 'os_description',
            'os-version': 'os_version',
            'user-name': 'user_name',
            'id': 'id',
        }
        result = {}
        for key, collected in grouped.items():
            target = key_map.get(key.lower())
            if target is None:
                raise CLIError(
                    'Unsupported Key {} is provided for parameter device-states. All possible keys are: device-id,'
                    ' device-name, error-code, install-state, last-sync-date-time, os-description, os-version,'
                    ' user-name, id'.format(key)
                )
            result[target] = collected[0]
        return result
| 35.370914 | 120 | 0.534486 | 5,665 | 53,021 | 4.865313 | 0.047485 | 0.039837 | 0.034177 | 0.04557 | 0.886365 | 0.864596 | 0.846746 | 0.838328 | 0.834119 | 0.834119 | 0 | 0.006055 | 0.342808 | 53,021 | 1,498 | 121 | 35.394526 | 0.784933 | 0.00943 | 0 | 0.805183 | 0 | 0.025022 | 0.274486 | 0.117942 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053619 | false | 0.002681 | 0.002681 | 0 | 0.10992 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0a32ccad6a36f2c4b87ba3b7c27959790492e735 | 116,738 | py | Python | heat/tests/api/openstack_v1/test_stacks.py | larsks/heat | 11064586e90166a037f8868835e6ce36f7306276 | [
"Apache-2.0"
] | null | null | null | heat/tests/api/openstack_v1/test_stacks.py | larsks/heat | 11064586e90166a037f8868835e6ce36f7306276 | [
"Apache-2.0"
] | null | null | null | heat/tests/api/openstack_v1/test_stacks.py | larsks/heat | 11064586e90166a037f8868835e6ce36f7306276 | [
"Apache-2.0"
] | null | null | null | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
from oslo_config import cfg
import six
import webob.exc
import heat.api.middleware.fault as fault
import heat.api.openstack.v1.stacks as stacks
from heat.common import context
from heat.common import exception as heat_exc
from heat.common import identifier
from heat.common import policy
from heat.common import template_format
from heat.common import urlfetch
from heat.rpc import api as rpc_api
from heat.rpc import client as rpc_client
from heat.tests.api.openstack_v1 import tools
from heat.tests import common
class InstantiationDataTest(common.HeatTestCase):
    """Tests for stacks.InstantiationData, which parses stack-create bodies.

    Covers stack-name extraction, template parsing (inline dict, JSON
    string, YAML string, URL fetch), environment merging, and the args()
    whitelist.
    """

    def test_parse_error_success(self):
        # No exception inside the context manager -> nothing is translated.
        with stacks.InstantiationData.parse_error_check('Garbage'):
            pass

    def test_parse_error(self):
        # A ValueError inside the context manager becomes HTTPBadRequest.
        def generate_error():
            with stacks.InstantiationData.parse_error_check('Garbage'):
                raise ValueError
        self.assertRaises(webob.exc.HTTPBadRequest, generate_error)

    def test_parse_error_message(self):
        # make sure the parser error gets through to the caller.
        # NOTE(review): the template below is intentionally invalid YAML
        # (inconsistent indentation under KeyName); the original in-string
        # indentation was lost in extraction — confirm against upstream.
        bad_temp = '''
heat_template_version: '2013-05-23'
parameters:
  KeyName:
     type: string
    description: bla
'''

        def generate_error():
            with stacks.InstantiationData.parse_error_check('foo'):
                template_format.parse(bad_temp)
        parse_ex = self.assertRaises(webob.exc.HTTPBadRequest, generate_error)
        # The 'foo' label passed to parse_error_check must appear in the
        # resulting error message.
        self.assertIn('foo', six.text_type(parse_ex))

    def test_stack_name(self):
        body = {'stack_name': 'wibble'}
        data = stacks.InstantiationData(body)
        self.assertEqual('wibble', data.stack_name())

    def test_stack_name_missing(self):
        # A body without 'stack_name' is rejected with HTTPBadRequest.
        body = {'not the stack_name': 'wibble'}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.stack_name)

    def test_template_inline(self):
        # An already-parsed template dict is returned unchanged.
        template = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'template': template}
        data = stacks.InstantiationData(body)
        self.assertEqual(template, data.template())

    def test_template_string_json(self):
        # A JSON string template is parsed into a dict.
        template = ('{"heat_template_version": "2013-05-23",'
                    '"foo": "bar", "blarg": "wibble"}')
        body = {'template': template}
        data = stacks.InstantiationData(body)
        self.assertEqual(json.loads(template), data.template())

    def test_template_string_yaml(self):
        # A YAML string template is parsed into a dict.
        template = '''HeatTemplateFormatVersion: 2012-12-12
foo: bar
blarg: wibble
'''
        parsed = {u'HeatTemplateFormatVersion': u'2012-12-12',
                  u'blarg': u'wibble',
                  u'foo': u'bar'}
        body = {'template': template}
        data = stacks.InstantiationData(body)
        self.assertEqual(parsed, data.template())

    def test_template_int(self):
        # A scalar that parses but is not a mapping is rejected.
        template = '42'
        body = {'template': template}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.template)

    def test_template_url(self):
        # When only 'template_url' is given, the template is fetched via
        # urlfetch and then parsed.
        template = {'heat_template_version': '2013-05-23',
                    'foo': 'bar',
                    'blarg': 'wibble'}
        url = 'http://example.com/template'
        body = {'template_url': url}
        data = stacks.InstantiationData(body)

        self.m.StubOutWithMock(urlfetch, 'get')
        urlfetch.get(url).AndReturn(json.dumps(template))
        self.m.ReplayAll()

        self.assertEqual(template, data.template())
        self.m.VerifyAll()

    def test_template_priority(self):
        # An inline 'template' wins over 'template_url'; urlfetch.get must
        # never be called (mox verifies zero recorded calls).
        template = {'foo': 'bar', 'blarg': 'wibble'}
        url = 'http://example.com/template'
        body = {'template': template, 'template_url': url}
        data = stacks.InstantiationData(body)

        self.m.StubOutWithMock(urlfetch, 'get')
        self.m.ReplayAll()

        self.assertEqual(template, data.template())
        self.m.VerifyAll()

    def test_template_missing(self):
        # Neither 'template' nor 'template_url' present -> HTTPBadRequest.
        template = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'not the template': template}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.template)

    def test_template_exceeds_max_template_size(self):
        # Templates larger than the max_template_size config option are
        # rejected with RequestLimitExceeded.
        cfg.CONF.set_override('max_template_size', 10, enforce_type=True)
        template = json.dumps(['a'] * cfg.CONF.max_template_size)
        body = {'template': template}
        data = stacks.InstantiationData(body)
        error = self.assertRaises(heat_exc.RequestLimitExceeded,
                                  data.template)
        msg = ('Request limit exceeded: Template size (%(actual_len)s '
               'bytes) exceeds maximum allowed size (%(limit)s bytes).') % {
                   'actual_len': len(str(template)),
                   'limit': cfg.CONF.max_template_size}
        self.assertEqual(msg, six.text_type(error))

    def test_parameters(self):
        # Top-level 'parameters' are folded into a full environment dict.
        params = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'parameters': params,
                'encrypted_param_names': [],
                'parameter_defaults': {},
                'event_sinks': [],
                'resource_registry': {}}
        data = stacks.InstantiationData(body)
        self.assertEqual(body, data.environment())

    def test_environment_only_params(self):
        # An explicit 'environment' section is returned as-is.
        env = {'parameters': {'foo': 'bar', 'blarg': 'wibble'}}
        body = {'environment': env}
        data = stacks.InstantiationData(body)
        self.assertEqual(env, data.environment())

    def test_environment_and_parameters(self):
        # Top-level parameters are merged into the environment parameters.
        body = {'parameters': {'foo': 'bar'},
                'environment': {'parameters': {'blarg': 'wibble'}}}
        expect = {'parameters': {'blarg': 'wibble',
                                 'foo': 'bar'},
                  'encrypted_param_names': [],
                  'parameter_defaults': {},
                  'event_sinks': [],
                  'resource_registry': {}}
        data = stacks.InstantiationData(body)
        self.assertEqual(expect, data.environment())

    def test_parameters_override_environment(self):
        # This tests that the cli parameters will override
        # any parameters in the environment.
        body = {'parameters': {'foo': 'bar',
                               'tester': 'Yes'},
                'environment': {'parameters': {'blarg': 'wibble',
                                               'tester': 'fail'}}}
        expect = {'parameters': {'blarg': 'wibble',
                                 'foo': 'bar',
                                 'tester': 'Yes'},
                  'encrypted_param_names': [],
                  'parameter_defaults': {},
                  'event_sinks': [],
                  'resource_registry': {}}
        data = stacks.InstantiationData(body)
        self.assertEqual(expect, data.environment())

    def test_environment_empty_params(self):
        # 'parameters': None inside the environment is malformed.
        env = {'parameters': None}
        body = {'environment': env}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.environment)

    def test_environment_bad_format(self):
        # Unknown top-level environment sections are rejected.
        env = {'somethingnotsupported': {'blarg': 'wibble'}}
        body = {'environment': json.dumps(env)}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.environment)

    def test_environment_missing(self):
        # No environment in the body -> an empty default environment.
        env = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'not the environment': env}
        data = stacks.InstantiationData(body)
        self.assertEqual({'parameters': {}, 'encrypted_param_names': [],
                          'parameter_defaults': {}, 'resource_registry': {},
                          'event_sinks': []},
                         data.environment())

    def test_args(self):
        # args() strips the well-known top-level keys and returns only the
        # remaining engine arguments (here just timeout_mins).
        body = {
            'parameters': {},
            'environment': {},
            'stack_name': 'foo',
            'template': {},
            'template_url': 'http://example.com/',
            'timeout_mins': 60,
        }
        data = stacks.InstantiationData(body)
        self.assertEqual({'timeout_mins': 60}, data.args())
@mock.patch.object(policy.Enforcer, 'enforce')
class StackControllerTest(tools.ControllerTest, common.HeatTestCase):
"""Tests the API class StackController.
Tests the API class which acts as the WSGI controller,
the endpoint processing API requests after they are routed
"""
def setUp(self):
super(StackControllerTest, self).setUp()
# Create WSGI controller instance
class DummyConfig(object):
bind_port = 8004
cfgopts = DummyConfig()
self.controller = stacks.StackController(options=cfgopts)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
req = self._get('/stacks')
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
engine_resp = [
{
u'stack_identity': dict(identity),
u'updated_time': u'2012-07-09T09:13:11Z',
u'template_description': u'blah',
u'description': u'blah',
u'stack_status_reason': u'Stack successfully created',
u'creation_time': u'2012-07-09T09:12:45Z',
u'stack_name': identity.stack_name,
u'stack_action': u'CREATE',
u'stack_status': u'COMPLETE',
u'parameters': {},
u'outputs': [],
u'notification_topics': [],
u'capabilities': [],
u'disable_rollback': True,
u'timeout_mins': 60,
}
]
mock_call.return_value = engine_resp
result = self.controller.index(req, tenant_id=identity.tenant)
expected = {
'stacks': [
{
'links': [{"href": self._url(identity),
"rel": "self"}],
'id': '1',
u'updated_time': u'2012-07-09T09:13:11Z',
u'description': u'blah',
u'stack_status_reason': u'Stack successfully created',
u'creation_time': u'2012-07-09T09:12:45Z',
u'stack_name': u'wordpress',
u'stack_status': u'CREATE_COMPLETE'
}
]
}
self.assertEqual(expected, result)
default_args = {'limit': None, 'sort_keys': None, 'marker': None,
'sort_dir': None, 'filters': None,
'show_deleted': False, 'show_nested': False,
'show_hidden': False, 'tags': None,
'tags_any': None, 'not_tags': None,
'not_tags_any': None}
mock_call.assert_called_once_with(
req.context, ('list_stacks', default_args), version='1.33')
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_whitelists_pagination_params(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {
'limit': 10,
'sort_keys': 'fake sort keys',
'marker': 'fake marker',
'sort_dir': 'fake sort dir',
'balrog': 'you shall not pass!'
}
req = self._get('/stacks', params=params)
mock_call.return_value = []
self.controller.index(req, tenant_id=self.tenant)
rpc_call_args, _ = mock_call.call_args
engine_args = rpc_call_args[1][1]
self.assertEqual(12, len(engine_args))
self.assertIn('limit', engine_args)
self.assertIn('sort_keys', engine_args)
self.assertIn('marker', engine_args)
self.assertIn('sort_dir', engine_args)
self.assertIn('filters', engine_args)
self.assertNotIn('balrog', engine_args)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_limit_not_int(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'limit': 'not-an-int'}
req = self._get('/stacks', params=params)
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req,
tenant_id=self.tenant)
self.assertEqual("Only integer is acceptable by 'limit'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_whitelist_filter_params(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {
'id': 'fake id',
'status': 'fake status',
'name': 'fake name',
'action': 'fake action',
'username': 'fake username',
'tenant': 'fake tenant',
'owner_id': 'fake owner-id',
'stack_name': 'fake stack name',
'stack_identity': 'fake identity',
'creation_time': 'create timestamp',
'updated_time': 'update timestamp',
'deletion_time': 'deletion timestamp',
'notification_topics': 'fake topic',
'description': 'fake description',
'template_description': 'fake description',
'parameters': 'fake params',
'outputs': 'fake outputs',
'stack_action': 'fake action',
'stack_status': 'fake status',
'stack_status_reason': 'fake status reason',
'capabilities': 'fake capabilities',
'disable_rollback': 'fake value',
'timeout_mins': 'fake timeout',
'stack_owner': 'fake owner',
'parent': 'fake parent',
'stack_user_project_id': 'fake project id',
'tags': 'fake tags',
'barlog': 'you shall not pass!'
}
req = self._get('/stacks', params=params)
mock_call.return_value = []
self.controller.index(req, tenant_id=self.tenant)
rpc_call_args, _ = mock_call.call_args
engine_args = rpc_call_args[1][1]
self.assertIn('filters', engine_args)
filters = engine_args['filters']
self.assertEqual(16, len(filters))
for key in ('id', 'status', 'name', 'action', 'username', 'tenant',
'owner_id', 'stack_name', 'stack_action', 'stack_status',
'stack_status_reason', 'disable_rollback', 'timeout_mins',
'stack_owner', 'parent', 'stack_user_project_id'):
self.assertIn(key, filters)
for key in ('stack_identity', 'creation_time', 'updated_time',
'deletion_time', 'notification_topics', 'description',
'template_description', 'parameters', 'outputs',
'capabilities', 'tags', 'barlog'):
self.assertNotIn(key, filters)
def test_index_returns_stack_count_if_with_count_is_true(
self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'True'}
req = self._get('/stacks', params=params)
engine = self.controller.rpc_client
engine.list_stacks = mock.Mock(return_value=[])
engine.count_stacks = mock.Mock(return_value=0)
result = self.controller.index(req, tenant_id=self.tenant)
self.assertEqual(0, result['count'])
def test_index_doesnt_return_stack_count_if_with_count_is_false(
self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'false'}
req = self._get('/stacks', params=params)
engine = self.controller.rpc_client
engine.list_stacks = mock.Mock(return_value=[])
engine.count_stacks = mock.Mock()
result = self.controller.index(req, tenant_id=self.tenant)
self.assertNotIn('count', result)
self.assertFalse(engine.count_stacks.called)
def test_index_with_count_is_invalid(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'invalid_value'}
req = self._get('/stacks', params=params)
exc = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index,
req, tenant_id=self.tenant)
excepted = ('Unrecognized value "invalid_value" for "with_count", '
'acceptable values are: true, false')
self.assertIn(excepted, six.text_type(exc))
@mock.patch.object(rpc_client.EngineClient, 'count_stacks')
def test_index_doesnt_break_with_old_engine(self, mock_count_stacks,
mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'True'}
req = self._get('/stacks', params=params)
engine = self.controller.rpc_client
engine.list_stacks = mock.Mock(return_value=[])
mock_count_stacks.side_effect = AttributeError("Should not exist")
result = self.controller.index(req, tenant_id=self.tenant)
self.assertNotIn('count', result)
def test_index_enforces_global_index_if_global_tenant(self, mock_enforce):
params = {'global_tenant': 'True'}
req = self._get('/stacks', params=params)
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
self.controller.index(req, tenant_id=self.tenant)
mock_enforce.assert_called_with(action='global_index',
scope=self.controller.REQUEST_SCOPE,
context=self.context)
def test_global_index_uses_admin_context(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
mock_admin_ctxt = self.patchobject(context, 'get_admin_context')
params = {'global_tenant': 'True'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY)
self.assertEqual(1, mock_admin_ctxt.call_count)
def test_global_index_show_deleted_false(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_deleted': 'False'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
show_deleted=False)
def test_global_index_show_deleted_true(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_deleted': 'True'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
show_deleted=True)
def test_global_index_show_nested_false(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_nested': 'False'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
show_nested=False)
def test_global_index_show_nested_true(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_nested': 'True'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
show_nested=True)
def test_index_show_deleted_True_with_count_True(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock(return_value=0)
params = {'show_deleted': 'True',
'with_count': 'True'}
req = self._get('/stacks', params=params)
result = self.controller.index(req, tenant_id=self.tenant)
self.assertEqual(0, result['count'])
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
show_deleted=True)
rpc_client.count_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
show_deleted=True,
show_nested=False,
show_hidden=False,
tags=None,
tags_any=None,
not_tags=None,
not_tags_any=None)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_detail(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'detail', True)
req = self._get('/stacks/detail')
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
engine_resp = [
{
u'stack_identity': dict(identity),
u'updated_time': u'2012-07-09T09:13:11Z',
u'template_description': u'blah',
u'description': u'blah',
u'stack_status_reason': u'Stack successfully created',
u'creation_time': u'2012-07-09T09:12:45Z',
u'stack_name': identity.stack_name,
u'stack_action': u'CREATE',
u'stack_status': u'COMPLETE',
u'parameters': {'foo': 'bar'},
u'outputs': ['key', 'value'],
u'notification_topics': [],
u'capabilities': [],
u'disable_rollback': True,
u'timeout_mins': 60,
}
]
mock_call.return_value = engine_resp
result = self.controller.detail(req, tenant_id=identity.tenant)
expected = {
'stacks': [
{
'links': [{"href": self._url(identity),
"rel": "self"}],
'id': '1',
u'updated_time': u'2012-07-09T09:13:11Z',
u'template_description': u'blah',
u'description': u'blah',
u'stack_status_reason': u'Stack successfully created',
u'creation_time': u'2012-07-09T09:12:45Z',
u'stack_name': identity.stack_name,
u'stack_status': u'CREATE_COMPLETE',
u'parameters': {'foo': 'bar'},
u'outputs': ['key', 'value'],
u'notification_topics': [],
u'capabilities': [],
u'disable_rollback': True,
u'timeout_mins': 60,
}
]
}
self.assertEqual(expected, result)
default_args = {'limit': None, 'sort_keys': None, 'marker': None,
'sort_dir': None, 'filters': None,
'show_deleted': False, 'show_nested': False,
'show_hidden': False, 'tags': None,
'tags_any': None, 'not_tags': None,
'not_tags_any': None}
mock_call.assert_called_once_with(
req.context, ('list_stacks', default_args), version='1.33')
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_rmt_aterr(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
req = self._get('/stacks')
mock_call.side_effect = tools.to_remote_error(AttributeError())
resp = tools.request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant)
self.assertEqual(400, resp.json['code'])
self.assertEqual('AttributeError', resp.json['error']['type'])
mock_call.assert_called_once_with(
req.context, ('list_stacks', mock.ANY), version='1.33')
def test_index_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', False)
req = self._get('/stacks')
resp = tools.request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_rmt_interr(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
req = self._get('/stacks')
mock_call.side_effect = tools.to_remote_error(Exception())
resp = tools.request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant)
self.assertEqual(500, resp.json['code'])
self.assertEqual('Exception', resp.json['error']['type'])
mock_call.assert_called_once_with(
req.context, ('list_stacks', mock.ANY), version='1.33')
    def test_create(self, mock_enforce):
        """POST /stacks issues a create_stack RPC and returns the new id."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'environment_files': ['foo.yaml'],
                'timeout_mins': 30}

        req = self._post('/stacks', json.dumps(body))

        # Record (mox) the exact create_stack RPC the controller must make.
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': ['foo.yaml'],
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()

        response = self.controller.create(req,
                                          tenant_id=identity.tenant,
                                          body=body)

        expected = {'stack':
                    {'id': '1',
                     'links': [{'href': self._url(identity), 'rel': 'self'}]}}
        self.assertEqual(expected, response)

        self.m.VerifyAll()
    def test_create_with_tags(self, mock_enforce):
        """A comma-separated 'tags' string is split into a list for the RPC."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'tags': 'tag1,tag2',
                'timeout_mins': 30}

        req = self._post('/stacks', json.dumps(body))

        # Record the expected RPC; note 'tags' arrives as ['tag1', 'tag2'].
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30, 'tags': ['tag1', 'tag2']},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()

        response = self.controller.create(req,
                                          tenant_id=identity.tenant,
                                          body=body)

        expected = {'stack':
                    {'id': '1',
                     'links': [{'href': self._url(identity), 'rel': 'self'}]}}
        self.assertEqual(expected, response)

        self.m.VerifyAll()
    def test_adopt(self, mock_enforce):
        """Adopt: 'adopt_stack_data' is forwarded and supplies the template.

        The request body carries template=None; the engine call is expected
        to receive the template embedded in the adopt data instead.
        """
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {
            "heat_template_version": "2013-05-23",
            "parameters": {"app_dbx": {"type": "string"}},
            "resources": {"res1": {"type": "GenericResourceType"}}}

        parameters = {"app_dbx": "test"}
        # Pre-existing stack state being adopted, including its resources.
        adopt_data = {
            "status": "COMPLETE",
            "name": "rtrove1",
            "parameters": parameters,
            "template": template,
            "action": "CREATE",
            "id": "8532f0d3-ea84-444e-b2bb-2543bb1496a4",
            "resources": {"res1": {
                "status": "COMPLETE",
                "name": "database_password",
                "resource_id": "yBpuUROjfGQ2gKOD",
                "action": "CREATE",
                "type": "GenericResourceType",
                "metadata": {}}}}
        body = {'template': None,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'timeout_mins': 30,
                'adopt_stack_data': str(adopt_data)}

        req = self._post('/stacks', json.dumps(body))

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30,
                       'adopt_stack_data': str(adopt_data)},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()

        response = self.controller.create(req,
                                          tenant_id=identity.tenant,
                                          body=body)

        expected = {'stack':
                    {'id': '1',
                     'links': [{'href': self._url(identity), 'rel': 'self'}]}}
        self.assertEqual(expected, response)

        self.m.VerifyAll()
def test_adopt_timeout_not_int(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'create', True)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
body = {'template': None,
'stack_name': identity.stack_name,
'parameters': {},
'timeout_mins': 'not-an-int',
'adopt_stack_data': 'does not matter'}
req = self._post('/stacks', json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req,
tenant_id=self.tenant, body=body)
self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
    def test_adopt_error(self, mock_enforce):
        """Adopt data that is not a dict (here a list) yields a 400 error."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        parameters = {"app_dbx": "test"}
        # Invalid adopt payload: a list instead of a stack-state mapping.
        adopt_data = ["Test"]
        body = {'template': None,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'timeout_mins': 30,
                'adopt_stack_data': str(adopt_data)}

        req = self._post('/stacks', json.dumps(body))

        # No engine expectations are recorded: the request fails validation.
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.create,
                                             req, tenant_id=self.tenant,
                                             body=body)

        self.assertEqual(400, resp.status_code)
        self.assertEqual('400 Bad Request', resp.status)
        self.assertIn('Invalid adopt data', resp.text)
        self.m.VerifyAll()
    def test_create_with_files(self, mock_enforce):
        """The request's 'files' mapping is passed through to create_stack."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'files': {'my.yaml': 'This is the file contents.'},
                'timeout_mins': 30}

        req = self._post('/stacks', json.dumps(body))

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {'my.yaml': 'This is the file contents.'},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()

        result = self.controller.create(req,
                                        tenant_id=identity.tenant,
                                        body=body)
        expected = {'stack':
                    {'id': '1',
                     'links': [{'href': self._url(identity), 'rel': 'self'}]}}
        self.assertEqual(expected, result)

        self.m.VerifyAll()
    def test_create_err_rpcerr(self, mock_enforce):
        """Three remote engine errors each map to an HTTP 400 fault.

        Records three consecutive create_stack expectations (mox replays
        them in order), raising AttributeError, UnknownUserParameter and
        UserParameterMissing respectively; each request must surface the
        matching error type with code 400.
        """
        # Policy is enforced once per request: three requests are made.
        self._mock_enforce_setup(mock_enforce, 'create', True, 3)
        stack_name = "wordpress"
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': stack_name,
                'parameters': parameters,
                'timeout_mins': 30}

        req = self._post('/stacks', json.dumps(body))

        unknown_parameter = heat_exc.UnknownUserParameter(key='a')
        missing_parameter = heat_exc.UserParameterMissing(key='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # First call: generic AttributeError from the engine.
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndRaise(tools.to_remote_error(AttributeError()))
        # Second call: unknown user parameter.
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndRaise(tools.to_remote_error(unknown_parameter))
        # Third call: missing user parameter.
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndRaise(tools.to_remote_error(missing_parameter))
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.create,
                                             req, tenant_id=self.tenant,
                                             body=body)

        self.assertEqual(400, resp.json['code'])
        self.assertEqual('AttributeError', resp.json['error']['type'])

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.create,
                                             req, tenant_id=self.tenant,
                                             body=body)

        self.assertEqual(400, resp.json['code'])
        self.assertEqual('UnknownUserParameter', resp.json['error']['type'])

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.create,
                                             req, tenant_id=self.tenant,
                                             body=body)

        self.assertEqual(400, resp.json['code'])
        self.assertEqual('UserParameterMissing', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_create_err_existing(self, mock_enforce):
        """Creating a stack whose name already exists maps to HTTP 409."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        stack_name = "wordpress"
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': stack_name,
                'parameters': parameters,
                'timeout_mins': 30}

        req = self._post('/stacks', json.dumps(body))

        error = heat_exc.StackExists(stack_name='s')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.create,
                                             req, tenant_id=self.tenant,
                                             body=body)

        self.assertEqual(409, resp.json['code'])
        self.assertEqual('StackExists', resp.json['error']['type'])
        self.m.VerifyAll()
def test_create_timeout_not_int(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'create', True)
stack_name = "wordpress"
template = {u'Foo': u'bar'}
parameters = {u'InstanceType': u'm1.xlarge'}
body = {'template': template,
'stack_name': stack_name,
'parameters': parameters,
'timeout_mins': 'not-an-int'}
req = self._post('/stacks', json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req,
tenant_id=self.tenant, body=body)
self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
def test_create_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'create', False)
stack_name = "wordpress"
template = {u'Foo': u'bar'}
parameters = {u'InstanceType': u'm1.xlarge'}
body = {'template': template,
'stack_name': stack_name,
'parameters': parameters,
'timeout_mins': 30}
req = self._post('/stacks', json.dumps(body))
resp = tools.request_with_middleware(fault.FaultWrapper,
self.controller.create,
req, tenant_id=self.tenant,
body=body)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_create_err_engine(self, mock_enforce):
        """An engine-side StackValidationFailed maps to HTTP 400."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        stack_name = "wordpress"
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': stack_name,
                'parameters': parameters,
                'timeout_mins': 30}

        req = self._post('/stacks', json.dumps(body))

        error = heat_exc.StackValidationFailed(message='')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None,
              'template_id': None}),
            version='1.29'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.create,
                                             req, tenant_id=self.tenant,
                                             body=body)

        self.assertEqual(400, resp.json['code'])
        self.assertEqual('StackValidationFailed', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_create_err_stack_bad_reqest(self, mock_enforce):
        """HTTP-disguised exceptions are unwrapped by the fault middleware.

        With debug enabled, the response also carries a traceback.
        """
        cfg.CONF.set_override('debug', True, enforce_type=True)
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'timeout_mins': 30}

        req = self._post('/stacks', json.dumps(body))

        error = heat_exc.HTTPExceptionDisguise(webob.exc.HTTPBadRequest())
        # Replace the controller action entirely; only middleware behavior
        # is under test here.
        self.controller.create = mock.MagicMock(side_effect=error)

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.create, req, body)

        # When HTTP disguised exceptions reach the fault app, they are
        # converted into regular responses, just like non-HTTP exceptions
        self.assertEqual(400, resp.json['code'])
        self.assertEqual('HTTPBadRequest', resp.json['error']['type'])
        self.assertIsNotNone(resp.json['error']['traceback'])
@mock.patch.object(rpc_client.EngineClient, 'call')
@mock.patch.object(stacks.stacks_view, 'format_stack')
def test_preview_stack(self, mock_format, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'preview', True)
body = {'stack_name': 'foo', 'template': {}, 'parameters': {}}
req = self._post('/stacks/preview', json.dumps(body))
mock_call.return_value = {}
mock_format.return_value = 'formatted_stack'
result = self.controller.preview(req, tenant_id=self.tenant, body=body)
self.assertEqual({'stack': 'formatted_stack'}, result)
    @mock.patch.object(rpc_client.EngineClient, 'call')
    @mock.patch.object(stacks.stacks_view, 'format_stack')
    def test_preview_with_tags_timeout(self, mock_format, mock_call,
                                       mock_enforce):
        """Preview forwards split tags and timeout in the RPC args."""
        self._mock_enforce_setup(mock_enforce, 'preview', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'tags': 'tag1,tag2',
                'timeout_mins': 30}

        req = self._post('/stacks/preview', json.dumps(body))
        mock_call.return_value = {}
        mock_format.return_value = 'formatted_stack_preview'

        response = self.controller.preview(req,
                                           tenant_id=identity.tenant,
                                           body=body)

        # The 'tags' string must be split into a list for the engine.
        rpc_client.EngineClient.call.assert_called_once_with(
            req.context,
            ('preview_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30, 'tags': ['tag1', 'tag2']}}),
            version='1.23'
        )
        self.assertEqual({'stack': 'formatted_stack_preview'}, response)
    def test_preview_update_stack(self, mock_enforce):
        """PUT .../preview issues preview_update_stack and returns changes."""
        self._mock_enforce_setup(mock_enforce, 'preview_update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}

        req = self._put('/stacks/%(stack_name)s/%(stack_id)s/preview' %
                        identity, json.dumps(body))
        # Empty change-set returned by the engine.
        resource_changes = {'updated': [],
                            'deleted': [],
                            'unchanged': [],
                            'added': [],
                            'replaced': []}

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('preview_update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30}}),
            version='1.23'
        ).AndReturn(resource_changes)
        self.m.ReplayAll()

        result = self.controller.preview_update(req, tenant_id=identity.tenant,
                                                stack_name=identity.stack_name,
                                                stack_id=identity.stack_id,
                                                body=body)
        self.assertEqual({'resource_changes': resource_changes}, result)
        self.m.VerifyAll()
    def test_preview_update_stack_patch(self, mock_enforce):
        """PATCH .../preview sets PARAM_EXISTING so existing values merge."""
        self._mock_enforce_setup(mock_enforce, 'preview_update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        parameters = {u'InstanceType': u'm1.xlarge'}
        # template is None: PATCH reuses the existing stack template.
        body = {'template': None,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}

        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s/preview' %
                          identity, json.dumps(body))
        resource_changes = {'updated': [],
                            'deleted': [],
                            'unchanged': [],
                            'added': [],
                            'replaced': []}

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('preview_update_stack',
             {'stack_identity': dict(identity),
              'template': None,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {rpc_api.PARAM_EXISTING: True,
                       'timeout_mins': 30}}),
            version='1.23'
        ).AndReturn(resource_changes)
        self.m.ReplayAll()

        result = self.controller.preview_update_patch(
            req, tenant_id=identity.tenant, stack_name=identity.stack_name,
            stack_id=identity.stack_id, body=body)
        self.assertEqual({'resource_changes': resource_changes}, result)
        self.m.VerifyAll()
    @mock.patch.object(rpc_client.EngineClient, 'call')
    def test_update_immutable_parameter(self, mock_call, mock_enforce):
        """Updating an immutable parameter maps to HTTP 400 with a message.

        NOTE(review): the decorator-provided mock_call is immediately
        superseded by the mox StubOutWithMock below and is never used —
        looks redundant; confirm before removing.
        """
        self._mock_enforce_setup(mock_enforce, 'update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'param1': u'bar'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}

        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' %
                        identity, json.dumps(body))

        error = heat_exc.ImmutableParameterModified(keys='param1')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {u'parameters': parameters,
                         u'encrypted_param_names': [],
                         u'parameter_defaults': {},
                         u'event_sinks': [],
                         u'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.update,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id,
                                             body=body)

        self.assertEqual(400, resp.json['code'])
        self.assertEqual('ImmutableParameterModified',
                         resp.json['error']['type'])
        self.assertIn("The following parameters are immutable",
                      six.text_type(resp.json['error']['message']))
        self.m.VerifyAll()
    def test_lookup(self, mock_enforce):
        """Lookup by stack name redirects (302) to the canonical stack URL."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        req = self._get('/stacks/%(stack_name)s' % identity)

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': identity.stack_name})
        ).AndReturn(identity)
        self.m.ReplayAll()

        # lookup raises HTTPFound carrying the resolved location.
        found = self.assertRaises(
            webob.exc.HTTPFound, self.controller.lookup, req,
            tenant_id=identity.tenant, stack_name=identity.stack_name)
        self.assertEqual(self._url(identity), found.location)

        self.m.VerifyAll()
    def test_lookup_arn(self, mock_enforce):
        """Lookup by full ARN redirects without calling the engine."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        req = self._get('/stacks%s' % identity.arn_url_path())

        # No RPC expectation: the ARN already identifies the stack.
        self.m.ReplayAll()

        found = self.assertRaises(
            webob.exc.HTTPFound, self.controller.lookup,
            req, tenant_id=identity.tenant, stack_name=identity.arn())
        self.assertEqual(self._url(identity), found.location)

        self.m.VerifyAll()
    def test_lookup_nonexistent(self, mock_enforce):
        """Lookup of a missing stack maps EntityNotFound to HTTP 404."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        stack_name = 'wibble'

        req = self._get('/stacks/%(stack_name)s' % {
            'stack_name': stack_name})

        error = heat_exc.EntityNotFound(entity='Stack', name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': stack_name})
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.lookup,
                                             req, tenant_id=self.tenant,
                                             stack_name=stack_name)

        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_lookup_err_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'lookup', False)
stack_name = 'wibble'
req = self._get('/stacks/%(stack_name)s' % {
'stack_name': stack_name})
resp = tools.request_with_middleware(fault.FaultWrapper,
self.controller.lookup,
req, tenant_id=self.tenant,
stack_name=stack_name)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_lookup_resource(self, mock_enforce):
        """Lookup with a sub-path redirects to the stack URL plus the path."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        req = self._get('/stacks/%(stack_name)s/resources' % identity)

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': identity.stack_name})
        ).AndReturn(identity)
        self.m.ReplayAll()

        found = self.assertRaises(
            webob.exc.HTTPFound, self.controller.lookup, req,
            tenant_id=identity.tenant, stack_name=identity.stack_name,
            path='resources')
        # The '/resources' suffix is preserved on the redirect target.
        self.assertEqual(self._url(identity) + '/resources',
                         found.location)

        self.m.VerifyAll()
    def test_lookup_resource_nonexistent(self, mock_enforce):
        """Sub-path lookup of a missing stack maps EntityNotFound to 404."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        stack_name = 'wibble'

        req = self._get('/stacks/%(stack_name)s/resources' % {
            'stack_name': stack_name})

        error = heat_exc.EntityNotFound(entity='Stack', name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': stack_name})
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.lookup,
                                             req, tenant_id=self.tenant,
                                             stack_name=stack_name,
                                             path='resources')

        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_lookup_resource_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'lookup', False)
stack_name = 'wibble'
req = self._get('/stacks/%(stack_name)s/resources' % {
'stack_name': stack_name})
resp = tools.request_with_middleware(fault.FaultWrapper,
self.controller.lookup,
req, tenant_id=self.tenant,
stack_name=stack_name,
path='resources')
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_show(self, mock_enforce):
        """GET a single stack with resolve_outputs=True includes outputs."""
        self._mock_enforce_setup(mock_enforce, 'show', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')

        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        params={'resolve_outputs': True})

        parameters = {u'DBUsername': u'admin',
                      u'LinuxDistribution': u'F17',
                      u'InstanceType': u'm1.large',
                      u'DBRootPassword': u'admin',
                      u'DBPassword': u'admin',
                      u'DBName': u'wordpress'}
        outputs = [{u'output_key': u'WebsiteURL',
                    u'description': u'URL for Wordpress wiki',
                    u'output_value': u'http://10.0.0.8/wordpress'}]

        # Engine returns a one-element list; the controller unwraps it.
        engine_resp = [
            {
                u'stack_identity': dict(identity),
                u'updated_time': u'2012-07-09T09:13:11Z',
                u'parameters': parameters,
                u'outputs': outputs,
                u'stack_status_reason': u'Stack successfully created',
                u'creation_time': u'2012-07-09T09:12:45Z',
                u'stack_name': identity.stack_name,
                u'notification_topics': [],
                u'stack_action': u'CREATE',
                u'stack_status': u'COMPLETE',
                u'description': u'blah',
                u'disable_rollback': True,
                u'timeout_mins':60,
                u'capabilities': [],
            }
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('show_stack', {'stack_identity': dict(identity),
                            'resolve_outputs': True}),
            version='1.20'
        ).AndReturn(engine_resp)
        self.m.ReplayAll()

        response = self.controller.show(req,
                                        tenant_id=identity.tenant,
                                        stack_name=identity.stack_name,
                                        stack_id=identity.stack_id)

        # action/status pair is merged into 'stack_status' by the view.
        expected = {
            'stack': {
                'links': [{"href": self._url(identity),
                           "rel": "self"}],
                'id': '6',
                u'updated_time': u'2012-07-09T09:13:11Z',
                u'parameters': parameters,
                u'outputs': outputs,
                u'description': u'blah',
                u'stack_status_reason': u'Stack successfully created',
                u'creation_time': u'2012-07-09T09:12:45Z',
                u'stack_name': identity.stack_name,
                u'stack_status': u'CREATE_COMPLETE',
                u'capabilities': [],
                u'notification_topics': [],
                u'disable_rollback': True,
                u'timeout_mins': 60,
            }
        }
        self.assertEqual(expected, response)
        self.m.VerifyAll()
    def test_show_without_resolve_outputs(self, mock_enforce):
        """GET a stack with resolve_outputs=False omits the outputs key."""
        self._mock_enforce_setup(mock_enforce, 'show', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')

        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        params={'resolve_outputs': False})

        parameters = {u'DBUsername': u'admin',
                      u'LinuxDistribution': u'F17',
                      u'InstanceType': u'm1.large',
                      u'DBRootPassword': u'admin',
                      u'DBPassword': u'admin',
                      u'DBName': u'wordpress'}

        # Engine response deliberately has no 'outputs' entry.
        engine_resp = [
            {
                u'stack_identity': dict(identity),
                u'updated_time': u'2012-07-09T09:13:11Z',
                u'parameters': parameters,
                u'stack_status_reason': u'Stack successfully created',
                u'creation_time': u'2012-07-09T09:12:45Z',
                u'stack_name': identity.stack_name,
                u'notification_topics': [],
                u'stack_action': u'CREATE',
                u'stack_status': u'COMPLETE',
                u'description': u'blah',
                u'disable_rollback': True,
                u'timeout_mins':60,
                u'capabilities': [],
            }
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('show_stack', {'stack_identity': dict(identity),
                            'resolve_outputs': False}),
            version='1.20'
        ).AndReturn(engine_resp)
        self.m.ReplayAll()

        response = self.controller.show(req,
                                        tenant_id=identity.tenant,
                                        stack_name=identity.stack_name,
                                        stack_id=identity.stack_id)

        expected = {
            'stack': {
                'links': [{"href": self._url(identity),
                           "rel": "self"}],
                'id': '6',
                u'updated_time': u'2012-07-09T09:13:11Z',
                u'parameters': parameters,
                u'description': u'blah',
                u'stack_status_reason': u'Stack successfully created',
                u'creation_time': u'2012-07-09T09:12:45Z',
                u'stack_name': identity.stack_name,
                u'stack_status': u'CREATE_COMPLETE',
                u'capabilities': [],
                u'notification_topics': [],
                u'disable_rollback': True,
                u'timeout_mins': 60,
            }
        }
        self.assertEqual(expected, response)
        self.m.VerifyAll()
    def test_show_notfound(self, mock_enforce):
        """Showing a missing stack maps EntityNotFound to HTTP 404.

        Note: show_stack defaults to resolve_outputs=True when the query
        string does not override it.
        """
        self._mock_enforce_setup(mock_enforce, 'show', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')

        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)

        error = heat_exc.EntityNotFound(entity='Stack', name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('show_stack', {'stack_identity': dict(identity),
                            'resolve_outputs': True}),
            version='1.20'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.show,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id)

        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_show_invalidtenant(self, mock_enforce):
        """Showing a stack owned by another tenant is rejected with 403."""
        identity = identifier.HeatIdentifier('wibble', 'wordpress', '6')

        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)

        # No RPC expectation: the request is rejected before the engine.
        self.m.ReplayAll()

        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.show,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id)

        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
        self.m.VerifyAll()
def test_show_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', False)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
resp = tools.request_with_middleware(fault.FaultWrapper,
self.controller.show,
req, tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_get_template(self, mock_enforce):
        """The template action returns the engine's get_template result."""
        self._mock_enforce_setup(mock_enforce, 'template', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        template = {u'Foo': u'bar'}

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('get_template', {'stack_identity': dict(identity)})
        ).AndReturn(template)
        self.m.ReplayAll()

        response = self.controller.template(req, tenant_id=identity.tenant,
                                            stack_name=identity.stack_name,
                                            stack_id=identity.stack_id)

        # The template is passed through unmodified.
        self.assertEqual(template, response)
        self.m.VerifyAll()
    def test_get_environment(self, mock_enforce):
        """The environment action returns get_environment (RPC 1.28)."""
        self._mock_enforce_setup(mock_enforce, 'environment', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        env = {'parameters': {'Foo': 'bar'}}

        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('get_environment', {'stack_identity': dict(identity)},),
            version='1.28',
        ).AndReturn(env)

        self.m.ReplayAll()
        response = self.controller.environment(req, tenant_id=identity.tenant,
                                               stack_name=identity.stack_name,
                                               stack_id=identity.stack_id)

        self.assertEqual(env, response)
        self.m.VerifyAll()
    def test_get_files(self, mock_enforce):
        """The controller returns the get_files RPC result (v1.32)."""
        self._mock_enforce_setup(mock_enforce, 'files', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        files = {'foo.yaml': 'i am yaml'}
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('get_files', {'stack_identity': dict(identity)},),
            version='1.32',
        ).AndReturn(files)
        self.m.ReplayAll()
        response = self.controller.files(req, tenant_id=identity.tenant,
                                         stack_name=identity.stack_name,
                                         stack_id=identity.stack_id)
        self.assertEqual(files, response)
        self.m.VerifyAll()
    def test_get_template_err_denied_policy(self, mock_enforce):
        """A denied 'template' policy check yields a 403 Forbidden response."""
        self._mock_enforce_setup(mock_enforce, 'template', False)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s/template'
                        % identity)
        # ReplayAll with no recorded expectations: verifies no RPC is made.
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.template,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
        self.m.VerifyAll()
    def test_get_template_err_notfound(self, mock_enforce):
        """A remote EntityNotFound from get_template maps to HTTP 404."""
        self._mock_enforce_setup(mock_enforce, 'template', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        error = heat_exc.EntityNotFound(entity='Stack', name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('get_template', {'stack_identity': dict(identity)})
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.template,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_update(self, mock_enforce):
        """A PUT update issues an update_stack RPC and returns HTTP 202."""
        self._mock_enforce_setup(mock_enforce, 'update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}
        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Expect the user params expanded into the full environment shape.
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        # HTTPAccepted is raised (not returned) to signal 202 to the WSGI layer.
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_tags(self, mock_enforce):
        """A comma-separated 'tags' string is split into a list for the RPC."""
        self._mock_enforce_setup(mock_enforce, 'update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'tags': 'tag1,tag2',
                'timeout_mins': 30}
        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30, 'tags': ['tag1', 'tag2']},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_bad_name(self, mock_enforce):
        """A remote EntityNotFound during update_stack maps to HTTP 404."""
        self._mock_enforce_setup(mock_enforce, 'update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}
        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        json.dumps(body))
        error = heat_exc.EntityNotFound(entity='Stack', name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {u'parameters': parameters,
                         u'encrypted_param_names': [],
                         u'parameter_defaults': {},
                         u'event_sinks': [],
                         u'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.update,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id,
                                             body=body)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_update_timeout_not_int(self, mock_enforce):
        """A non-integer timeout_mins is rejected with 400 before any RPC."""
        self._mock_enforce_setup(mock_enforce, 'update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 'not-int'}
        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        json.dumps(body))
        mock_call = self.patchobject(rpc_client.EngineClient, 'call')
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self.controller.update, req,
                               tenant_id=identity.tenant,
                               stack_name=identity.stack_name,
                               stack_id=identity.stack_id,
                               body=body)
        self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
                         six.text_type(ex))
        # Validation failure must short-circuit before the engine is called.
        self.assertFalse(mock_call.called)
    def test_update_err_denied_policy(self, mock_enforce):
        """A denied 'update' policy check yields a 403 Forbidden response."""
        self._mock_enforce_setup(mock_enforce, 'update', False)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}
        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        json.dumps(body))
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.update,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id,
                                             body=body)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
    def test_update_with_existing_template(self, mock_enforce):
        """PATCH with a null template sets PARAM_EXISTING in the RPC args."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        body = {'template': None,
                'parameters': {},
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': None,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {rpc_api.PARAM_EXISTING: True,
                       'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_existing_parameters(self, mock_enforce):
        """PATCH with empty parameters keeps PARAM_EXISTING in the RPC args."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        body = {'template': template,
                'parameters': {},
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {rpc_api.PARAM_EXISTING: True,
                       'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_existing_parameters_with_tags(self, mock_enforce):
        """PATCH with tags splits the tag string and keeps PARAM_EXISTING."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        body = {'template': template,
                'parameters': {},
                'files': {},
                'tags': 'tag1,tag2',
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {rpc_api.PARAM_EXISTING: True,
                       'timeout_mins': 30,
                       'tags': ['tag1', 'tag2']},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_patched_existing_parameters(self, mock_enforce):
        """PATCH with new parameter values forwards them plus PARAM_EXISTING."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {rpc_api.PARAM_EXISTING: True,
                       'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_patch_timeout_not_int(self, mock_enforce):
        """PATCH with non-integer timeout_mins is rejected before any RPC."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 'not-int'}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        mock_call = self.patchobject(rpc_client.EngineClient, 'call')
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self.controller.update_patch, req,
                               tenant_id=identity.tenant,
                               stack_name=identity.stack_name,
                               stack_id=identity.stack_id,
                               body=body)
        self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
                         six.text_type(ex))
        # Validation failure must short-circuit before the engine is called.
        self.assertFalse(mock_call.called)
    def test_update_with_existing_and_default_parameters(
            self, mock_enforce):
        """PATCH forwards clear_parameters alongside PARAM_EXISTING."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        clear_params = [u'DBUsername', u'DBPassword', u'LinuxDistribution']
        body = {'template': template,
                'parameters': {},
                'clear_parameters': clear_params,
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {rpc_api.PARAM_EXISTING: True,
                       'clear_parameters': clear_params,
                       'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_patched_and_default_parameters(
            self, mock_enforce):
        """PATCH forwards new parameters and clear_parameters together."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        clear_params = [u'DBUsername', u'DBPassword', u'LinuxDistribution']
        body = {'template': template,
                'parameters': parameters,
                'clear_parameters': clear_params,
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'args': {rpc_api.PARAM_EXISTING: True,
                       'clear_parameters': clear_params,
                       'timeout_mins': 30},
              'template_id': None}),
            version='1.29'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_delete(self, mock_enforce):
        """A successful delete_stack RPC results in HTTP 204 No Content."""
        self._mock_enforce_setup(mock_enforce, 'delete', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Engine returns None when delete successful
        rpc_client.EngineClient.call(
            req.context,
            ('delete_stack', {'stack_identity': dict(identity)})
        ).AndReturn(None)
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPNoContent,
                          self.controller.delete,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id)
        self.m.VerifyAll()
    def test_delete_err_denied_policy(self, mock_enforce):
        """A denied 'delete' policy check yields a 403 Forbidden response."""
        self._mock_enforce_setup(mock_enforce, 'delete', False)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.delete,
                                             req, tenant_id=self.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
    def test_export(self, mock_enforce):
        """The controller returns the export_stack RPC result (v1.22)."""
        self._mock_enforce_setup(mock_enforce, 'export', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s/export' %
                        identity)
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Engine returns json data
        expected = {"name": "test", "id": "123"}
        rpc_client.EngineClient.call(
            req.context,
            ('export_stack', {'stack_identity': dict(identity)}),
            version='1.22'
        ).AndReturn(expected)
        self.m.ReplayAll()
        ret = self.controller.export(req,
                                     tenant_id=identity.tenant,
                                     stack_name=identity.stack_name,
                                     stack_id=identity.stack_id)
        self.assertEqual(expected, ret)
        self.m.VerifyAll()
    def test_abandon(self, mock_enforce):
        """The controller returns the abandon_stack RPC result unchanged."""
        self._mock_enforce_setup(mock_enforce, 'abandon', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._abandon('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Engine returns json data on abandon completion
        expected = {"name": "test", "id": "123"}
        rpc_client.EngineClient.call(
            req.context,
            ('abandon_stack', {'stack_identity': dict(identity)})
        ).AndReturn(expected)
        self.m.ReplayAll()
        ret = self.controller.abandon(req,
                                      tenant_id=identity.tenant,
                                      stack_name=identity.stack_name,
                                      stack_id=identity.stack_id)
        self.assertEqual(expected, ret)
        self.m.VerifyAll()
    def test_abandon_err_denied_policy(self, mock_enforce):
        """A denied 'abandon' policy check yields a 403 Forbidden response."""
        self._mock_enforce_setup(mock_enforce, 'abandon', False)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._abandon('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.abandon,
                                             req, tenant_id=self.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
    def test_delete_bad_name(self, mock_enforce):
        """A remote EntityNotFound during delete_stack maps to HTTP 404."""
        self._mock_enforce_setup(mock_enforce, 'delete', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
        req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        error = heat_exc.EntityNotFound(entity='Stack', name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Engine returns None when delete successful
        rpc_client.EngineClient.call(
            req.context,
            ('delete_stack', {'stack_identity': dict(identity)})
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.delete,
                                             req, tenant_id=identity.tenant,
                                             stack_name=identity.stack_name,
                                             stack_id=identity.stack_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_validate_template(self, mock_enforce):
        """A valid template is passed through the validate_template RPC."""
        self._mock_enforce_setup(mock_enforce, 'validate_template', True)
        template = {u'Foo': u'bar'}
        body = {'template': template}
        req = self._post('/validate', json.dumps(body))
        engine_response = {
            u'Description': u'blah',
            u'Parameters': [
                {
                    u'NoEcho': u'false',
                    u'ParameterKey': u'InstanceType',
                    u'Description': u'Instance type'
                }
            ]
        }
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('validate_template',
             {'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'show_nested': False,
              'ignorable_errors': None}),
            version='1.24'
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.validate_template(req,
                                                     tenant_id=self.tenant,
                                                     body=body)
        self.assertEqual(engine_response, response)
        self.m.VerifyAll()
    def test_validate_template_error(self, mock_enforce):
        """An engine 'Error' result from validation raises HTTP 400."""
        self._mock_enforce_setup(mock_enforce, 'validate_template', True)
        template = {u'Foo': u'bar'}
        body = {'template': template}
        req = self._post('/validate', json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('validate_template',
             {'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'event_sinks': [],
                         'resource_registry': {}},
              'files': {},
              'environment_files': None,
              'show_nested': False,
              'ignorable_errors': None}),
            version='1.24'
        ).AndReturn({'Error': 'fubar'})
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.validate_template,
                          req, tenant_id=self.tenant, body=body)
        self.m.VerifyAll()
    def test_validate_err_denied_policy(self, mock_enforce):
        """A denied 'validate_template' policy check yields 403 Forbidden."""
        self._mock_enforce_setup(mock_enforce, 'validate_template', False)
        template = {u'Foo': u'bar'}
        body = {'template': template}
        req = self._post('/validate', json.dumps(body))
        resp = tools.request_with_middleware(
            fault.FaultWrapper,
            self.controller.validate_template,
            req, tenant_id=self.tenant, body=body)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
    def test_list_resource_types(self, mock_enforce):
        """Resource type names are wrapped under a 'resource_types' key."""
        self._mock_enforce_setup(mock_enforce, 'list_resource_types', True)
        req = self._get('/resource_types')
        engine_response = ['AWS::EC2::Instance',
                           'AWS::EC2::EIP',
                           'AWS::EC2::EIPAssociation']
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_resource_types',
             {
                 'support_status': None,
                 'type_name': None,
                 'heat_version': None,
                 'with_description': False
             }),
            version="1.30"
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.list_resource_types(req,
                                                       tenant_id=self.tenant)
        self.assertEqual({'resource_types': engine_response}, response)
        self.m.VerifyAll()
    def test_list_resource_types_error(self, mock_enforce):
        """A remote EntityNotFound from the type listing maps to HTTP 404."""
        self._mock_enforce_setup(mock_enforce, 'list_resource_types', True)
        req = self._get('/resource_types')
        error = heat_exc.EntityNotFound(entity='Resource Type', name='')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_resource_types',
             {
                 'support_status': None,
                 'type_name': None,
                 'heat_version': None,
                 'with_description': False
             }),
            version="1.30"
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()
        resp = tools.request_with_middleware(
            fault.FaultWrapper,
            self.controller.list_resource_types,
            req, tenant_id=self.tenant)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_list_resource_types_err_denied_policy(self, mock_enforce):
        """A denied 'list_resource_types' policy check yields 403 Forbidden."""
        self._mock_enforce_setup(mock_enforce, 'list_resource_types', False)
        req = self._get('/resource_types')
        resp = tools.request_with_middleware(
            fault.FaultWrapper,
            self.controller.list_resource_types,
            req, tenant_id=self.tenant)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
    def test_list_outputs(self, mock_enforce):
        """Stack outputs are wrapped under an 'outputs' key (RPC v1.19)."""
        self._mock_enforce_setup(mock_enforce, 'list_outputs', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        outputs = [
            {'output_key': 'key1', 'description': 'description'},
            {'output_key': 'key2', 'description': 'description1'}
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_outputs', {'stack_identity': dict(identity)}),
            version='1.19'
        ).AndReturn(outputs)
        self.m.ReplayAll()
        response = self.controller.list_outputs(req, tenant_id=identity.tenant,
                                                stack_name=identity.stack_name,
                                                stack_id=identity.stack_id)
        self.assertEqual({'outputs': outputs}, response)
        self.m.VerifyAll()
    def test_show_output(self, mock_enforce):
        """A single output is wrapped under an 'output' key (RPC v1.19)."""
        self._mock_enforce_setup(mock_enforce, 'show_output', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s/key' % identity)
        output = {'output_key': 'key',
                  'output_value': 'val',
                  'description': 'description'}
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('show_output', {'output_key': 'key',
                             'stack_identity': dict(identity)}),
            version='1.19'
        ).AndReturn(output)
        self.m.ReplayAll()
        response = self.controller.show_output(req, tenant_id=identity.tenant,
                                               stack_name=identity.stack_name,
                                               stack_id=identity.stack_id,
                                               output_key='key')
        self.assertEqual({'output': output}, response)
        self.m.VerifyAll()
    def test_list_template_versions(self, mock_enforce):
        """Template versions are wrapped under 'template_versions' (v1.11)."""
        self._mock_enforce_setup(mock_enforce, 'list_template_versions', True)
        req = self._get('/template_versions')
        engine_response = [
            {'version': 'heat_template_version.2013-05-23', 'type': 'hot'},
            {'version': 'AWSTemplateFormatVersion.2010-09-09', 'type': 'cfn'}]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context, ('list_template_versions', {}),
            version="1.11"
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.list_template_versions(
            req, tenant_id=self.tenant)
        self.assertEqual({'template_versions': engine_response}, response)
        self.m.VerifyAll()
    def test_list_template_functions(self, mock_enforce):
        """Template functions are wrapped under 'template_functions' (v1.13)."""
        self._mock_enforce_setup(mock_enforce, 'list_template_functions', True)
        req = self._get('/template_versions/t1/functions')
        engine_response = [
            {'functions': 'func1', 'description': 'desc1'},
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context, (
                'list_template_functions', {'template_version': 't1'}),
            version="1.13"
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.list_template_functions(
            req, tenant_id=self.tenant, template_version='t1')
        self.assertEqual({'template_functions': engine_response}, response)
        self.m.VerifyAll()
    def test_resource_schema(self, mock_enforce):
        """The resource_schema RPC result is returned unchanged (v1.30)."""
        self._mock_enforce_setup(mock_enforce, 'resource_schema', True)
        req = self._get('/resource_types/ResourceWithProps')
        type_name = 'ResourceWithProps'
        engine_response = {
            'resource_type': type_name,
            'properties': {
                'Foo': {'type': 'string', 'required': False},
            },
            'attributes': {
                'foo': {'description': 'A generic attribute'},
                'Foo': {'description': 'Another generic attribute'},
            },
            'support_status': {
                'status': 'SUPPORTED',
                'version': None,
                'message': None,
            },
        }
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('resource_schema', {'type_name': type_name,
                                 'with_description': False}),
            version='1.30'
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.resource_schema(req,
                                                   tenant_id=self.tenant,
                                                   type_name=type_name)
        self.assertEqual(engine_response, response)
        self.m.VerifyAll()
    def test_resource_schema_nonexist(self, mock_enforce):
        """An unknown resource type maps a remote EntityNotFound to 404."""
        self._mock_enforce_setup(mock_enforce, 'resource_schema', True)
        req = self._get('/resource_types/BogusResourceType')
        type_name = 'BogusResourceType'
        error = heat_exc.EntityNotFound(entity='Resource Type',
                                        name='BogusResourceType')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('resource_schema', {'type_name': type_name,
                                 'with_description': False}),
            version='1.30'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.resource_schema,
                                             req, tenant_id=self.tenant,
                                             type_name=type_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_resource_schema_faulty_template(self, mock_enforce):
        """A remote InvalidGlobalResource error maps to HTTP 500."""
        self._mock_enforce_setup(mock_enforce, 'resource_schema', True)
        req = self._get('/resource_types/FaultyTemplate')
        type_name = 'FaultyTemplate'
        error = heat_exc.InvalidGlobalResource(type_name='FaultyTemplate')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('resource_schema', {'type_name': type_name,
                                 'with_description': False}),
            version='1.30'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.resource_schema,
                                             req, tenant_id=self.tenant,
                                             type_name=type_name)
        self.assertEqual(500, resp.json['code'])
        self.assertEqual('InvalidGlobalResource', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_resource_schema_err_denied_policy(self, mock_enforce):
        """A denied 'resource_schema' policy check yields 403 Forbidden."""
        self._mock_enforce_setup(mock_enforce, 'resource_schema', False)
        req = self._get('/resource_types/BogusResourceType')
        type_name = 'BogusResourceType'
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.resource_schema,
                                             req, tenant_id=self.tenant,
                                             type_name=type_name)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
    def test_generate_template(self, mock_enforce):
        """generate_template defaults to the 'cfn' template type (v1.9)."""
        self._mock_enforce_setup(mock_enforce, 'generate_template', True)
        req = self._get('/resource_types/TEST_TYPE/template')
        engine_response = {'Type': 'TEST_TYPE'}
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('generate_template', {'type_name': 'TEST_TYPE',
                                   'template_type': 'cfn'}),
            version='1.9'
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        self.controller.generate_template(req, tenant_id=self.tenant,
                                          type_name='TEST_TYPE')
        self.m.VerifyAll()
    def test_generate_template_invalid_template_type(self, mock_enforce):
        """An unsupported template_type query param raises 400, no RPC made."""
        self._mock_enforce_setup(mock_enforce, 'generate_template', True)
        params = {'template_type': 'invalid'}
        mock_call = self.patchobject(rpc_client.EngineClient, 'call')
        req = self._get('/resource_types/TEST_TYPE/template',
                        params=params)
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self.controller.generate_template,
                               req, tenant_id=self.tenant,
                               type_name='TEST_TYPE')
        self.assertIn('Template type is not supported: Invalid template '
                      'type "invalid", valid types are: cfn, hot.',
                      six.text_type(ex))
        # Validation failure must short-circuit before the engine is called.
        self.assertFalse(mock_call.called)
    def test_generate_template_not_found(self, mock_enforce):
        """A remote EntityNotFound from generate_template maps to HTTP 404."""
        self._mock_enforce_setup(mock_enforce, 'generate_template', True)
        req = self._get('/resource_types/NOT_FOUND/template')
        error = heat_exc.EntityNotFound(entity='Resource Type', name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('generate_template', {'type_name': 'NOT_FOUND',
                                   'template_type': 'cfn'}),
            version='1.9'
        ).AndRaise(tools.to_remote_error(error))
        self.m.ReplayAll()
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.generate_template,
                                             req, tenant_id=self.tenant,
                                             type_name='NOT_FOUND')
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('EntityNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_generate_template_err_denied_policy(self, mock_enforce):
        """A denied 'generate_template' policy check yields 403 Forbidden."""
        self._mock_enforce_setup(mock_enforce, 'generate_template', False)
        req = self._get('/resource_types/NOT_FOUND/template')
        resp = tools.request_with_middleware(fault.FaultWrapper,
                                             self.controller.generate_template,
                                             req, tenant_id=self.tenant,
                                             type_name='blah')
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
class StackSerializerTest(common.HeatTestCase):
    """Tests for the serializer used to build stack-create responses."""

    def setUp(self):
        super(StackSerializerTest, self).setUp()
        self.serializer = stacks.StackSerializer()

    def test_serialize_create(self):
        """create() sets 201, the Location header and a JSON content type."""
        stack_body = {
            'stack': {
                'id': '1',
                'links': [{'href': 'location', "rel": "self"}],
            },
        }
        response = self.serializer.create(webob.Response(), stack_body)
        self.assertEqual(201, response.status_int)
        self.assertEqual('location', response.headers['Location'])
        self.assertEqual('application/json', response.headers['Content-Type'])
| 42.204628 | 79 | 0.536997 | 11,169 | 116,738 | 5.378011 | 0.04611 | 0.045782 | 0.039456 | 0.046615 | 0.864068 | 0.851433 | 0.830523 | 0.813675 | 0.794613 | 0.781194 | 0 | 0.011292 | 0.343804 | 116,738 | 2,765 | 80 | 42.219892 | 0.772842 | 0.009903 | 0 | 0.758284 | 0 | 0 | 0.171453 | 0.025618 | 0 | 0 | 0 | 0 | 0.079014 | 1 | 0.048853 | false | 0.004673 | 0.007222 | 0 | 0.057774 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0a6449d9e80522fcbc838a339110efb1348099cc | 19,215 | py | Python | kubernetes/client/models/v1_csi_driver_spec.py | sthagen/kubernetes-client-python | 3a183048d7d568ba5ea418bcfb8f61713908d3ea | [
"Apache-2.0"
] | null | null | null | kubernetes/client/models/v1_csi_driver_spec.py | sthagen/kubernetes-client-python | 3a183048d7d568ba5ea418bcfb8f61713908d3ea | [
"Apache-2.0"
] | 3 | 2021-11-30T03:11:13.000Z | 2022-02-09T03:39:41.000Z | kubernetes/client/models/v1_csi_driver_spec.py | sthagen/kubernetes-client-python | 3a183048d7d568ba5ea418bcfb8f61713908d3ea | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.24
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1CSIDriverSpec(object):
    """Model of the ``CSIDriverSpec`` Kubernetes API object.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'attach_required': 'bool',
        'fs_group_policy': 'str',
        'pod_info_on_mount': 'bool',
        'requires_republish': 'bool',
        'storage_capacity': 'bool',
        'token_requests': 'list[StorageV1TokenRequest]',
        'volume_lifecycle_modes': 'list[str]'
    }

    attribute_map = {
        'attach_required': 'attachRequired',
        'fs_group_policy': 'fsGroupPolicy',
        'pod_info_on_mount': 'podInfoOnMount',
        'requires_republish': 'requiresRepublish',
        'storage_capacity': 'storageCapacity',
        'token_requests': 'tokenRequests',
        'volume_lifecycle_modes': 'volumeLifecycleModes'
    }

    def __init__(self, attach_required=None, fs_group_policy=None, pod_info_on_mount=None, requires_republish=None, storage_capacity=None, token_requests=None, volume_lifecycle_modes=None, local_vars_configuration=None):  # noqa: E501
        """V1CSIDriverSpec - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._attach_required = None
        self._fs_group_policy = None
        self._pod_info_on_mount = None
        self._requires_republish = None
        self._storage_capacity = None
        self._token_requests = None
        self._volume_lifecycle_modes = None
        self.discriminator = None

        # Only assign fields that were explicitly supplied so that omitted
        # (None) fields remain unset in the serialized representation.
        if attach_required is not None:
            self.attach_required = attach_required
        if fs_group_policy is not None:
            self.fs_group_policy = fs_group_policy
        if pod_info_on_mount is not None:
            self.pod_info_on_mount = pod_info_on_mount
        if requires_republish is not None:
            self.requires_republish = requires_republish
        if storage_capacity is not None:
            self.storage_capacity = storage_capacity
        if token_requests is not None:
            self.token_requests = token_requests
        if volume_lifecycle_modes is not None:
            self.volume_lifecycle_modes = volume_lifecycle_modes

    @property
    def attach_required(self):
        """Gets the attach_required of this V1CSIDriverSpec.  # noqa: E501

        attachRequired indicates this CSI volume driver requires an attach
        operation (because it implements the CSI ControllerPublishVolume()
        method) and that the Kubernetes attach detach controller should call
        the attach volume interface and wait until the volume is attached
        before proceeding to mounting. This field is immutable.

        :return: The attach_required of this V1CSIDriverSpec.  # noqa: E501
        :rtype: bool
        """
        return self._attach_required

    @attach_required.setter
    def attach_required(self, attach_required):
        """Sets the attach_required of this V1CSIDriverSpec.

        :param attach_required: The attach_required of this V1CSIDriverSpec.  # noqa: E501
        :type: bool
        """
        self._attach_required = attach_required

    @property
    def fs_group_policy(self):
        """Gets the fs_group_policy of this V1CSIDriverSpec.  # noqa: E501

        Defines if the underlying volume supports changing ownership and
        permission of the volume before being mounted. Defaults to
        ReadWriteOnceWithFSType. This field is immutable.

        :return: The fs_group_policy of this V1CSIDriverSpec.  # noqa: E501
        :rtype: str
        """
        return self._fs_group_policy

    @fs_group_policy.setter
    def fs_group_policy(self, fs_group_policy):
        """Sets the fs_group_policy of this V1CSIDriverSpec.

        :param fs_group_policy: The fs_group_policy of this V1CSIDriverSpec.  # noqa: E501
        :type: str
        """
        self._fs_group_policy = fs_group_policy

    @property
    def pod_info_on_mount(self):
        """Gets the pod_info_on_mount of this V1CSIDriverSpec.  # noqa: E501

        If set to true, podInfoOnMount indicates this CSI volume driver
        requires additional pod information (like podName, podUID, etc.)
        during mount operations; Kubelet will then pass pod information as
        VolumeContext in the CSI NodePublishVolume() calls. Default is
        false. This field is immutable.

        :return: The pod_info_on_mount of this V1CSIDriverSpec.  # noqa: E501
        :rtype: bool
        """
        return self._pod_info_on_mount

    @pod_info_on_mount.setter
    def pod_info_on_mount(self, pod_info_on_mount):
        """Sets the pod_info_on_mount of this V1CSIDriverSpec.

        :param pod_info_on_mount: The pod_info_on_mount of this V1CSIDriverSpec.  # noqa: E501
        :type: bool
        """
        self._pod_info_on_mount = pod_info_on_mount

    @property
    def requires_republish(self):
        """Gets the requires_republish of this V1CSIDriverSpec.  # noqa: E501

        RequiresRepublish indicates the CSI driver wants `NodePublishVolume`
        being periodically called to reflect any possible change in the
        mounted volume. This field defaults to false.

        :return: The requires_republish of this V1CSIDriverSpec.  # noqa: E501
        :rtype: bool
        """
        return self._requires_republish

    @requires_republish.setter
    def requires_republish(self, requires_republish):
        """Sets the requires_republish of this V1CSIDriverSpec.

        :param requires_republish: The requires_republish of this V1CSIDriverSpec.  # noqa: E501
        :type: bool
        """
        self._requires_republish = requires_republish

    @property
    def storage_capacity(self):
        """Gets the storage_capacity of this V1CSIDriverSpec.  # noqa: E501

        If set to true, storageCapacity indicates that the CSI volume driver
        wants pod scheduling to consider the storage capacity that the
        driver deployment will report by creating CSIStorageCapacity objects
        with capacity information. This field was immutable in
        Kubernetes <= 1.22 and now is mutable.

        :return: The storage_capacity of this V1CSIDriverSpec.  # noqa: E501
        :rtype: bool
        """
        return self._storage_capacity

    @storage_capacity.setter
    def storage_capacity(self, storage_capacity):
        """Sets the storage_capacity of this V1CSIDriverSpec.

        :param storage_capacity: The storage_capacity of this V1CSIDriverSpec.  # noqa: E501
        :type: bool
        """
        self._storage_capacity = storage_capacity

    @property
    def token_requests(self):
        """Gets the token_requests of this V1CSIDriverSpec.  # noqa: E501

        TokenRequests indicates the CSI driver needs pods' service account
        tokens it is mounting volume for to do necessary authentication;
        Kubelet will pass the tokens in VolumeContext in the CSI
        NodePublishVolume calls.

        :return: The token_requests of this V1CSIDriverSpec.  # noqa: E501
        :rtype: list[StorageV1TokenRequest]
        """
        return self._token_requests

    @token_requests.setter
    def token_requests(self, token_requests):
        """Sets the token_requests of this V1CSIDriverSpec.

        :param token_requests: The token_requests of this V1CSIDriverSpec.  # noqa: E501
        :type: list[StorageV1TokenRequest]
        """
        self._token_requests = token_requests

    @property
    def volume_lifecycle_modes(self):
        """Gets the volume_lifecycle_modes of this V1CSIDriverSpec.  # noqa: E501

        volumeLifecycleModes defines what kind of volumes this CSI volume
        driver supports. The default if the list is empty is "Persistent";
        the other mode is "Ephemeral" (inline volumes defined via
        CSIVolumeSource). This field is beta and immutable.

        :return: The volume_lifecycle_modes of this V1CSIDriverSpec.  # noqa: E501
        :rtype: list[str]
        """
        return self._volume_lifecycle_modes

    @volume_lifecycle_modes.setter
    def volume_lifecycle_modes(self, volume_lifecycle_modes):
        """Sets the volume_lifecycle_modes of this V1CSIDriverSpec.

        :param volume_lifecycle_modes: The volume_lifecycle_modes of this V1CSIDriverSpec.  # noqa: E501
        :type: list[str]
        """
        self._volume_lifecycle_modes = volume_lifecycle_modes

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models (anything with to_dict) found
        # directly, in lists, or in dict values.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1CSIDriverSpec):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1CSIDriverSpec):
            return True

        return self.to_dict() != other.to_dict()
| 66.030928 | 1,455 | 0.724226 | 2,566 | 19,215 | 5.315666 | 0.141465 | 0.022287 | 0.043109 | 0.03849 | 0.808138 | 0.766716 | 0.751246 | 0.713196 | 0.667962 | 0.625806 | 0 | 0.013059 | 0.214936 | 19,215 | 290 | 1,456 | 66.258621 | 0.89115 | 0.684778 | 0 | 0.089431 | 1 | 0 | 0.081171 | 0.013854 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162602 | false | 0 | 0.03252 | 0 | 0.333333 | 0.01626 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a564097a704ad171163fd41c266b5117bcfcb3f | 1,717 | py | Python | sdk/keyvault/azure-keyvault-keys/tests/test_context_manager_async.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/keyvault/azure-keyvault-keys/tests/test_context_manager_async.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/keyvault/azure-keyvault-keys/tests/test_context_manager_async.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from azure.keyvault.keys.aio import KeyClient
from azure.keyvault.keys.crypto.aio import CryptographyClient
import pytest
from _shared.helpers_async import AsyncMockTransport
@pytest.mark.asyncio
async def test_key_client_close():
    """close() should exit the transport without ever having entered it."""
    transport = AsyncMockTransport()
    client = KeyClient(vault_url="https://localhost", credential=object(), transport=transport)
    await client.close()
    # The transport was never entered as a context manager, only closed
    assert transport.__aenter__.call_count == 0
    assert transport.__aexit__.call_count == 1
@pytest.mark.asyncio
async def test_key_client_context_manager():
    """Using the client as a context manager enters/exits its transport once."""
    transport = AsyncMockTransport()
    client = KeyClient(vault_url="https://localhost", credential=object(), transport=transport)
    async with client:
        assert transport.__aenter__.call_count == 1
    # Exactly one enter and one exit over the whole with-block
    assert transport.__aenter__.call_count == 1
    assert transport.__aexit__.call_count == 1
@pytest.mark.asyncio
async def test_crypto_client_close():
    """close() should exit the transport without ever having entered it."""
    transport = AsyncMockTransport()
    client = CryptographyClient(key="https://localhost/a/b/c", credential=object(), transport=transport)
    await client.close()
    # The transport was never entered as a context manager, only closed
    assert transport.__aenter__.call_count == 0
    assert transport.__aexit__.call_count == 1
@pytest.mark.asyncio
async def test_crypto_client_context_manager():
    """Using the client as a context manager enters/exits its transport once."""
    transport = AsyncMockTransport()
    client = CryptographyClient(key="https://localhost/a/b/c", credential=object(), transport=transport)
    async with client:
        assert transport.__aenter__.call_count == 1
    # Exactly one enter and one exit over the whole with-block
    assert transport.__aenter__.call_count == 1
    assert transport.__aexit__.call_count == 1
| 34.34 | 104 | 0.725102 | 194 | 1,717 | 6.06701 | 0.257732 | 0.127443 | 0.067969 | 0.127443 | 0.824129 | 0.815633 | 0.791844 | 0.791844 | 0.751912 | 0.751912 | 0 | 0.006743 | 0.136284 | 1,717 | 49 | 105 | 35.040816 | 0.786918 | 0.082702 | 0 | 0.764706 | 0 | 0 | 0.050955 | 0 | 0 | 0 | 0 | 0 | 0.294118 | 1 | 0 | false | 0 | 0.117647 | 0 | 0.117647 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a661e645e0390ba482bbfb7d03eaac310c68829 | 123 | py | Python | pyslave/drivers/nidaq/__init__.py | NS2LPS/pyslave | adc30a4f762f42811587be1f10b08f1d2865098e | [
"MIT"
] | 1 | 2020-10-01T15:12:12.000Z | 2020-10-01T15:12:12.000Z | pyslave/drivers/nidaq/__init__.py | NS2LPS/pyslave | adc30a4f762f42811587be1f10b08f1d2865098e | [
"MIT"
] | null | null | null | pyslave/drivers/nidaq/__init__.py | NS2LPS/pyslave | adc30a4f762f42811587be1f10b08f1d2865098e | [
"MIT"
] | null | null | null | __drivers__ = {'NI 9269': 'nidaq.nidaq.nidaq_generic_ao',
'NI 9239 (BNC)': 'nidaq.nidaq.nidaq_generic_ai',}
| 41 | 64 | 0.634146 | 16 | 123 | 4.375 | 0.5625 | 0.571429 | 0.428571 | 0.628571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.080808 | 0.195122 | 123 | 2 | 65 | 61.5 | 0.626263 | 0 | 0 | 0 | 0 | 0 | 0.617886 | 0.455285 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6ab616d032978d23bcd00f9a7bf1b458a0db0d28 | 233,273 | py | Python | pynetdicom/tests/test_assoc.py | WoonchanCho/pynetdicom | 137284817df3a605a677af5ee4294e961a7cc6d7 | [
"MIT"
] | null | null | null | pynetdicom/tests/test_assoc.py | WoonchanCho/pynetdicom | 137284817df3a605a677af5ee4294e961a7cc6d7 | [
"MIT"
] | null | null | null | pynetdicom/tests/test_assoc.py | WoonchanCho/pynetdicom | 137284817df3a605a677af5ee4294e961a7cc6d7 | [
"MIT"
] | null | null | null | """Association testing"""
from datetime import datetime
from io import BytesIO
import logging
import os
from pathlib import Path
import queue
import socket
import sys
import time
import threading
import pytest
from pydicom import dcmread
from pydicom.dataset import Dataset, FileMetaDataset
from pydicom.uid import (
UID,
ImplicitVRLittleEndian,
ExplicitVRLittleEndian,
JPEGBaseline,
JPEG2000,
JPEG2000Lossless,
DeflatedExplicitVRLittleEndian,
ExplicitVRBigEndian
)
from pynetdicom import (
AE, VerificationPresentationContexts, build_context, evt, _config,
debug_logger, build_role
)
from pynetdicom.association import Association
from pynetdicom.dimse_primitives import C_STORE, C_FIND, C_GET, C_MOVE
from pynetdicom.dsutils import encode, decode
from pynetdicom.events import Event
from pynetdicom._globals import MODE_REQUESTOR, MODE_ACCEPTOR
from pynetdicom.pdu_primitives import (
UserIdentityNegotiation, SOPClassExtendedNegotiation,
SOPClassCommonExtendedNegotiation, SCP_SCU_RoleSelectionNegotiation,
AsynchronousOperationsWindowNegotiation, A_ASSOCIATE
)
from pynetdicom.sop_class import (
VerificationSOPClass,
CTImageStorage, MRImageStorage, RTImageStorage,
PatientRootQueryRetrieveInformationModelFind,
PatientRootQueryRetrieveInformationModelGet,
PatientRootQueryRetrieveInformationModelMove,
PatientStudyOnlyQueryRetrieveInformationModelMove,
StudyRootQueryRetrieveInformationModelMove,
SecondaryCaptureImageStorage,
UnifiedProcedureStepPullSOPClass,
UnifiedProcedureStepPushSOPClass,
UnifiedProcedureStepWatchSOPClass
)
#debug_logger()
# Fixture datasets used throughout the association tests
TEST_DS_DIR = os.path.join(os.path.dirname(__file__), 'dicom_files')
BIG_DATASET = dcmread(os.path.join(TEST_DS_DIR, 'RTImageStorage.dcm'))  # 2.1 M
DATASET_PATH = os.path.join(TEST_DS_DIR, 'CTImageStorage.dcm')
BAD_DATASET_PATH = os.path.join(TEST_DS_DIR, 'CTImageStorage_bad_meta.dcm')
DATASET = dcmread(DATASET_PATH)
# Dataset encoded with the JPEG2000Lossless transfer syntax
COMP_DATASET = dcmread(
    os.path.join(TEST_DS_DIR, 'MRImageStorage_JPG2000_Lossless.dcm')
)
# Dataset encoded with the DeflatedExplicitVRLittleEndian transfer syntax
DEFL_DATASET = dcmread(
    os.path.join(TEST_DS_DIR, 'SCImageStorage_Deflated.dcm')
)
class DummyDIMSE(object):
    """A minimal stand-in for an association's DIMSE service provider.

    Captures the last response primitive handed to ``send_msg`` (and its
    ``Status``) so a test can inspect what would have been sent, and always
    reports that no incoming message is available.
    """

    def __init__(self):
        # Nothing sent yet; the queue mirrors the real provider's interface
        self.status = None
        self.msg_queue = queue.Queue()

    def send_msg(self, rsp, context_id):
        """Record the outgoing response primitive instead of sending it."""
        self.rsp = rsp
        self.status = rsp.Status

    def get_msg(self, block=False):
        """Report that no (event, message) pair is waiting."""
        return (None, None)
class TestAssociation(object):
"""Run tests on Associtation."""
    def setup(self):
        """This function runs prior to all test methods"""
        # Reset so teardown() knows whether a server AE needs shutting down
        self.ae = None
    def teardown(self):
        """This function runs after all test methods"""
        # Shut down any server started during the test so its port is freed
        if self.ae:
            self.ae.shutdown()
    def test_bad_connection(self):
        """Test connect to non-AE"""
        # sometimes causes hangs in Travis
        ae = AE()
        ae.add_requested_context(VerificationSOPClass)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        # Port 22 is not a DICOM AE, so the association cannot be established
        assoc = ae.associate('localhost', 22)
        assert not assoc.is_established
    def test_connection_refused(self):
        """Test connection refused"""
        ae = AE()
        ae.add_requested_context(VerificationSOPClass)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        # Nothing is listening on port 11120, so the TCP connection fails
        assoc = ae.associate('localhost', 11120)
        assert not assoc.is_established
    def test_req_no_presentation_context(self):
        """Test rejection due to no acceptable presentation contexts"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        # Requestor proposes only CT Image Storage, which the SCP does not
        # support, so no presentation context can be accepted
        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert not assoc.is_established
        assert assoc.is_aborted

        scp.shutdown()
    def test_peer_releases_assoc(self):
        """Test peer releases association"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        # The release is initiated from the acceptor's side
        scp.active_associations[0].release()
        assert assoc.is_released
        assert not assoc.is_established

        scp.shutdown()
    def test_peer_aborts_assoc(self):
        """Test peer aborts association."""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        # The abort is initiated from the acceptor's side
        scp.active_associations[0].abort()
        assert assoc.is_aborted
        assert not assoc.is_established

        scp.shutdown()
    def test_peer_rejects_assoc(self):
        """Test peer rejects assoc"""
        self.ae = ae = AE()
        # The requestor's calling AE title won't match, so the SCP rejects
        ae.require_calling_aet = [b'HAHA NOPE']
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(VerificationSOPClass)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        time.sleep(0.1)  # give the rejection time to be processed
        assert assoc.is_rejected
        assert not assoc.is_established

        scp.shutdown()
    def test_assoc_release(self):
        """Test Association release"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        # Simple release
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established

        # Simple release, then release again: second call is a no-op
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        assert assoc.is_released
        assoc.release()
        assert assoc.is_released

        # Simple release, then abort: abort after release is a no-op
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.release()
        assert assoc.is_released
        assert assoc.is_released
        assert not assoc.is_established
        assoc.abort()
        assert not assoc.is_aborted

        scp.shutdown()
    def test_assoc_abort(self):
        """Test Association abort"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        # Simple abort
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.abort()
        assert not assoc.is_established
        assert assoc.is_aborted

        # Simple abort, then release: release after abort is a no-op
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.abort()
        assert not assoc.is_established
        assert assoc.is_aborted
        assoc.release()
        assert assoc.is_aborted
        assert not assoc.is_released

        # Simple abort, then abort again: second call is a no-op
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.abort()
        assert assoc.is_aborted
        assert not assoc.is_established
        assoc.abort()

        scp.shutdown()
    def test_scp_removed_ui(self):
        """Test SCP removes UI negotiation"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        # Propose User Identity negotiation; association still establishes
        ui = UserIdentityNegotiation()
        ui.user_identity_type = 0x01
        ui.primary_field = b'pynetdicom'

        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112, ext_neg=[ui])
        assert assoc.is_established
        assoc.release()
        assert assoc.is_released

        scp.shutdown()
    def test_scp_removed_ext_neg(self):
        """Test SCP removes ex negotiation"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        # Propose SOP Class Extended negotiation; association still establishes
        ext = SOPClassExtendedNegotiation()
        ext.sop_class_uid = '1.1.1.1'
        ext.service_class_application_information = b'\x01\x02'

        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112, ext_neg=[ext])
        assert assoc.is_established
        assoc.release()
        assert assoc.is_released

        scp.shutdown()
    def test_scp_removed_com_ext_neg(self):
        """Test SCP removes common ext negotiation"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        # Propose SOP Class Common Extended negotiation; association still
        # establishes
        ext = SOPClassCommonExtendedNegotiation()
        ext.related_general_sop_class_identification = ['1.2.1']
        ext.sop_class_uid = '1.1.1.1'
        ext.service_class_uid = '1.1.3'

        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112, ext_neg=[ext])
        assert assoc.is_established
        assoc.release()
        assert assoc.is_released

        scp.shutdown()
    def test_scp_assoc_limit(self):
        """Test SCP limits associations"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        # Acceptor allows at most one simultaneous association
        ae.maximum_associations = 1
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)

        ae = AE()
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # Second simultaneous request exceeds the limit
        assoc_2 = ae.associate('localhost', 11112)
        assert not assoc_2.is_established
        assoc.release()
        assert assoc.is_released

        scp.shutdown()
    def test_require_called_aet(self):
        """SCP requires matching called AET"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        # SCP only accepts requests whose called AE title matches its own
        ae.require_called_aet = True
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(VerificationSOPClass)
        # Default called AE title doesn't match, so the request is rejected
        assoc = ae.associate('localhost', 11112)
        assert not assoc.is_established
        assert assoc.is_rejected

        scp.shutdown()
    def test_require_calling_aet(self):
        """SCP requires matching called AET"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        # SCP only accepts requests from this calling AE title
        ae.require_calling_aet = [b'TESTSCP']
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(VerificationSOPClass)
        # Default calling AE title doesn't match, so the request is rejected
        assoc = ae.associate('localhost', 11112)
        assert not assoc.is_established
        assert assoc.is_rejected

        scp.shutdown()
    def test_dimse_timeout(self):
        """Test that the DIMSE timeout works"""
        def handle(event):
            # Respond more slowly than the requestor's 0.1 s DIMSE timeout
            time.sleep(0.2)
            return 0x0000

        self.ae = ae = AE()
        ae.add_supported_context(VerificationSOPClass)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.dimse_timeout = 0.1
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_ECHO, handle)]
        )

        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.dimse_timeout == 0.1
        assert assoc.dimse.dimse_timeout == 0.1
        assert assoc.is_established
        assoc.send_c_echo()
        # The timed-out DIMSE exchange aborts the association, so the
        # subsequent release cannot complete normally
        assoc.release()
        assert not assoc.is_released
        assert assoc.is_aborted

        scp.shutdown()
def test_multiple_association_release_cycles(self):
    """Test repeatedly associating and releasing"""
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(VerificationSOPClass)
    # Associate, echo and release repeatedly against the same server
    for ii in range(10):
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assert not assoc.is_released
        assoc.send_c_echo()
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
    scp.shutdown()
def test_local(self):
    """Test Association.local."""
    ae = AE()
    # In either mode, `local` reflects this AE's side of the association
    for mode in ('requestor', 'acceptor'):
        assoc = Association(ae, mode)
        getattr(assoc, mode).ae_title = ae.ae_title
        assert assoc.local['ae_title'] == b'PYNETDICOM '
def test_remote(self):
    """Test Association.remote."""
    ae = AE()
    # Before any negotiation the peer's AE title is empty in either mode
    for mode in ('requestor', 'acceptor'):
        assoc = Association(ae, mode)
        assert assoc.remote['ae_title'] == b''
def test_mode_raises(self):
    """Test exception is raised if invalid mode."""
    # Only 'requestor' and 'acceptor' are valid association modes
    with pytest.raises(
        ValueError,
        match=(
            r"Invalid association `mode` value, must be either 'requestor' or "
            "'acceptor'"
        ),
    ):
        Association(None, 'nope')
def test_setting_socket_override_raises(self):
    """Test that set_socket raises exception if socket set."""
    ae = AE()
    assoc = Association(ae, MODE_REQUESTOR)
    # Simulate an already-assigned transport socket
    assoc.dul.socket = 'abc'
    with pytest.raises(
        RuntimeError, match=r"The Association already has a socket set."
    ):
        assoc.set_socket('cba')
    # The existing socket must be left untouched
    assert assoc.dul.socket == 'abc'
def test_invalid_context(self, caplog):
    """Test receiving an message with invalid context ID"""
    with caplog.at_level(logging.INFO, logger='pynetdicom'):
        ae = AE()
        ae.add_requested_context(VerificationSOPClass)
        ae.add_requested_context(CTImageStorage)
        # SCP only supports Verification, so the CT context is rejected
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        assoc = ae.associate('localhost', 11112)
        assoc.dimse_timeout = 0.1
        assert assoc.is_established
        # Fake the rejected CT context as locally accepted (ID 3) so the
        # requestor sends a C-STORE over a context the peer rejected
        assoc._accepted_cx[3] = assoc._rejected_cx[0]
        assoc._accepted_cx[3].result = 0x00
        assoc._accepted_cx[3]._as_scu = True
        assoc._accepted_cx[3]._as_scp = True
        ds = Dataset()
        ds.SOPClassUID = CTImageStorage
        ds.SOPInstanceUID = '1.2.3.4'
        ds.file_meta = FileMetaDataset()
        ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
        result = assoc.send_c_store(ds)
        time.sleep(0.1)
        # The peer should abort and log the invalid context ID
        assert assoc.is_aborted
        assert (
            'Received DIMSE message with invalid or rejected context ID'
        ) in caplog.text
        scp.shutdown()
def test_get_events(self):
    """Test Association.get_events()."""
    ae = AE()
    ae.add_requested_context(VerificationSOPClass)
    assoc = ae.associate('localhost', 11112)
    # The bound event list includes both intervention and notification events
    bound = assoc.get_events()
    assert evt.EVT_C_STORE in bound
    assert evt.EVT_USER_ID in bound
def test_requested_handler_abort(self):
    """Test the EVT_REQUESTED handler sending abort."""
    def handle_req(event):
        # Abort from within the handler before negotiation completes
        event.assoc.acse.send_abort(0x00)
        # Give the requestor time to process the abort
        time.sleep(0.1)

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    hh = [(evt.EVT_REQUESTED, handle_req)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)
    ae.add_requested_context(VerificationSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert not assoc.is_established
    assert assoc.is_aborted
    scp.shutdown()
def test_requested_handler_reject(self):
    """Test the EVT_REQUESTED handler sending reject."""
    def handle_req(event):
        # Reject from within the handler before negotiation completes
        event.assoc.acse.send_reject(0x02, 0x01, 0x01)
        # Give the requestor time to process the message before killing
        # the connection
        time.sleep(0.1)

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    hh = [(evt.EVT_REQUESTED, handle_req)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)
    ae.add_requested_context(VerificationSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert not assoc.is_established
    assert assoc.is_rejected
    scp.shutdown()
def test_unknown_abort_source(self):
    """Test an unknown abort source handled correctly #561"""
    def handle_req(event):
        # Raw A-ABORT PDU (type 0x07) carrying an unrecognised source
        # value — regression input for #561
        pdu = b"\x07\x00\x00\x00\x00\x04\x00\x00\x01\x00"
        event.assoc.dul.socket.send(pdu)
        # Give the requestor time to process the message before killing
        # the connection
        time.sleep(0.1)

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    hh = [(evt.EVT_REQUESTED, handle_req)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)
    ae.add_requested_context(VerificationSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert not assoc.is_established
    assert assoc.is_aborted
    scp.shutdown()
class TestCStoreSCP(object):
    """Tests for Association._c_store_scp()."""
    # Used with C-GET (always) and C-MOVE (over the same association)
    def setup(self):
        # Reset so teardown knows whether an AE needs shutting down
        self.ae = None

    def teardown(self):
        if self.ae:
            self.ae.shutdown()

    def test_no_context(self):
        """Test correct response if no valid presentation context."""
        def handle(event):
            return 0x0000
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        ae.add_supported_context(RTImageStorage)
        # Storage SCP
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle)]
        )
        # Only RT Image Storage is requested, so there's no accepted
        # SCP-role CT context for the incoming C-STORE to use
        ae.add_requested_context(RTImageStorage)
        role = build_role(CTImageStorage, scu_role=False, scp_role=True)
        assoc = ae.associate('localhost', 11112, ext_neg=[role])
        assert assoc.is_established
        # Build a C-STORE request by hand and feed it directly to the
        # SCP handler, capturing the response with a dummy DIMSE provider
        req = C_STORE()
        req.MessageID = 1
        req.AffectedSOPClassUID = DATASET.SOPClassUID
        req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID
        req.Priority = 1
        req._context_id = 1
        bytestream = encode(DATASET, True, True)
        req.DataSet = BytesIO(bytestream)
        assoc.dimse = DummyDIMSE()
        assoc._c_store_scp(req)
        # 0x0122: SOP Class Not Supported
        assert assoc.dimse.status == 0x0122
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_handler_exception(self):
        """Test correct response if exception raised by handler."""
        def handle(event):
            # The return is unreachable; the raise is the behaviour under test
            raise ValueError()
            return 0x0000
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
        # Storage SCP
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(CTImageStorage)
        role = build_role(CTImageStorage, scu_role=False, scp_role=True)
        assoc = ae.associate(
            'localhost', 11112, ext_neg=[role],
            evt_handlers=[(evt.EVT_C_STORE, handle)]
        )
        assert assoc.is_established
        req = C_STORE()
        req.MessageID = 1
        req.AffectedSOPClassUID = DATASET.SOPClassUID
        req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID
        req.Priority = 1
        req._context_id = 1
        bytestream = encode(DATASET, True, True)
        req.DataSet = BytesIO(bytestream)
        assoc.dimse = DummyDIMSE()
        assoc._c_store_scp(req)
        # 0xC211: failure status used when the handler raises
        assert assoc.dimse.status == 0xC211
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_handler_status_ds_no_status(self):
        """Test handler with status dataset with no Status element."""
        def handle(event):
            # Returned dataset is missing the mandatory Status element
            return Dataset()
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
        # Storage SCP
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(CTImageStorage)
        role = build_role(CTImageStorage, scu_role=False, scp_role=True)
        assoc = ae.associate(
            'localhost', 11112, ext_neg=[role],
            evt_handlers=[(evt.EVT_C_STORE, handle)]
        )
        assert assoc.is_established
        req = C_STORE()
        req.MessageID = 1
        req.AffectedSOPClassUID = DATASET.SOPClassUID
        req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID
        req.Priority = 1
        req._context_id = 1
        bytestream = encode(DATASET, True, True)
        req.DataSet = BytesIO(bytestream)
        assoc.dimse = DummyDIMSE()
        assoc._c_store_scp(req)
        # 0xC001: failure status when the status dataset has no Status
        assert assoc.dimse.status == 0xC001
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_handler_status_ds_unknown_elems(self):
        """Test handler with status dataset with an unknown element."""
        def handle(event):
            # PatientName is not a valid C-STORE response element
            ds = Dataset()
            ds.Status = 0x0000
            ds.PatientName = 'ABCD'
            return ds
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
        # Storage SCP
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(CTImageStorage)
        role = build_role(CTImageStorage, scu_role=False, scp_role=True)
        assoc = ae.associate(
            'localhost', 11112, ext_neg=[role],
            evt_handlers=[(evt.EVT_C_STORE, handle)]
        )
        assert assoc.is_established
        req = C_STORE()
        req.MessageID = 1
        req.AffectedSOPClassUID = DATASET.SOPClassUID
        req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID
        req.Priority = 1
        req._context_id = 1
        bytestream = encode(DATASET, True, True)
        req.DataSet = BytesIO(bytestream)
        assoc.dimse = DummyDIMSE()
        assoc._c_store_scp(req)
        # The unknown element must be stripped from the response
        rsp = assoc.dimse.rsp
        assert rsp.Status == 0x0000
        assert not hasattr(rsp, 'PatientName')
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_handler_invalid_status(self):
        """Test handler with invalid status."""
        def handle(event):
            # Neither an int nor a Dataset — an invalid status type
            return 'abcd'
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
        # Storage SCP
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(CTImageStorage)
        role = build_role(CTImageStorage, scu_role=False, scp_role=True)
        assoc = ae.associate(
            'localhost', 11112, ext_neg=[role],
            evt_handlers=[(evt.EVT_C_STORE, handle)]
        )
        assert assoc.is_established
        req = C_STORE()
        req.MessageID = 1
        req.AffectedSOPClassUID = DATASET.SOPClassUID
        req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID
        req.Priority = 1
        req._context_id = 1
        bytestream = encode(DATASET, True, True)
        req.DataSet = BytesIO(bytestream)
        assoc.dimse = DummyDIMSE()
        assoc._c_store_scp(req)
        # 0xC002: failure status when the handler's status type is invalid
        assert assoc.dimse.status == 0xC002
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_handler_unknown_status(self):
        """Test handler with invalid status."""
        def handle(event):
            # A well-formed but non-standard status value
            return 0xDEFA
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
        # Storage SCP
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(CTImageStorage)
        role = build_role(CTImageStorage, scu_role=False, scp_role=True)
        assoc = ae.associate(
            'localhost', 11112, ext_neg=[role],
            evt_handlers=[(evt.EVT_C_STORE, handle)]
        )
        assert assoc.is_established
        req = C_STORE()
        req.MessageID = 1
        req.AffectedSOPClassUID = DATASET.SOPClassUID
        req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID
        req.Priority = 1
        req._context_id = 1
        bytestream = encode(DATASET, True, True)
        req.DataSet = BytesIO(bytestream)
        assoc.dimse = DummyDIMSE()
        assoc._c_store_scp(req)
        # Unknown statuses are passed through unchanged
        assert assoc.dimse.status == 0xDEFA
        assoc.release()
        assert assoc.is_released
        scp.shutdown()
class TestAssociationSendCEcho(object):
    """Run tests on Association evt.EVT_C_ECHO handler."""
    def setup(self):
        """Run prior to each test"""
        # Reset so teardown knows whether an AE needs shutting down
        self.ae = None

    def teardown(self):
        """Clear any active threads"""
        if self.ae:
            self.ae.shutdown()

    def test_must_be_associated(self):
        """Test can't send without association."""
        # Test raise if assoc not established
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        # Sending after release must raise
        with pytest.raises(RuntimeError):
            assoc.send_c_echo()
        scp.shutdown()

    def test_no_abstract_syntax_match(self):
        """Test SCU when no accepted abstract syntax"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        # Only CT Image Storage is negotiated — no Verification context
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        with pytest.raises(ValueError):
            assoc.send_c_echo()
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_none(self):
        """Test no response from peer"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        # Stub DIMSE provider that never yields a response message
        class DummyDIMSE():
            msg_queue = queue.Queue()
            def send_msg(*args, **kwargs): return
            def get_msg(*args, **kwargs): return None, None
        # Pause the reactor before swapping in the stub
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        if assoc.is_established:
            assoc.send_c_echo()
            # No response -> the requestor aborts
            assert assoc.is_aborted
        scp.shutdown()

    def test_rsp_invalid(self):
        """Test invalid response received from peer"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        # Stub response flagged as invalid
        class DummyResponse():
            is_valid_response = False
        class DummyDIMSE():
            msg_queue = queue.Queue()
            def send_msg(*args, **kwargs): return
            def get_msg(*args, **kwargs): return None, DummyResponse()
        # Pause the reactor before swapping in the stub
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        if assoc.is_established:
            assoc.send_c_echo()
            # Invalid response -> the requestor aborts
            assert assoc.is_aborted
        scp.shutdown()

    def test_rsp_success(self):
        """Test receiving a success response from the peer"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_echo()
        # 0x0000: Success
        assert result.Status == 0x0000
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_failure(self):
        """Test receiving a failure response from the peer"""
        def handler(event):
            # 0x0210: failure status returned by the echo handler
            return 0x0210
        self.ae = ae = AE()
        ae.add_supported_context(VerificationSOPClass)
        handlers = [(evt.EVT_C_ECHO, handler)]
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
        ae.add_requested_context(VerificationSOPClass)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_echo()
        assert result.Status == 0x0210
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_unknown_status(self):
        """Test unknown status value returned by peer"""
        def handler(event):
            # A non-standard status value should be passed through
            return 0xFFF0
        self.ae = ae = AE()
        ae.add_supported_context(VerificationSOPClass)
        handlers = [(evt.EVT_C_ECHO, handler)]
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
        ae.add_requested_context(VerificationSOPClass)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_echo()
        assert result.Status == 0xFFF0
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_multi_status(self):
        """Test receiving a status with extra elements"""
        def handler(event):
            # Status dataset with an optional ErrorComment element
            ds = Dataset()
            ds.Status = 0x0122
            ds.ErrorComment = 'Some comment'
            return ds
        self.ae = ae = AE()
        ae.add_supported_context(VerificationSOPClass)
        handlers = [(evt.EVT_C_ECHO, handler)]
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
        ae.add_requested_context(VerificationSOPClass)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_echo()
        # Both the status and the extra element should come through
        assert result.Status == 0x0122
        assert result.ErrorComment == 'Some comment'
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_abort_during(self):
        """Test aborting the association during message exchange"""
        def handle(event):
            # Abort mid-exchange from the acceptor's handler
            event.assoc.abort()
            return 0x0000
        self.ae = ae = AE()
        ae.acse_timeout = 1
        ae.dimse_timeout = 1
        ae.network_timeout = 1
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_ECHO, handle)]
        )
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_echo()
        # No usable response — an empty Dataset is returned
        assert result == Dataset()
        time.sleep(0.1)
        assert assoc.is_aborted
        scp.shutdown()

    def test_run_accept_scp_not_implemented(self):
        """Test association is aborted if non-implemented SCP requested."""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        # A private SOP Class with no implemented service class
        ae.add_supported_context('1.2.3.4')
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(VerificationSOPClass)
        ae.add_requested_context('1.2.3.4')
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_n_delete('1.2.3.4', '1.2.3')
        # No usable response — an empty Dataset is returned
        assert status == Dataset()
        time.sleep(0.1)
        assert assoc.is_aborted
        scp.shutdown()

    def test_rejected_contexts(self):
        """Test receiving a success response from the peer"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(VerificationSOPClass)
        # CT Image Storage isn't supported by the SCP so it gets rejected
        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assert len(assoc.rejected_contexts) == 1
        cx = assoc.rejected_contexts[0]
        assert cx.abstract_syntax == CTImageStorage
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_common_ext_neg_no_general_sop(self):
        """Test sending SOP Class Common Extended Negotiation."""
        # With no Related General SOP Classes
        self.ae = ae = AE()
        ae.add_supported_context(VerificationSOPClass)
        ae.add_requested_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        item = SOPClassCommonExtendedNegotiation()
        item.sop_class_uid = '1.2.3'
        item.service_class_uid = '2.3.4'
        assoc = ae.associate('localhost', 11112, ext_neg=[item])
        assert assoc.is_established
        result = assoc.send_c_echo()
        assert result.Status == 0x0000
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_changing_network_timeout(self):
        """Test changing timeout after associated."""
        self.ae = ae = AE()
        ae.add_supported_context(VerificationSOPClass)
        ae.add_requested_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # Changing the AE's timeout propagates to the active association
        ae.network_timeout = 1
        assert assoc.dul.network_timeout == 1
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_network_times_out_requestor(self):
        """Regression test for #286."""
        self.ae = ae = AE()
        ae.add_requested_context(VerificationSOPClass)
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # Shrink the requestor's network timeout so it expires while idle
        assert assoc.network_timeout == 60
        assoc.network_timeout = 0.5
        assert assoc.network_timeout == 0.5
        time.sleep(1.0)
        assert assoc.is_aborted
        scp.shutdown()

    def test_network_times_out_acceptor(self):
        """Regression test for #286."""
        self.ae = ae = AE()
        ae.add_requested_context(VerificationSOPClass)
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11113), block=False)
        assoc = ae.associate('localhost', 11113)
        # Shrink only the acceptor-side timeout; keep the requestor's long
        ae.network_timeout = 0.5
        assoc.network_timeout = 60
        assert assoc.network_timeout == 60
        assert assoc.is_established
        time.sleep(1.0)
        assert assoc.is_aborted
        scp.shutdown()
class TestAssociationSendCStore(object):
"""Run tests on Assocation send_c_store."""
def setup(self):
    """Run prior to each test"""
    # Reset so teardown knows whether an AE needs shutting down
    self.ae = None
def teardown(self):
    """Clear any active threads"""
    if self.ae:
        self.ae.shutdown()
    # Restore the module-level default changed by the chunked-send tests
    _config.STORE_SEND_CHUNKED_DATASET = False
def test_must_be_associated(self):
    """Test SCU can't send without association."""
    # Test raise if assoc not established
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    assoc.release()
    assert assoc.is_released
    assert not assoc.is_established
    # Sending after release must raise
    with pytest.raises(RuntimeError):
        assoc.send_c_store(DATASET)
    scp.shutdown()
def test_no_abstract_syntax_match(self):
    """Test SCU when no accepted abstract syntax"""
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    # Only Verification is negotiated — no storage context for DATASET
    ae.add_supported_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(VerificationSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    with pytest.raises(ValueError):
        assoc.send_c_store(DATASET)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_bad_priority(self):
    """Test bad priority raises exception"""
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # 0x0003 is not a valid C-STORE priority value
    with pytest.raises(ValueError):
        assoc.send_c_store(DATASET, priority=0x0003)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_fail_encode_dataset(self):
    """Test failure if unable to encode dataset"""
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Negotiate explicit VR only; the dataset below can't be encoded with it
    ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    ds = Dataset()
    ds.SOPClassUID = CTImageStorage
    ds.SOPInstanceUID = '1.2.3'
    # PerimeterValue is retired/unknown so explicit VR encoding fails
    ds.PerimeterValue = b'\x00\x01'
    ds.file_meta = FileMetaDataset()
    ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
    msg = r"Failed to encode the supplied dataset"
    with pytest.raises(ValueError, match=msg):
        assoc.send_c_store(ds)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_encode_compressed_dataset(self):
    """Test sending a dataset with a compressed transfer syntax """
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    # Negotiate a compressed transfer syntax (JPEG 2000 Lossless)
    ae.add_supported_context(MRImageStorage, JPEG2000Lossless)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(MRImageStorage, JPEG2000Lossless)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    result = assoc.send_c_store(COMP_DATASET)
    assert result.Status == 0x0000
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_none(self):
    """Test no response from peer"""
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    # Stub DIMSE provider that never yields a response message
    class DummyDIMSE():
        msg_queue = queue.Queue()
        def send_msg(*args, **kwargs): return
        def get_msg(*args, **kwargs): return None, None
    # Pause the reactor before swapping in the stub
    assoc._reactor_checkpoint.clear()
    while not assoc._is_paused:
        time.sleep(0.01)
    assoc.dimse = DummyDIMSE()
    assert assoc.is_established
    status = assoc.send_c_store(DATASET)
    # No response -> empty Dataset and an aborted association
    assert status == Dataset()
    assert assoc.is_aborted
    scp.shutdown()
def test_rsp_invalid(self):
    """Test invalid DIMSE message received from peer"""
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    # Stub response flagged as invalid
    class DummyResponse():
        is_valid_response = False
    class DummyDIMSE():
        msg_queue = queue.Queue()
        def send_msg(*args, **kwargs): return
        def get_msg(*args, **kwargs): return DummyResponse(), None
    # Pause the reactor before swapping in the stub
    assoc._reactor_checkpoint.clear()
    while not assoc._is_paused:
        time.sleep(0.01)
    assoc.dimse = DummyDIMSE()
    assert assoc.is_established
    status = assoc.send_c_store(DATASET)
    # Invalid response -> empty Dataset and an aborted association
    assert assoc.is_aborted
    assert status == Dataset()
    scp.shutdown()
def test_rsp_failure(self):
    """Test receiving a failure response from the peer"""
    def handle_store(event):
        # 0xC000: failure status returned by the store handler
        return 0xC000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    status = assoc.send_c_store(DATASET)
    assert status.Status == 0xC000
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_warning(self):
    """Test receiving a warning response from the peer"""
    def handle_store(event):
        # 0xB000: warning status returned by the store handler
        return 0xB000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    status = assoc.send_c_store(DATASET)
    assert status.Status == 0xB000
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_success(self):
    """Test receiving a success response from the peer"""
    def handle_store(event):
        # 0x0000: Success
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    status = assoc.send_c_store(DATASET)
    assert status.Status == 0x0000
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_unknown_status(self):
    """Test unknown status value returned by peer"""
    def handle_store(event):
        # A non-standard status value should be passed through unchanged
        return 0xFFF0

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    status = assoc.send_c_store(DATASET)
    assert status.Status == 0xFFF0
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_dataset_no_sop_class_raises(self):
    """Test sending a dataset without SOPClassUID raises."""
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    # Dataset deliberately lacks SOPClassUID
    ds = Dataset()
    ds.SOPInstanceUID = '1.2.3.4'
    ds.file_meta = FileMetaDataset()
    ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
    assert assoc.is_established
    assert 'SOPClassUID' not in ds
    # Plain strings — the originals were f-strings with no placeholders
    msg = (
        "Unable to send the dataset as one or more required "
        "element are missing: SOPClassUID"
    )
    with pytest.raises(AttributeError, match=msg):
        assoc.send_c_store(ds)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_dataset_no_transfer_syntax_raises(self):
    """Test sending a dataset without TransferSyntaxUID raises."""
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    ds = Dataset()
    ds.SOPInstanceUID = '1.2.3.4'
    ds.SOPClassUID = CTImageStorage
    # The same message is expected in both failure modes below, so
    # define it once (previously duplicated verbatim)
    msg = (
        r"Unable to determine the presentation context to use with "
        r"`dataset` as it contains no '\(0002,0010\) Transfer Syntax "
        r"UID' file meta information element"
    )
    # Case 1: no file_meta at all
    assert 'file_meta' not in ds
    with pytest.raises(AttributeError, match=msg):
        assoc.send_c_store(ds)
    # Case 2: file_meta present but missing TransferSyntaxUID
    ds.file_meta = FileMetaDataset()
    assert 'TransferSyntaxUID' not in ds.file_meta
    with pytest.raises(AttributeError, match=msg):
        assoc.send_c_store(ds)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_functional_common_ext_neg(self):
    """Test functioning of the SOP Class Common Extended negotiation."""
    def handle_ext(event):
        # Accept all proposed common extended negotiation items
        return event.items

    def handle_store(event):
        return 0x0000

    handlers = [
        (evt.EVT_C_STORE, handle_store),
        (evt.EVT_SOP_COMMON, handle_ext)
    ]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    ae.add_supported_context('1.2.3')
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    ae.add_requested_context('1.2.3')
    # sop_class_uid -> (service_class_uid, related general SOP classes)
    req = {
        '1.2.3' : ('1.2.840.10008.4.2', []),
        '1.2.3.1' : ('1.2.840.10008.4.2', ['1.1.1', '1.4.2']),
        '1.2.3.4' : ('1.2.111111', []),
        '1.2.3.5' : ('1.2.111111', ['1.2.4', '1.2.840.10008.1.1']),
    }
    ext_neg = []
    for kk, vv in req.items():
        item = SOPClassCommonExtendedNegotiation()
        item.sop_class_uid = kk
        item.service_class_uid = vv[0]
        item.related_general_sop_class_identification = vv[1]
        ext_neg.append(item)
    assoc = ae.associate('localhost', 11112, ext_neg=ext_neg)
    assert assoc.is_established
    # Storage of the private SOP Class should succeed via the negotiated
    # Storage Service Class ('1.2.840.10008.4.2')
    ds = Dataset()
    ds.SOPClassUID = '1.2.3'
    ds.SOPInstanceUID = '1.2.3.4'
    ds.file_meta = FileMetaDataset()
    ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
    status = assoc.send_c_store(ds)
    assert status.Status == 0x0000
    assoc.release()
    scp.shutdown()
def test_using_filepath(self):
    """Test using a file path to a dataset."""
    recv = []
    def handle_store(event):
        # Record each received dataset for post-shutdown checks
        recv.append(event.dataset)
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # Both str and pathlib.Path inputs should be accepted
    assert isinstance(DATASET_PATH, str)
    status = assoc.send_c_store(DATASET_PATH)
    assert status.Status == 0x0000
    p = Path(DATASET_PATH).resolve()
    assert isinstance(p, Path)
    status = assoc.send_c_store(p)
    assert status.Status == 0x0000
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
    assert 2 == len(recv)
    for ds in recv:
        assert "CompressedSamples^CT1" == ds.PatientName
        assert "DataSetTrailingPadding" in ds
def test_using_filepath_chunks(self):
    """Test chunked sending of a dataset given as a file path.

    With ``STORE_SEND_CHUNKED_DATASET`` enabled the dataset is streamed
    from file in PDU-sized chunks rather than decoded/re-encoded, so the
    receiver gets no ``file_meta``.  Exercises str and Path inputs and a
    zero (unlimited) maximum PDU size.
    """
    # Restore the global config flag afterwards so other tests aren't
    # affected if this test fails partway through.
    original = _config.STORE_SEND_CHUNKED_DATASET
    _config.STORE_SEND_CHUNKED_DATASET = True
    try:
        recv = []

        def handle_store(event):
            recv.append(event.dataset)
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=handlers
        )
        ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assert isinstance(DATASET_PATH, str)
        status = assoc.send_c_store(DATASET_PATH)
        assert status.Status == 0x0000
        p = Path(DATASET_PATH).resolve()
        assert isinstance(p, Path)
        status = assoc.send_c_store(p)
        assert status.Status == 0x0000
        assoc.release()
        assert assoc.is_released
        # Repeat with an unlimited maximum PDU size (0 == no limit)
        ae.maximum_pdu_size = 0
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_c_store(p)
        assert status.Status == 0x0000
        assoc.release()
        assert assoc.is_released
        scp.shutdown()
        assert 3 == len(recv)
        for ds in recv:
            # Chunked sends stream the raw file, so no file_meta is set
            assert not hasattr(ds, "file_meta")
            assert "CompressedSamples^CT1" == ds.PatientName
            assert 126 == len(ds.DataSetTrailingPadding)
    finally:
        _config.STORE_SEND_CHUNKED_DATASET = original
def test_using_filepath_chunks_missing(self):
    """Test chunked send when required file meta information is missing.

    Sending a file whose meta information lacks
    *Media Storage SOP Class UID* must raise ``AttributeError``.
    """
    # Restore the global config flag afterwards so other tests aren't
    # affected if this test fails partway through.
    original = _config.STORE_SEND_CHUNKED_DATASET
    _config.STORE_SEND_CHUNKED_DATASET = True
    try:
        recv = []

        def handle_store(event):
            recv.append(event.dataset)
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=handlers
        )
        ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assert isinstance(BAD_DATASET_PATH, str)
        msg = (
            r"one or more required file meta information elements are "
            r"missing: MediaStorageSOPClassUID"
        )
        with pytest.raises(AttributeError, match=msg):
            assoc.send_c_store(BAD_DATASET_PATH)
        assoc.release()
        assert assoc.is_released
        scp.shutdown()
    finally:
        _config.STORE_SEND_CHUNKED_DATASET = original
# Regression tests
def test_no_send_mismatch(self):
    """Test sending a dataset with mismatched transfer syntax (#206).

    Regression test: the dataset is encoded as JPEG Baseline but only
    Implicit VR LE was negotiated, so ``send_c_store`` must raise.
    """
    def handle_store(event):
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian)
    assoc = ae.associate('localhost', 11112)
    # Compressed dataset: can't be converted to the accepted syntax
    ds = Dataset()
    ds.SOPClassUID = CTImageStorage
    ds.SOPInstanceUID = '1.2.3.4'
    ds.file_meta = FileMetaDataset()
    ds.file_meta.TransferSyntaxUID = JPEGBaseline
    assert assoc.is_established
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer with 'JPEG Baseline \(Process 1\)' "
        r"transfer syntax for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc.send_c_store(ds)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_send_deflated(self):
    """Test sending a deflated encoded dataset (#482).

    Regression test: the dataset must survive a round trip when the
    negotiated transfer syntax is Deflated Explicit VR Little Endian.
    """
    recv_ds = []

    def handle_store(event):
        recv_ds.append(event.dataset)
        return 0x0000

    handlers = [(evt.EVT_C_STORE, handle_store)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(
        SecondaryCaptureImageStorage, DeflatedExplicitVRLittleEndian
    )
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(
        SecondaryCaptureImageStorage, DeflatedExplicitVRLittleEndian
    )
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    status = assoc.send_c_store(DEFL_DATASET)
    # Previously the returned status was captured but never checked
    assert status.Status == 0x0000
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
    # The deflated dataset decoded correctly on the receiving side
    assert '^^^^' == recv_ds[0].PatientName
class TestAssociationSendCFind(object):
    """Run tests on Assocation send_c_find."""

    def setup(self):
        """Run prior to each test"""
        # Minimal PATIENT-level query Identifier reused by most tests
        self.ds = Dataset()
        self.ds.PatientName = '*'
        self.ds.QueryRetrieveLevel = "PATIENT"
        self.ae = None

    def teardown(self):
        """Clear any active threads"""
        if self.ae:
            self.ae.shutdown()

    def test_must_be_associated(self):
        """Test can't send without association."""
        # Test raise if assoc not established
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        # send_c_find is a generator; the exception is raised on next()
        with pytest.raises(RuntimeError):
            next(assoc.send_c_find(
                self.ds, PatientRootQueryRetrieveInformationModelFind)
            )
        scp.shutdown()

    def test_no_abstract_syntax_match(self):
        """Test when no accepted abstract syntax"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        # Only Verification is negotiated, so a C-FIND context is absent
        ae.add_supported_context(VerificationSOPClass)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(VerificationSOPClass)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        def test():
            next(assoc.send_c_find(
                self.ds,
                PatientRootQueryRetrieveInformationModelFind)
            )
        with pytest.raises(ValueError):
            test()
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_bad_query_model(self):
        """Test invalid query_model value"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # 'XXX' is not a valid SOP Class UID for the query model
        with pytest.raises(ValueError):
            next(assoc.send_c_find(self.ds, query_model='XXX'))
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_fail_encode_identifier(self):
        """Test a failure in encoding the Identifier dataset"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(
            PatientRootQueryRetrieveInformationModelFind,
            ExplicitVRLittleEndian
        )
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # Mutate the shared module-level DATASET so encoding fails
        DATASET.PerimeterValue = b'\x00\x01'

        def test():
            next(assoc.send_c_find(
                DATASET, PatientRootQueryRetrieveInformationModelFind)
            )
        with pytest.raises(ValueError):
            test()
        assoc.release()
        assert assoc.is_released
        # NOTE(review): skipped if the expected raise never happens
        del DATASET.PerimeterValue  # Fix up our changes
        scp.shutdown()

    def test_rsp_failure(self):
        """Test receiving a failure response from the peer"""
        def handle(event):
            # 0xA700: Refused - Out of Resources
            yield 0xA700, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
                self.ds, PatientRootQueryRetrieveInformationModelFind):
            assert status.Status == 0xA700
            assert ds is None
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_pending(self):
        """Test receiving a pending response from the peer"""
        def handle(event):
            # One pending match; the service adds the final success
            yield 0xFF00, self.ds

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        assert 'PatientName' in ds
        (status, ds) = next(result)
        assert status.Status == 0x0000
        assert ds is None
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_success(self):
        """Test receiving a success response from the peer"""
        def handle(event):
            yield 0x0000, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0x0000
            assert ds is None
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_empty(self):
        """Test a handler that yields nothing (no matches).

        The service should still send a final success response.
        """
        # No matches
        def handle(event):
            pass

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0x0000
            assert ds is None
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_cancel(self):
        """Test receiving a cancel response from the peer"""
        def handle(event):
            # 0xFE00: Matching terminated due to Cancel
            yield 0xFE00, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0xFE00
            assert ds is None
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_invalid(self):
        """Test invalid DIMSE message response received from peer"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)

        class DummyResponse():
            is_valid_response = False

        class DummyDIMSE():
            # *args soaks up the bound-method ``self`` argument
            def send_msg(*args, **kwargs): return
            def get_msg(*args, **kwargs): return DummyResponse(), None

        # Pause the reactor before swapping in the dummy DIMSE provider
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        for (_, _) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            pass
        # An invalid response aborts the association
        assert assoc.is_aborted
        scp.shutdown()

    def test_rsp_unknown_status(self):
        """Test unknown status value returned by peer"""
        def handle(event):
            # 0xFFF0 isn't defined by the standard for C-FIND
            yield 0xFFF0, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0xFFF0
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_bad_dataset(self):
        """Test bad dataset returned by evt.EVT_C_FIND handler"""
        def handle(event):
            # Yield something that isn't a Dataset
            def test(): pass
            yield 0xFF00, test

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(
            PatientRootQueryRetrieveInformationModelFind,
            ExplicitVRLittleEndian
        )
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        model = PatientRootQueryRetrieveInformationModelFind
        ae.add_requested_context(model, ExplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(self.ds, model):
            # SCP should reply with a 0xCxxx (Failure) status
            assert status.Status in range(0xC000, 0xD000)
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_connection_timeout(self):
        """Test the connection timing out"""
        def handle(event):
            yield 0x0000

        hh = [(evt.EVT_C_FIND, handle)]
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)

        class DummyMessage():
            is_valid_response = True
            Identifier = None
            Status = 0x0000
            STATUS_OPTIONAL_KEYWORDS = []

        class DummyDIMSE():
            def send_msg(*args, **kwargs):
                return
            def get_msg(*args, **kwargs):
                # (None, None) mimics a DIMSE timeout
                return None, None

        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        results = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        # A timeout yields an empty status and aborts the association
        assert next(results) == (Dataset(), None)
        with pytest.raises(StopIteration):
            next(results)
        assert assoc.is_aborted
        scp.shutdown()

    def test_decode_failure(self):
        """Test a failure to decode the response Identifier"""
        def handle(event):
            yield 0x0000

        hh = [(evt.EVT_C_FIND, handle)]
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind,
                                 ExplicitVRLittleEndian)
        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)

        class DummyDIMSE():
            msg_queue = queue.Queue()
            def send_msg(*args, **kwargs):
                return
            def get_msg(*args, **kwargs):
                def dummy():
                    pass
                rsp = C_FIND()
                rsp.Status = 0xFF00
                rsp.MessageIDBeingRespondedTo = 1
                # A non-dataset payload forces Identifier decoding to fail
                rsp._dataset = dummy
                return 1, rsp

        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        results = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        status, ds = next(results)
        # Pending status is kept but the undecodable Identifier is None
        assert status.Status == 0xFF00
        assert ds is None
        scp.shutdown()

    def test_rsp_not_find(self, caplog):
        """Test receiving a non C-FIND message in response."""
        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            ae = AE()
            assoc = Association(ae, 'requestor')
            assoc._is_paused = True
            dimse = assoc.dimse
            # Queue an unexpected C-STORE instead of a C-FIND response
            dimse.msg_queue.put((3, C_STORE()))
            cx = build_context(PatientRootQueryRetrieveInformationModelFind)
            cx._as_scu = True
            cx._as_scp = False
            cx.context_id = 1
            assoc._accepted_cx = {1 : cx}
            identifier = Dataset()
            identifier.PatientID = '*'
            assoc.is_established = True
            results = assoc.send_c_find(
                identifier, PatientRootQueryRetrieveInformationModelFind
            )
            status, ds = next(results)
            assert status == Dataset()
            assert ds is None
            with pytest.raises(StopIteration):
                next(results)
            assert (
                'Received an unexpected C-STORE message from the peer'
            ) in caplog.text
            assert assoc.is_aborted

    def test_rsp_invalid_find(self, caplog):
        """Test receiving an invalid C-FIND message in response."""
        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            ae = AE()
            assoc = Association(ae, 'requestor')
            assoc._is_paused = True
            dimse = assoc.dimse
            # An empty C_FIND() is missing required response attributes
            dimse.msg_queue.put((3, C_FIND()))
            cx = build_context(PatientRootQueryRetrieveInformationModelFind)
            cx._as_scu = True
            cx._as_scp = False
            cx.context_id = 1
            assoc._accepted_cx = {1 : cx}
            identifier = Dataset()
            identifier.PatientID = '*'
            assoc.is_established = True
            results = assoc.send_c_find(
                identifier, PatientRootQueryRetrieveInformationModelFind
            )
            status, ds = next(results)
            assert status == Dataset()
            assert ds is None
            with pytest.raises(StopIteration):
                next(results)
            assert (
                'Received an invalid C-FIND response from the peer'
            ) in caplog.text
            assert assoc.is_aborted

    def test_query_uid_public(self):
        """Test using a public UID for the query model"""
        def handle(event):
            yield 0x0000, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        responses = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        for (status, ds) in responses:
            assert status.Status == 0x0000
            assert ds is None
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_query_uid_private(self, caplog):
        """Test using a private UID for the query model"""
        def handle(event):
            yield 0x0000, None

        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            self.ae = ae = AE()
            ae.acse_timeout = 5
            ae.dimse_timeout = 5
            ae.network_timeout = 5
            # Private SOP Class UID: no service class implementation
            ae.add_supported_context('1.2.3.4')
            scp = ae.start_server(
                ('', 11112), block=False,
                evt_handlers=[(evt.EVT_C_FIND, handle)]
            )
            ae.add_requested_context('1.2.3.4')
            assoc = ae.associate('localhost', 11112)
            assert assoc.is_established
            responses = assoc.send_c_find(self.ds, '1.2.3.4')
            scp.shutdown()
            msg = (
                "No supported service class available for the SOP Class "
                "UID '1.2.3.4'"
            )
            assert msg in caplog.text
class TestAssociationSendCCancel(object):
    """Run tests on Assocation send_c_cancel."""

    def setup(self):
        """Run prior to each test"""
        self.ae = None

    def teardown(self):
        """Clear any active threads"""
        if self.ae:
            self.ae.shutdown()

    def test_must_be_associated(self):
        """Test can't send without association."""
        # Test raise if assoc not established
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        with pytest.raises(RuntimeError):
            assoc.send_c_cancel(1, 1)
        scp.shutdown()

    def test_good_send(self):
        """Test a valid send_c_cancel"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # (message ID, presentation context ID)
        assoc.send_c_cancel(1, 1)
        scp.shutdown()
class TestAssociationSendCGet(object):
"""Run tests on Assocation send_c_get."""
def setup(self):
    """Run prior to each test"""
    # PATIENT-level query Identifier used by the C-GET requests
    self.ds = Dataset()
    self.ds.PatientName = '*'
    self.ds.QueryRetrieveLevel = "PATIENT"
    # A valid storage dataset returned via C-STORE sub-operations
    self.good = Dataset()
    self.good.file_meta = FileMetaDataset()
    self.good.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
    self.good.SOPClassUID = CTImageStorage
    self.good.SOPInstanceUID = '1.1.1'
    self.good.PatientName = 'Test'
    self.ae = None
def teardown(self):
    """Clear any active threads"""
    if self.ae:
        self.ae.shutdown()
def test_must_be_associated(self):
    """Test can't send without association."""
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    assoc = ae.associate('localhost', 11112)
    assoc.release()
    assert assoc.is_released
    assert not assoc.is_established
    # send_c_get is a generator; the exception is raised on next()
    with pytest.raises(RuntimeError):
        next(assoc.send_c_get(
            self.ds, PatientRootQueryRetrieveInformationModelGet)
        )
    scp.shutdown()
def test_must_be_scp(self):
    """Test failure if not SCP for storage context."""
    store_pname = []

    def handle_store(event):
        store_pname.append(event.dataset.PatientName)
        return 0x0000

    def handle_get(event):
        # Number of sub-operations, then two pending matches
        yield 2
        yield 0xFF00, self.good
        yield 0xFF00, self.good

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(
        CTImageStorage, scu_role=True, scp_role=True
    )
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_C_GET, handle_get)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    # Deliberately NOT requesting the storage context, so the C-STORE
    # sub-operations have no accepted context and must fail
    #ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scu_role=True, scp_role=True)
    assoc = ae.associate(
        'localhost', 11112, ext_neg=[role],
        evt_handlers=[(evt.EVT_C_STORE, handle_store)]
    )
    assert assoc.is_established
    result = assoc.send_c_get(
        self.ds, PatientRootQueryRetrieveInformationModelGet
    )
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    # Final response is a warning listing both failed sub-operations
    (status, ds) = next(result)
    assert status.Status == 0xb000
    assert ds.FailedSOPInstanceUIDList == ['1.1.1', '1.1.1']
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_no_abstract_syntax_match(self):
    """Test when no accepted abstract syntax.

    Only the CT Image Storage context is negotiated, so ``send_c_get``
    with a Query/Retrieve model must raise ``ValueError``.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(CTImageStorage)
    # (removed redundant re-assignment of acse/dimse timeouts here)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    with pytest.raises(ValueError):
        next(assoc.send_c_get(
            self.ds, PatientRootQueryRetrieveInformationModelGet)
        )
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_bad_query_model(self):
    """Test bad query model parameter"""
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # 'X' is not a valid SOP Class UID for the query model
    with pytest.raises(ValueError):
        next(assoc.send_c_get(self.ds, query_model='X'))
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_fail_encode_identifier(self):
    """Test a failure in encoding the Identifier dataset.

    The shared module-level ``DATASET`` is temporarily mutated so it
    can't be encoded; a try/finally guarantees the mutation is undone
    even if the expected ``ValueError`` is never raised.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(
        PatientRootQueryRetrieveInformationModelGet, ExplicitVRLittleEndian
    )
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    DATASET.PerimeterValue = b'\x00\x01'
    try:
        with pytest.raises(ValueError):
            next(assoc.send_c_get(
                DATASET, PatientRootQueryRetrieveInformationModelGet)
            )
    finally:
        del DATASET.PerimeterValue  # Fix up our changes
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_failure(self):
    """Test receiving a failure response"""
    store_pname = []

    def handle_store(event):
        store_pname.append(event.dataset.PatientName)
        return 0x0000

    def handle_get(event):
        # One sub-operation, then 0xA701 (Refused - Out of Resources)
        yield 1
        yield 0xA701, None

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scu_role=True, scp_role=True)
    assoc = ae.associate(
        'localhost', 11112, ext_neg=[role],
        evt_handlers=[(evt.EVT_C_STORE, handle_store)]
    )
    assert assoc.is_established
    for (status, ds) in assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet):
        assert status.Status == 0xA701
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_success(self):
    """Test good send"""
    store_pname = []

    def handle_get(event):
        # Two sub-operations, both pending matches
        yield 2
        yield 0xFF00, self.good
        yield 0xFF00, self.good

    def handle_store(event):
        store_pname.append(event.dataset.PatientName)
        return 0x0000

    scu_handler = [(evt.EVT_C_STORE, handle_store)]
    scp_handler = [(evt.EVT_C_GET, handle_get)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=scp_handler)
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    # Role negotiation so the requestor can act as Storage SCP
    role = build_role(CTImageStorage, scp_role=True, scu_role=True)
    assoc = ae.associate(
        'localhost', 11112, evt_handlers=scu_handler, ext_neg=[role]
    )
    assert assoc.is_established
    result = assoc.send_c_get(
        self.ds, PatientRootQueryRetrieveInformationModelGet
    )
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0x0000
    assert ds is None
    assoc.release()
    assert assoc.is_released
    # Both sub-operation datasets arrived via C-STORE
    assert store_pname == ['Test', 'Test']
    scp.shutdown()
def test_rsp_pending_send_success(self):
    """Test receiving a pending response and sending success"""
    store_pname = []

    def handle_get(event):
        # Claims 3 sub-operations but only yields 2 matches
        yield 3
        yield 0xFF00, self.good
        yield 0xFF00, self.good

    def handle_store(event):
        store_pname.append(event.dataset.PatientName)
        return 0x0000

    scu_handler = [(evt.EVT_C_STORE, handle_store)]
    scp_handler = [(evt.EVT_C_GET, handle_get)]
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=scp_handler)
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scp_role=True, scu_role=True)
    assoc = ae.associate(
        'localhost', 11112, evt_handlers=scu_handler, ext_neg=[role]
    )
    assert assoc.is_established
    result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet)
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0x0000
    assert ds is None
    assoc.release()
    assert assoc.is_released
    assert store_pname == ['Test', 'Test']
    scp.shutdown()
def test_rsp_pending_send_failure(self):
    """Test receiving a pending response and sending a failure"""
    store_pname = []

    def handle_store(event):
        # 0xA700: our C-STORE sub-operation handler reports failure
        store_pname.append(event.dataset.PatientName)
        return 0xA700

    def handle_get(event):
        yield 3
        yield 0xFF00, self.good
        yield 0xFF00, self.good
        yield 0x0000, None

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scu_role=True, scp_role=True)
    assoc = ae.associate(
        'localhost', 11112, ext_neg=[role],
        evt_handlers=[(evt.EVT_C_STORE, handle_store)]
    )
    assert assoc.is_established
    result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet)
    # Two pending responses, then a final warning listing the failures
    (status, ds) = next(result)
    assert status.Status == 0xFF00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xFF00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xB000
    assert 'FailedSOPInstanceUIDList' in ds
    with pytest.raises(StopIteration):
        next(result)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_pending_send_warning(self):
    """Test receiving a pending response and sending a warning"""
    store_pname = []

    def handle_store(event):
        # 0xB007: our C-STORE sub-operation handler reports a warning
        store_pname.append(event.dataset.PatientName)
        return 0xB007

    def handle_get(event):
        yield 3
        yield 0xFF00, self.good
        yield 0xFF00, self.good
        yield 0xB000, None

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scu_role=True, scp_role=True)
    assoc = ae.associate(
        'localhost', 11112, ext_neg=[role],
        evt_handlers=[(evt.EVT_C_STORE, handle_store)]
    )
    assert assoc.is_established
    result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet)
    # Two pending responses, then a final warning with the failed UIDs
    (status, ds) = next(result)
    assert status.Status == 0xFF00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xFF00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xB000
    assert 'FailedSOPInstanceUIDList' in ds
    with pytest.raises(StopIteration):
        next(result)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_cancel(self):
    """Test receiving a cancel response"""
    store_pname = []

    def handle_store(event):
        store_pname.append(event.dataset.PatientName)
        return 0x0000

    def handle_get(event):
        # 0xFE00: Sub-operations terminated due to Cancel
        yield 1
        yield 0xFE00, None

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scu_role=True, scp_role=True)
    assoc = ae.associate(
        'localhost', 11112, ext_neg=[role],
        evt_handlers=[(evt.EVT_C_STORE, handle_store)]
    )
    assert assoc.is_established
    for (status, ds) in assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet):
        assert status.Status == 0xFE00
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_warning(self):
    """Test receiving a warning response"""
    store_pname = []

    def handle_store(event):
        # 0xB007: warning status for each C-STORE sub-operation
        store_pname.append(event.dataset.PatientName)
        return 0xB007

    def handle_get(event):
        yield 3
        yield 0xFF00, self.good
        yield 0xFF00, self.good
        yield 0xB000, None

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scu_role=True, scp_role=True)
    assoc = ae.associate(
        'localhost', 11112, ext_neg=[role],
        evt_handlers=[(evt.EVT_C_STORE, handle_store)]
    )
    assert assoc.is_established
    result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet)
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    (status, ds) = next(result)
    assert status.Status == 0xff00
    assert ds is None
    # Final warning response includes the failed instance UIDs
    (status, ds) = next(result)
    assert status.Status == 0xb000
    assert 'FailedSOPInstanceUIDList' in ds
    with pytest.raises(StopIteration):
        next(result)
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_rsp_unknown_status(self):
    """Test unknown status value returned by peer"""
    store_pname = []

    def handle_store(event):
        store_pname.append(event.dataset.PatientName)
        return 0x0000

    def handle_get(event):
        # 0xFFF0 is not a recognised C-GET status; the SCU should pass it
        # through to the caller unchanged.
        yield 1
        yield 0xFFF0, None

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
    scp = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = build_role(CTImageStorage, scu_role=True, scp_role=True)
    assoc = ae.associate(
        'localhost', 11112, ext_neg=[role],
        evt_handlers=[(evt.EVT_C_STORE, handle_store)]
    )
    assert assoc.is_established
    # The unknown status should be yielded as-is.
    for (status, ds) in assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet):
        assert status.Status == 0xFFF0
    assoc.release()
    assert assoc.is_released
    scp.shutdown()
def test_connection_timeout(self):
    """Test the connection timing out"""
    def handle(event):
        yield 2
        yield 0xFF00, self.good
        yield 0xFF00, self.good

    hh = [(evt.EVT_C_GET, handle)]

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)

    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    role = SCP_SCU_RoleSelectionNegotiation()
    role.sop_class_uid = CTImageStorage
    role.scu_role = False
    role.scp_role = True
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112, ext_neg=[role])

    # NOTE(review): DummyMessage appears unused in this test — presumably
    # left over from a copy of a sibling test.
    class DummyMessage():
        is_valid_response = True
        DataSet = None
        Status = 0x0000
        STATUS_OPTIONAL_KEYWORDS = []

    class DummyDIMSE():
        # Stub DIMSE provider: get_msg() returning (None, None) mimics the
        # DIMSE timeout/connection-loss signal.
        def send_msg(*args, **kwargs):
            return

        def get_msg(*args, **kwargs):
            return None, None

    # Pause the reactor before swapping in the stub DIMSE so the real
    # reactor thread isn't mid-use of assoc.dimse.
    assoc._reactor_checkpoint.clear()
    while not assoc._is_paused:
        time.sleep(0.01)
    assoc.dimse = DummyDIMSE()
    assert assoc.is_established

    results = assoc.send_c_get(
        self.ds, PatientRootQueryRetrieveInformationModelGet
    )
    # A timed-out operation yields an empty status then aborts.
    assert next(results) == (Dataset(), None)
    with pytest.raises(StopIteration):
        next(results)
    assert assoc.is_aborted

    scp.shutdown()
def test_decode_failure(self):
    """Test handling a failure to decode the response's dataset."""
    def handle(event):
        yield 2
        yield 0xFF00, self.good
        yield 0xFF00, self.good

    hh = [(evt.EVT_C_GET, handle)]

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
    scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)

    ae.add_requested_context(
        PatientRootQueryRetrieveInformationModelGet, ExplicitVRLittleEndian
    )
    ae.add_requested_context(CTImageStorage)
    role = SCP_SCU_RoleSelectionNegotiation()
    role.sop_class_uid = CTImageStorage
    role.scu_role = False
    role.scp_role = True
    assoc = ae.associate('localhost', 11112, ext_neg=[role])

    # NOTE(review): DummyMessage appears unused in this test.
    class DummyMessage():
        is_valid_response = True
        DataSet = None
        Status = 0x0000
        STATUS_OPTIONAL_KEYWORDS = []

    class DummyDIMSE():
        def send_msg(*args, **kwargs):
            return

        def get_msg(*args, **kwargs):
            # Return a C-GET response whose _dataset is a function rather
            # than encoded data, forcing the decode step to fail.
            def dummy(): pass
            rsp = C_GET()
            rsp.Status = 0xC000
            rsp.MessageIDBeingRespondedTo = 1
            rsp._dataset = dummy
            return 1, rsp

    # Pause the reactor before monkey-patching the DIMSE provider.
    assoc._reactor_checkpoint.clear()
    while not assoc._is_paused:
        time.sleep(0.01)
    assoc.dimse = DummyDIMSE()
    assert assoc.is_established
    results = assoc.send_c_get(
        self.ds, PatientRootQueryRetrieveInformationModelGet
    )
    # The status is passed through but the undecodable dataset becomes None.
    status, ds = next(results)
    assert status.Status == 0xC000
    assert ds is None

    scp.shutdown()
def test_rsp_not_get(self, caplog):
    """Test receiving a non C-GET/C-STORE message in response."""
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        # Build an association by hand (no network) and inject a C-FIND
        # message into the DIMSE queue where a C-GET response is expected.
        ae = AE()
        assoc = Association(ae, 'requestor')
        assoc._is_paused = True
        dimse = assoc.dimse
        dimse.msg_queue.put((3, C_FIND()))
        cx = build_context(PatientRootQueryRetrieveInformationModelGet)
        cx._as_scu = True
        cx._as_scp = False
        cx.context_id = 1
        assoc._accepted_cx = {1 : cx}
        identifier = Dataset()
        identifier.PatientID = '*'
        assoc.is_established = True
        results = assoc.send_c_get(identifier, PatientRootQueryRetrieveInformationModelGet)
        # The unexpected message yields an empty status and aborts.
        status, ds = next(results)
        assert status == Dataset()
        assert ds is None
        with pytest.raises(StopIteration):
            next(results)
        assert (
            'Received an unexpected C-FIND message from the peer'
        ) in caplog.text
        assert assoc.is_aborted
def test_rsp_invalid_get(self, caplog):
    """Test receiving an invalid C-GET message in response."""
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        # Inject a default-constructed (hence invalid) C-GET response into
        # the DIMSE queue of a hand-built, non-networked association.
        ae = AE()
        assoc = Association(ae, 'requestor')
        assoc._is_paused = True
        dimse = assoc.dimse
        dimse.msg_queue.put((3, C_GET()))
        cx = build_context(PatientRootQueryRetrieveInformationModelGet)
        cx._as_scu = True
        cx._as_scp = False
        cx.context_id = 1
        assoc._accepted_cx = {1 : cx}
        identifier = Dataset()
        identifier.PatientID = '*'
        assoc.is_established = True
        results = assoc.send_c_get(identifier, PatientRootQueryRetrieveInformationModelGet)
        # An invalid response yields an empty status and aborts.
        status, ds = next(results)
        assert status == Dataset()
        assert ds is None
        with pytest.raises(StopIteration):
            next(results)
        assert (
            'Received an invalid C-GET response from the peer'
        ) in caplog.text
        assert assoc.is_aborted
def test_query_uid_public(self):
    """Test using a public UID for the query model"""
    def handle(event):
        # No sub-operations; immediate success.
        yield 0
        yield 0x0000, None

    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    scp = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle)]
    )
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    responses = assoc.send_c_get(
        self.ds, PatientRootQueryRetrieveInformationModelGet
    )
    for (status, ds) in responses:
        assert status.Status == 0x0000
        assert ds is None
    assoc.release()
    assert assoc.is_released

    scp.shutdown()
def test_query_uid_private(self, caplog):
    """Test using a private UID for the query model"""
    def handle(event):
        yield 0
        yield 0x0000, None

    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        # '1.2.3.4' is a private SOP Class UID with no matching service
        # class, so send_c_get should log an error.
        ae.add_supported_context('1.2.3.4')
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle)]
        )
        ae.add_requested_context('1.2.3.4')
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        responses = assoc.send_c_get(self.ds, '1.2.3.4')

        scp.shutdown()

        msg = (
            "No supported service class available for the SOP Class "
            "UID '1.2.3.4'"
        )
        assert msg in caplog.text
class TestAssociationSendCMove(object):
    """Run tests on Assocation send_c_move."""
    def setup(self):
        """Run prior to each test"""
        # Query identifier used by most tests.
        self.ds = Dataset()
        self.ds.PatientName = '*'
        self.ds.QueryRetrieveLevel = "PATIENT"

        # A valid CT dataset for the Storage sub-operations.
        self.good = Dataset()
        self.good.file_meta = FileMetaDataset()
        self.good.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
        self.good.SOPClassUID = CTImageStorage
        self.good.SOPInstanceUID = '1.1.1'
        self.good.PatientName = 'Test'

        self.ae = None

    def teardown(self):
        """Clear any active threads"""
        if self.ae:
            self.ae.shutdown()

    def test_must_be_associated(self):
        """Test can't send without association."""
        # Test raise if assoc not established
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        # send_c_move on a released association must raise.
        with pytest.raises(RuntimeError):
            next(assoc.send_c_move(
                self.ds, b'TESTMOVE',
                PatientRootQueryRetrieveInformationModelMove)
            )

        scp.shutdown()

    def test_no_abstract_syntax_match(self):
        """Test when no accepted abstract syntax"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # No Q/R Move context was negotiated, so a ValueError is raised.
        with pytest.raises(ValueError):
            next(assoc.send_c_move(
                self.ds, b'TESTMOVE',
                PatientRootQueryRetrieveInformationModelMove)
            )
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_bad_query_model(self):
        """Test bad query model parameter"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # 'X' is not a valid query model identifier.
        with pytest.raises(ValueError):
            next(assoc.send_c_move(self.ds, b'TESTMOVE', query_model='X'))
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_fail_encode_identifier(self):
        """Test a failure in encoding the Identifier dataset"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(
            PatientRootQueryRetrieveInformationModelMove,
            ExplicitVRLittleEndian
        )
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # A non-conformant element that can't be encoded as explicit VR.
        DATASET.PerimeterValue = b'\x00\x01'

        with pytest.raises(ValueError):
            next(assoc.send_c_move(
                DATASET, b'SOMEPLACE',
                PatientRootQueryRetrieveInformationModelMove)
            )
        assoc.release()
        assert assoc.is_released
        del DATASET.PerimeterValue # Fix up our changes

        scp.shutdown()

    def test_move_destination_no_assoc(self):
        """Test move destination failed to assoc"""
        # Move SCP
        def handle_move(event):
            # No Storage SCP is listening on 11113, so the Move SCP can't
            # associate with the destination -> 0xA801.
            yield 'localhost', 11113
            yield 2
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_move(
                self.ds, b'TESTMOVE',
                PatientRootQueryRetrieveInformationModelMove):
            assert status.Status == 0xa801
        assoc.release()
        assert assoc.is_released

        move_scp.shutdown()

    def test_move_destination_unknown(self):
        """Test unknown move destination"""
        def handle_move(event):
            # (None, None) signals the destination AE title is unknown.
            yield None, None
            yield 1
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_move(
                self.ds, b'UNKNOWN',
                PatientRootQueryRetrieveInformationModelMove):
            assert status.Status == 0xa801
        assoc.release()
        assert assoc.is_released

        move_scp.shutdown()

    def test_move_destination_failed_store(self):
        """Test the destination AE returning failed status"""
        def handle_store(event):
            # Destination Storage SCP fails every C-STORE.
            return 0xA700

        def handle_move(event):
            yield 'localhost', 11113
            yield 2
            yield 0xFF00, self.good
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_move(
            self.ds, b'TESTMOVE', PatientRootQueryRetrieveInformationModelMove
        )
        # Two pending responses, then a final warning (sub-ops failed).
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        (status, ds) = next(result)
        assert status.Status == 0xB000
        with pytest.raises(StopIteration):
            next(result)
        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_move_destination_warning_store(self):
        """Test the destination AE returning warning status"""
        def handle_store(event):
            # Destination Storage SCP warns on every C-STORE.
            return 0xB000

        def handle_move(event):
            yield 'localhost', 11113
            yield 2
            yield 0xFF00, self.good
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_move(
            self.ds, b'TESTMOVE', PatientRootQueryRetrieveInformationModelMove
        )
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        (status, ds) = next(result)
        assert status.Status == 0xB000

        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_rsp_failure(self):
        """Test the handler returning failure status"""
        def handle_store(event):
            return 0x0000

        def handle_move(event):
            # Failure (0xC000) before any pending responses; the trailing
            # yield should never be reached.
            yield 'localhost', 11113
            yield 2
            yield 0xC000, None
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_move(
            self.ds, b'TESTMOVE', PatientRootQueryRetrieveInformationModelMove
        )
        (status, ds) = next(result)
        assert status.Status == 0xC000
        # Failure responses must include the failed instance UID list.
        assert 'FailedSOPInstanceUIDList' in ds
        with pytest.raises(StopIteration):
            next(result)
        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_rsp_warning(self):
        """Test receiving a warning response from the peer"""
        def handle_store(event):
            # Warning (coercion) status from the destination SCP.
            return 0xB007

        def handle_move(event):
            yield 'localhost', 11113
            yield 2
            yield 0xFF00, self.good
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_move(
            self.ds, b'TESTMOVE', PatientRootQueryRetrieveInformationModelMove
        )
        # Two pending then a final warning carrying the failed UID list.
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        assert ds is None
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        assert ds is None
        (status, ds) = next(result)
        assert status.Status == 0xB000
        assert 'FailedSOPInstanceUIDList' in ds
        with pytest.raises(StopIteration):
            next(result)

        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_rsp_cancel(self):
        """Test the handler returning cancel status"""
        def handle_store(event):
            return 0x0000

        def handle_move(event):
            # Cancel (0xFE00) terminates the operation immediately.
            yield 'localhost', 11113
            yield 2
            yield 0xFE00, self.good
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_move(
            self.ds, b'TESTMOVE', PatientRootQueryRetrieveInformationModelMove
        )
        (status, ds) = next(result)
        assert status.Status == 0xFE00
        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_rsp_success(self):
        """Test the handler returning success status"""
        def handle_store(event):
            return 0x0000

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5

        # Storage SCP
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        # Move SCP
        def handle_move(event):
            yield 'localhost', 11112
            yield 2
            yield 0xff00, self.good

        ae.add_requested_context(CTImageStorage)
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove)
        ae.add_supported_context(PatientStudyOnlyQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )

        # Move SCU
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        ae.add_requested_context(StudyRootQueryRetrieveInformationModelMove)
        ae.add_requested_context(PatientStudyOnlyQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11113)
        assert assoc.is_established

        result = assoc.send_c_move(
            self.ds, b'TESTMOVE', PatientRootQueryRetrieveInformationModelMove
        )
        # One pending sub-operation, then final success.
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        assert ds is None
        (status, ds) = next(result)
        assert status.Status == 0x0000
        assert ds is None
        with pytest.raises(StopIteration):
            next(result)

        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_rsp_unknown_status(self):
        """Test unknown status value returned by peer"""
        def handle_store(event):
            return 0xA700

        def handle_move(event):
            # 0xFFF0 is not a recognised C-MOVE status; it should be
            # yielded to the caller unchanged.
            yield 'localhost', 11113
            yield 2
            yield 0xFFF0, self.good
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_move(
                self.ds, b'TESTMOVE',
                PatientRootQueryRetrieveInformationModelMove):
            assert status.Status == 0xFFF0
        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_multiple_c_move(self):
        """Test multiple C-MOVE operation requests"""
        def handle_store(event):
            return 0x0000

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5

        # Storage SCP
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        # Move SCP
        def handle_move(event):
            yield 'localhost', 11112
            yield 2
            yield 0xff00, self.good
            yield 0xff00, self.good

        ae.add_requested_context(CTImageStorage)
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove)
        ae.add_supported_context(PatientStudyOnlyQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )

        # Move SCU: repeat the full associate/move/release cycle to check
        # for state leaking between consecutive operations.
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        ae.add_requested_context(StudyRootQueryRetrieveInformationModelMove)
        ae.add_requested_context(PatientStudyOnlyQueryRetrieveInformationModelMove)
        for ii in range(20):
            assoc = ae.associate('localhost', 11113)
            assert assoc.is_established
            assert not assoc.is_released
            result = assoc.send_c_move(
                self.ds,
                b'TESTMOVE',
                PatientRootQueryRetrieveInformationModelMove
            )
            (status, ds) = next(result)
            assert status.Status == 0xFF00
            (status, ds) = next(result)
            assert status.Status == 0xFF00
            (status, ds) = next(result)
            assert status.Status == 0x0000
            with pytest.raises(StopIteration):
                next(result)
            assoc.release()
            assert assoc.is_released
            assert not assoc.is_established

        store_scp.shutdown()
        move_scp.shutdown()

    def test_connection_timeout(self):
        """Test the connection timing out"""
        def handle(event):
            yield ('localhost', 11112)
            yield 2
            yield 0xFF00, self.good
            yield 0xFF00, self.good

        hh = [(evt.EVT_C_MOVE, handle)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)

        # NOTE(review): DummyMessage appears unused in this test.
        class DummyMessage():
            is_valid_response = True
            Identifier = None
            Status = 0x0000
            STATUS_OPTIONAL_KEYWORDS = []

        class DummyDIMSE():
            # get_msg() returning (None, None) mimics a DIMSE timeout.
            def send_msg(*args, **kwargs):
                return

            def get_msg(*args, **kwargs):
                return None, None

        # Pause the reactor before swapping in the stub DIMSE provider.
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        results = assoc.send_c_move(
            self.ds, b'TEST', PatientRootQueryRetrieveInformationModelMove
        )
        # A timed-out operation yields an empty status then aborts.
        assert next(results) == (Dataset(), None)
        with pytest.raises(StopIteration):
            next(results)
        assert assoc.is_aborted

        scp.shutdown()

    def test_decode_failure(self):
        """Test handling a failure to decode the response's dataset."""
        def handle(event):
            yield ('localhost', 11112)
            yield 2
            yield 0xFF00, self.good
            yield 0xFF00, self.good

        hh = [(evt.EVT_C_MOVE, handle)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)

        ae.add_requested_context(
            PatientRootQueryRetrieveInformationModelMove,
            ExplicitVRLittleEndian
        )
        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)

        # NOTE(review): DummyMessage appears unused in this test.
        class DummyMessage():
            is_valid_response = True
            DataSet = None
            Status = 0x0000
            STATUS_OPTIONAL_KEYWORDS = []

        class DummyDIMSE():
            def send_msg(*args, **kwargs):
                return

            def get_msg(*args, **kwargs):
                # Return a C-MOVE response whose _dataset is a function, not
                # encoded data, forcing the decode step to fail.
                def dummy(): pass
                rsp = C_MOVE()
                rsp.MessageIDBeingRespondedTo = 1
                rsp.Status = 0xC000
                rsp._dataset = dummy
                return 1, rsp

        # Pause the reactor before monkey-patching the DIMSE provider.
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        results = assoc.send_c_move(
            self.ds, b'TEST', PatientRootQueryRetrieveInformationModelMove
        )
        # The status is passed through; the undecodable dataset becomes None.
        status, ds = next(results)
        assert status.Status == 0xC000
        assert ds is None

        scp.shutdown()

    def test_rsp_not_move(self, caplog):
        """Test receiving a non C-MOVE/C-STORE message in response."""
        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            # Hand-built association (no network); inject a C-FIND message
            # where a C-MOVE response is expected.
            ae = AE()
            assoc = Association(ae, 'requestor')
            assoc._is_paused = True
            dimse = assoc.dimse
            dimse.msg_queue.put((3, C_FIND()))
            cx = build_context(PatientRootQueryRetrieveInformationModelMove)
            cx._as_scu = True
            cx._as_scp = False
            cx.context_id = 1
            assoc._accepted_cx = {1 : cx}
            identifier = Dataset()
            identifier.PatientID = '*'
            assoc.is_established = True
            results = assoc.send_c_move(
                identifier, b'A', PatientRootQueryRetrieveInformationModelMove
            )
            status, ds = next(results)
            assert status == Dataset()
            assert ds is None
            with pytest.raises(StopIteration):
                next(results)
            assert (
                'Received an unexpected C-FIND message from the peer'
            ) in caplog.text
            assert assoc.is_aborted

    def test_rsp_invalid_move(self, caplog):
        """Test receiving an invalid C-MOVE message in response."""
        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            # Inject a default-constructed (invalid) C-MOVE response.
            ae = AE()
            assoc = Association(ae, 'requestor')
            assoc._is_paused = True
            dimse = assoc.dimse
            dimse.msg_queue.put((3, C_MOVE()))
            cx = build_context(PatientRootQueryRetrieveInformationModelMove)
            cx._as_scu = True
            cx._as_scp = False
            cx.context_id = 1
            assoc._accepted_cx = {1 : cx}
            identifier = Dataset()
            identifier.PatientID = '*'
            assoc.is_established = True
            results = assoc.send_c_move(
                identifier, b'A', PatientRootQueryRetrieveInformationModelMove
            )
            status, ds = next(results)
            assert status == Dataset()
            assert ds is None
            with pytest.raises(StopIteration):
                next(results)
            assert (
                'Received an invalid C-MOVE response from the peer'
            ) in caplog.text
            assert assoc.is_aborted

    def test_query_uid_public(self):
        """Test using a public UID for the query model"""
        def handle_store(event):
            return 0x0000

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5

        # Storage SCP
        ae.add_supported_context(CTImageStorage)
        store_scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )

        # Move SCP
        def handle_move(event):
            yield 'localhost', 11112
            yield 2
            yield 0xff00, self.good

        ae.add_requested_context(CTImageStorage)
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
        ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove)
        ae.add_supported_context(PatientStudyOnlyQueryRetrieveInformationModelMove)
        move_scp = ae.start_server(
            ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
        )

        # Move SCU
        ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
        ae.add_requested_context(StudyRootQueryRetrieveInformationModelMove)
        ae.add_requested_context(PatientStudyOnlyQueryRetrieveInformationModelMove)
        assoc = ae.associate('localhost', 11113)
        assert assoc.is_established

        result = assoc.send_c_move(
            self.ds, b'TESTMOVE', PatientRootQueryRetrieveInformationModelMove
        )
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        assert ds is None
        (status, ds) = next(result)
        assert status.Status == 0x0000
        assert ds is None
        with pytest.raises(StopIteration):
            next(result)

        assoc.release()
        assert assoc.is_released

        store_scp.shutdown()
        move_scp.shutdown()

    def test_query_uid_private(self, caplog):
        """Test using a private UID for the query model"""
        def handle_store(event):
            return 0x0000

        def handle_move(event):
            yield 'localhost', 11112
            yield 2
            yield 0xff00, self.good

        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            self.ae = ae = AE()
            ae.acse_timeout = 5
            ae.dimse_timeout = 5
            ae.network_timeout = 5

            # Storage SCP
            ae.add_supported_context(CTImageStorage)
            store_scp = ae.start_server(
                ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)]
            )

            # '1.2.3.4' is a private SOP Class UID with no matching service
            # class, so send_c_move should log an error.
            ae.add_requested_context(CTImageStorage)
            ae.add_supported_context('1.2.3.4')
            move_scp = ae.start_server(
                ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)]
            )

            # Move SCU
            ae.add_requested_context('1.2.3.4')
            assoc = ae.associate('localhost', 11113)
            assert assoc.is_established

            result = assoc.send_c_move(self.ds, b'TESTMOVE', '1.2.3.4')

            store_scp.shutdown()
            move_scp.shutdown()

            msg = (
                "No supported service class available for the SOP Class "
                "UID '1.2.3.4'"
            )
            assert msg in caplog.text
class TestGetValidContext(object):
"""Tests for Association._get_valid_context."""
def setup(self):
    """Run prior to each test"""
    # Reset so teardown() only shuts down AEs created by the test.
    self.ae = None
def teardown(self):
    """Clear any active threads"""
    if self.ae:
        self.ae.shutdown()
def test_id_no_abstract_syntax_match(self):
    """Test exception raised if with ID no abstract syntax match"""
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)

    # CT Image Storage is requested but not supported by the peer, so it
    # will be rejected and no matching accepted context will exist.
    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established

    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(CTImageStorage, '', 'scu', context_id=1)

    assoc.release()
    scp.shutdown()
def test_id_transfer_syntax(self):
    """Test match with context ID."""
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage)
    ae.add_supported_context(
        CTImageStorage, [ExplicitVRLittleEndian, JPEGBaseline]
    )
    scp = ae.start_server(('', 11112), block=False)

    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage)
    ae.add_requested_context(CTImageStorage, JPEGBaseline)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established

    # Uncompressed accepted, different uncompressed sent
    # Context IDs are odd integers assigned in request order:
    # 1 = Verification, 3 = CT (implicit), 5 = CT (JPEG Baseline).
    cx = assoc._get_valid_context(
        CTImageStorage, '', 'scu', context_id=3
    )
    assert cx.context_id == 3
    assert cx.abstract_syntax == CTImageStorage
    assert cx.transfer_syntax[0] == ImplicitVRLittleEndian
    assert cx.as_scu is True

    cx = assoc._get_valid_context(
        CTImageStorage, '', 'scu', context_id=5
    )
    assert cx.context_id == 5
    assert cx.abstract_syntax == CTImageStorage
    assert cx.transfer_syntax[0] == JPEGBaseline
    assert cx.as_scu is True

    assoc.release()
    scp.shutdown()
def test_id_no_transfer_syntax(self):
    """Test exception raised if with ID no transfer syntax match."""
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(CTImageStorage, JPEGBaseline)
    scp = ae.start_server(('', 11112), block=False)

    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage, JPEGBaseline)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established

    # Confirm otherwise OK
    # '1.2.840.10008.1.1' is the Verification SOP Class UID.
    cx = assoc._get_valid_context(
        '1.2.840.10008.1.1', '', 'scu', context_id=1
    )
    assert cx.context_id == 1
    assert cx.transfer_syntax[0] == ImplicitVRLittleEndian

    # Uncompressed accepted, compressed sent
    msg = (
        r"No presentation context for 'Verification SOP Class' has been "
        r"accepted by the peer with 'JPEG Baseline \(Process 1\)' "
        r"transfer syntax for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            '1.2.840.10008.1.1', JPEGBaseline, 'scu', context_id=1
        )

    # Compressed (JPEGBaseline) accepted, uncompressed sent
    # Confirm otherwise OK
    cx = assoc._get_valid_context(
        CTImageStorage, JPEGBaseline, 'scu', context_id=3
    )
    assert cx.context_id == 3
    assert cx.transfer_syntax[0] == JPEGBaseline

    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer with 'Implicit VR Little Endian' "
        r"transfer syntax for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            CTImageStorage, ImplicitVRLittleEndian, 'scu', context_id=3
        )

    # Compressed (JPEGBaseline) accepted, compressed (JPEG2000) sent
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer with 'JPEG 2000 Image Compression' "
        r"transfer syntax for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            CTImageStorage, JPEG2000, 'scu', context_id=3
        )

    assoc.release()
    scp.shutdown()
def test_id_no_role_scp(self):
    """Test exception raised if with ID no role match.

    The Verification context is only accepted for the SCU role, so
    requesting it for 'scp' (with or without a transfer syntax) must
    raise ValueError even when the context ID matches.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(CTImageStorage, JPEGBaseline)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage, JPEGBaseline)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # Confirm matching otherwise OK
    cx = assoc._get_valid_context(
        '1.2.840.10008.1.1', '', 'scu', context_id=1
    )
    assert cx.context_id == 1
    assert cx.as_scu is True
    # Any transfer syntax
    msg = (
        r"No presentation context for 'Verification SOP Class' has been "
        r"accepted by the peer "
        r"for the SCP role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            '1.2.840.10008.1.1', '', 'scp', context_id=1
        )
    # Transfer syntax used
    msg = (
        r"No presentation context for 'Verification SOP Class' has been "
        r"accepted by the peer "
        r"with 'Implicit VR Little Endian' transfer syntax "
        r"for the SCP role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            '1.2.840.10008.1.1', ImplicitVRLittleEndian,
            'scp', context_id=1
        )
    assoc.release()
    scp.shutdown()
def test_id_no_role_scu(self):
    """Test exception raised if with ID no role match.

    Role selection negotiation proposes SCP-only for CT Image Storage
    (scu_role False, scp_role True), so requesting that context for the
    'scu' role must raise ValueError even with a matching context ID.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    # Propose SCP-only role for the storage context
    role = SCP_SCU_RoleSelectionNegotiation()
    role.sop_class_uid = CTImageStorage
    role.scu_role = False
    role.scp_role = True
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112, ext_neg=[role])
    assert assoc.is_established
    # Confirm matching otherwise OK
    cx = assoc._get_valid_context(
        CTImageStorage, '', 'scp', context_id=3
    )
    assert cx.context_id == 3
    assert cx.as_scp is True
    # Any transfer syntax
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            CTImageStorage, '', 'scu', context_id=3
        )
    # Transfer syntax used
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer "
        r"with 'Implicit VR Little Endian' transfer syntax "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            CTImageStorage, ImplicitVRLittleEndian, 'scu', context_id=3
        )
    assoc.release()
    scp.shutdown()
def test_no_id_no_abstract_syntax_match(self):
    """Without a context ID, a ValueError is raised when no accepted
    context has a matching abstract syntax."""
    self.ae = ae = AE()
    # Keep the network interactions from hanging the test run
    for attr in ('acse_timeout', 'dimse_timeout', 'network_timeout'):
        setattr(ae, attr, 5)
    ae.add_supported_context(VerificationSOPClass)
    server = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # Sanity check: an accepted abstract syntax is found without error
    assoc._get_valid_context(VerificationSOPClass, '', 'scu')
    # CT Image Storage was requested but not accepted by the peer
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(CTImageStorage, '', 'scu')
    assoc.release()
    server.shutdown()
def test_no_id_transfer_syntax(self):
    """Without a context ID, an uncompressed syntax request matches a
    context accepted with a different uncompressed syntax."""
    self.ae = ae = AE()
    for attr in ('acse_timeout', 'dimse_timeout', 'network_timeout'):
        setattr(ae, attr, 5)
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(CTImageStorage, JPEGBaseline)
    server = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage, JPEGBaseline)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # Explicit VR requested; the context accepted with implicit VR is
    # still a valid match between uncompressed syntaxes
    context = assoc._get_valid_context(
        '1.2.840.10008.1.1', ExplicitVRLittleEndian, 'scu'
    )
    assert 1 == context.context_id
    assert VerificationSOPClass == context.abstract_syntax
    assert ImplicitVRLittleEndian == context.transfer_syntax[0]
    assert context.as_scu is True
    assoc.release()
    server.shutdown()
def test_no_id_no_transfer_syntax(self):
    """Test exception raised if no transfer syntax match.

    Same coverage as ``test_id_no_transfer_syntax`` but without
    supplying a ``context_id``: any compressed syntax mismatch raises.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(CTImageStorage, JPEGBaseline)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage, JPEGBaseline)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # Confirm otherwise OK
    cx = assoc._get_valid_context('1.2.840.10008.1.1', '', 'scu')
    assert cx.context_id == 1
    assert cx.transfer_syntax[0] == ImplicitVRLittleEndian
    # Uncompressed accepted, compressed sent
    msg = (
        r"No presentation context for 'Verification SOP Class' has been "
        r"accepted by the peer "
        r"with 'JPEG Baseline \(Process 1\)' transfer syntax "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context('1.2.840.10008.1.1', JPEGBaseline, 'scu')
    # Compressed (JPEGBaseline) accepted, uncompressed sent
    # Confirm otherwise OK
    cx = assoc._get_valid_context(CTImageStorage, JPEGBaseline, 'scu')
    assert cx.context_id == 3
    assert cx.transfer_syntax[0] == JPEGBaseline
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer "
        r"with 'Implicit VR Little Endian' transfer syntax "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            CTImageStorage, ImplicitVRLittleEndian, 'scu'
        )
    # Compressed (JPEGBaseline) accepted, compressed (JPEG2000) sent
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer "
        r"with 'JPEG 2000 Image Compression' transfer syntax "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(CTImageStorage, JPEG2000, 'scu')
    assoc.release()
    scp.shutdown()
def test_no_id_no_role_scp(self):
    """Test exception raised if no role match.

    Without a context ID: the Verification context is accepted for the
    SCU role only, so any 'scp' request for it must raise ValueError.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(CTImageStorage, JPEGBaseline)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(VerificationSOPClass)
    ae.add_requested_context(CTImageStorage, JPEGBaseline)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # Confirm matching otherwise OK
    cx = assoc._get_valid_context('1.2.840.10008.1.1', '', 'scu')
    assert cx.context_id == 1
    assert cx.as_scu is True
    # Any transfer syntax
    msg = (
        r"No presentation context for 'Verification SOP Class' has been "
        r"accepted by the peer "
        r"for the SCP role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context('1.2.840.10008.1.1', '', 'scp')
    # Transfer syntax used
    msg = (
        r"No presentation context for 'Verification SOP Class' has been "
        r"accepted by the peer "
        r"with 'Implicit VR Little Endian' transfer syntax "
        r"for the SCP role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            '1.2.840.10008.1.1', ImplicitVRLittleEndian, 'scp'
        )
    assoc.release()
    scp.shutdown()
def test_no_id_no_role_scu(self):
    """Test exception raised if no role match.

    Without a context ID: role negotiation makes CT Image Storage
    SCP-only, so a 'scu' request for it must raise ValueError.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_requested_context(CTImageStorage)
    # Propose SCP-only role for the storage context
    role = SCP_SCU_RoleSelectionNegotiation()
    role.sop_class_uid = CTImageStorage
    role.scu_role = False
    role.scp_role = True
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112, ext_neg=[role])
    assert assoc.is_established
    # Confirm matching otherwise OK
    cx = assoc._get_valid_context(CTImageStorage, '', 'scp')
    assert cx.context_id == 3
    assert cx.as_scp is True
    # Any transfer syntax
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(CTImageStorage, '', 'scu')
    # Transfer syntax used
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer "
        r"with 'Implicit VR Little Endian' transfer syntax "
        r"for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            CTImageStorage, ImplicitVRLittleEndian, 'scu'
        )
    assoc.release()
    scp.shutdown()
def test_implicit_explicit(self):
    """When both implicit and explicit VR contexts are accepted, the
    context whose syntax exactly matches the request is returned."""
    self.ae = ae = AE()
    for attr in ('acse_timeout', 'dimse_timeout', 'network_timeout'):
        setattr(ae, attr, 5)
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian)
    ae.add_supported_context(CTImageStorage, ExplicitVRLittleEndian)
    server = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian)
    ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # (requested syntax, expected context ID) - implicit VR was
    # requested first so its context has ID 1
    for syntax, expected_id in (
        (ExplicitVRLittleEndian, 3),
        (ImplicitVRLittleEndian, 1),
    ):
        context = assoc._get_valid_context(CTImageStorage, syntax, 'scu')
        assert expected_id == context.context_id
        assert CTImageStorage == context.abstract_syntax
        assert syntax == context.transfer_syntax[0]
        assert context.as_scu is True
    assoc.release()
    server.shutdown()
def test_explicit_implicit(self):
    """Test matching when both implicit and explicit are available.

    Mirror of ``test_implicit_explicit`` with the request order
    reversed, so the explicit VR context has the lower context ID.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(CTImageStorage, ExplicitVRLittleEndian)
    ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
    ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # Explicit VR requested first -> context ID 1
    cx = assoc._get_valid_context(
        CTImageStorage, ExplicitVRLittleEndian, 'scu'
    )
    assert cx.context_id == 1
    assert cx.abstract_syntax == CTImageStorage
    assert cx.transfer_syntax[0] == ExplicitVRLittleEndian
    assert cx.as_scu is True
    # Implicit VR requested second -> context ID 3
    cx = assoc._get_valid_context(
        CTImageStorage, ImplicitVRLittleEndian, 'scu'
    )
    assert cx.context_id == 3
    assert cx.abstract_syntax == CTImageStorage
    assert cx.transfer_syntax[0] == ImplicitVRLittleEndian
    assert cx.as_scu is True
    assoc.release()
    # Fix: was ``scp.shutdown`` (attribute access only) - the method was
    # never called, leaving the server thread running after the test
    scp.shutdown()
def test_little_big(self):
    """A big endian request does not match a context accepted with a
    little endian transfer syntax."""
    self.ae = ae = AE()
    for attr in ('acse_timeout', 'dimse_timeout', 'network_timeout'):
        setattr(ae, attr, 5)
    ae.add_supported_context(VerificationSOPClass)
    ae.add_supported_context(MRImageStorage, ExplicitVRLittleEndian)
    ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian)
    server = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(MRImageStorage, ExplicitVRBigEndian)
    ae.add_requested_context(MRImageStorage, ExplicitVRLittleEndian)
    ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian)
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    # The big endian MR context wasn't accepted and no conversion from
    # the little endian context is offered
    msg = (
        r"No presentation context for 'MR Image Storage' has been "
        r"accepted by the peer with 'Explicit VR Big Endian' transfer "
        r"syntax for the SCU role"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(MRImageStorage, ExplicitVRBigEndian, 'scu')
    assoc.release()
    server.shutdown()
def test_ups_push_action(self, caplog):
    """Test matching UPS Push to other UPS contexts.

    N-ACTION with UPS Push when only UPS Pull was accepted should fall
    back to the UPS Pull context (and log the fallback).
    """
    def handle(event, cx):
        # Record which presentation context the SCP actually received
        cx.append(event.context)
        return 0x0000, None

    self.ae = ae = AE()
    ae.network_timeout = 5
    ae.dimse_timeout = 5
    ae.acse_timeout = 5
    ae.add_supported_context(UnifiedProcedureStepPullSOPClass)
    contexts = []
    handlers = [(evt.EVT_N_ACTION, handle, [contexts])]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    ae.add_requested_context(UnifiedProcedureStepPullSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    msg = (
        r"No exact matching context found for 'Unified Procedure Step "
        r"- Push SOP Class', checking accepted contexts for other UPS "
        r"SOP classes"
    )
    ds = Dataset()
    ds.TransactionUID = '1.2.3.4'
    with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
        # Send using UPS Push even though only UPS Pull was accepted
        status, rsp = assoc.send_n_action(
            ds, 1, UnifiedProcedureStepPushSOPClass, '1.2.3'
        )
        assert msg in caplog.text
        assoc.release()

    # The SCP received the request on the UPS Pull context
    assert contexts[0].abstract_syntax == UnifiedProcedureStepPullSOPClass
    scp.shutdown()
def test_ups_push_get(self, caplog):
    """N-GET with UPS Push falls back to another accepted UPS context."""
    self.ae = ae = AE()
    for attr in ('network_timeout', 'dimse_timeout', 'acse_timeout'):
        setattr(ae, attr, 5)
    ae.add_supported_context(UnifiedProcedureStepPullSOPClass)
    server = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(UnifiedProcedureStepPullSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    msg = (
        r"No exact matching context found for 'Unified Procedure Step "
        r"- Push SOP Class', checking accepted contexts for other UPS "
        r"SOP classes"
    )
    with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
        # Only UPS Pull was accepted; the Push request is redirected
        status, rsp = assoc.send_n_get(
            [0x00100010], UnifiedProcedureStepPushSOPClass, '1.2.3'
        )
        assert msg in caplog.text
        assoc.release()

    server.shutdown()
def test_ups_push_set(self, caplog):
    """Test matching UPS Push to other UPS contexts.

    N-SET with UPS Push when only UPS Pull was accepted should fall
    back to the UPS Pull context (and log the fallback).
    """
    self.ae = ae = AE()
    ae.network_timeout = 5
    ae.dimse_timeout = 5
    ae.acse_timeout = 5
    ae.add_supported_context(UnifiedProcedureStepPullSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(UnifiedProcedureStepPullSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    msg = (
        r"No exact matching context found for 'Unified Procedure Step "
        r"- Push SOP Class', checking accepted contexts for other UPS "
        r"SOP classes"
    )
    ds = Dataset()
    ds.TransactionUID = '1.2.3.4'
    with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
        status, rsp = assoc.send_n_set(
            ds, UnifiedProcedureStepPushSOPClass, '1.2.3'
        )
        assert msg in caplog.text
        assoc.release()

    scp.shutdown()
def test_ups_push_er(self, caplog):
    """Test matching UPS Push to other UPS contexts.

    N-EVENT-REPORT with UPS Push when only UPS Pull was accepted should
    fall back to the UPS Pull context (and log the fallback).
    """
    self.ae = ae = AE()
    ae.network_timeout = 5
    ae.dimse_timeout = 5
    ae.acse_timeout = 5
    ae.add_supported_context(UnifiedProcedureStepPullSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(UnifiedProcedureStepPullSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    msg = (
        r"No exact matching context found for 'Unified Procedure Step "
        r"- Push SOP Class', checking accepted contexts for other UPS "
        r"SOP classes"
    )
    ds = Dataset()
    ds.TransactionUID = '1.2.3.4'
    with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
        status, rsp = assoc.send_n_event_report(
            ds, 1, UnifiedProcedureStepPushSOPClass, '1.2.3'
        )
        assert msg in caplog.text
        assoc.release()

    scp.shutdown()
def test_ups_push_find(self, caplog):
    """Test matching UPS Push to other UPS contexts.

    C-FIND with UPS Push when only UPS Pull was accepted should fall
    back to the UPS Pull context (and log the fallback).
    """
    self.ae = ae = AE()
    ae.network_timeout = 5
    ae.dimse_timeout = 5
    ae.acse_timeout = 5
    ae.add_supported_context(UnifiedProcedureStepPullSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    ae.add_requested_context(UnifiedProcedureStepPullSOPClass)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    msg = (
        r"No exact matching context found for 'Unified Procedure Step "
        r"- Push SOP Class', checking accepted contexts for other UPS "
        r"SOP classes"
    )
    ds = Dataset()
    ds.TransactionUID = '1.2.3.4'
    with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
        responses = assoc.send_c_find(ds, UnifiedProcedureStepPushSOPClass)
        assert msg in caplog.text
        assoc.release()

    scp.shutdown()
def test_allow_conversion(self):
    """Test allow_conversion=False.

    With conversion disabled, a context accepted with implicit VR must
    not be used for an explicit VR request, even though both are
    uncompressed.
    """
    self.ae = ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian)
    ae.add_supported_context(CTImageStorage, ExplicitVRLittleEndian)
    scp = ae.start_server(('', 11112), block=False)
    # Deliberately only request implicit VR so no explicit VR context
    # is accepted
    ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian)
    #ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    msg = (
        r"No presentation context for 'CT Image Storage' has been "
        r"accepted by the peer with 'Explicit VR"
    )
    with pytest.raises(ValueError, match=msg):
        assoc._get_valid_context(
            CTImageStorage,
            ExplicitVRLittleEndian,
            'scu',
            allow_conversion=False
        )
    assoc.release()
    scp.shutdown()
class TestEventHandlingAcceptor(object):
"""Test the transport events and handling as acceptor."""
def setup(self):
    """Run prior to each test: reset state and silence event logging."""
    self.ae = None
    _config.LOG_HANDLER_LEVEL = 'none'
def teardown(self):
    """Run after each test: stop any AE threads and restore logging."""
    if self.ae:
        self.ae.shutdown()
    _config.LOG_HANDLER_LEVEL = 'standard'
def test_no_handlers(self):
    """Test with no association event handlers bound.

    The server, the requestor association and the acceptor's child
    association should all report empty handler lists.
    """
    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    # Server before any association
    assert scp.get_handlers(evt.EVT_ABORTED) == []
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Server after the association was made
    assert scp.get_handlers(evt.EVT_ABORTED) == []
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    # Requestor side association
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    # Acceptor's child association
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ABORTED) == []
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assert child.get_handlers(evt.EVT_REJECTED) == []
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    assoc.release()
    scp.shutdown()
def test_no_handlers_unbind(self):
    """Test unbinding a handler that's not bound.

    Unbinding a never-bound handler must be a no-op on the server, the
    requestor association and the acceptor's child association.
    """
    _config.LOG_HANDLER_LEVEL = 'standard'
    def dummy(event):
        pass

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    # Server
    assert dummy not in scp._handlers[evt.EVT_DIMSE_SENT]
    scp.unbind(evt.EVT_DIMSE_SENT, dummy)
    assert dummy not in scp._handlers[evt.EVT_DIMSE_SENT]
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Requestor side association
    assert dummy not in assoc._handlers[evt.EVT_DIMSE_SENT]
    assoc.unbind(evt.EVT_DIMSE_SENT, dummy)
    assert dummy not in assoc._handlers[evt.EVT_DIMSE_SENT]
    # Acceptor's child association
    child = scp.active_associations[0]
    assert dummy not in child._handlers[evt.EVT_DIMSE_SENT]
    child.unbind(evt.EVT_DIMSE_SENT, dummy)
    assert dummy not in child._handlers[evt.EVT_DIMSE_SENT]
    assoc.release()
    scp.shutdown()
def test_unbind_intervention(self):
    """Unbinding a user intervention handler restores the default."""
    def noop(event):
        pass

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    server = ae.start_server(('', 11112), block=False)
    server.bind(evt.EVT_C_ECHO, noop)
    assert (noop, None) == server.get_handlers(evt.EVT_C_ECHO)
    server.unbind(evt.EVT_C_ECHO, noop)
    # The default C-ECHO intervention handler takes over again
    assert (noop, None) != server.get_handlers(evt.EVT_C_ECHO)
    assert (evt._c_echo_handler, None) == server.get_handlers(evt.EVT_C_ECHO)
    server.shutdown()
def test_unbind_intervention_assoc(self):
    """Test unbinding a user intervention handler.

    Unbinding on the server should also revert the acceptor's child
    association to the default intervention handler.
    """
    def dummy(event):
        pass

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    scp.bind(evt.EVT_C_ECHO, dummy)
    assert scp.get_handlers(evt.EVT_C_ECHO) == (dummy, None)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Child inherits the bound handler
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_C_ECHO) == (dummy, None)
    scp.unbind(evt.EVT_C_ECHO, dummy)
    # Both server and child revert to the default intervention handler
    assert scp.get_handlers(evt.EVT_C_ECHO) != (dummy, None)
    assert scp.get_handlers(evt.EVT_C_ECHO) == (evt._c_echo_handler, None)
    assert child.get_handlers(evt.EVT_C_ECHO) != (dummy, None)
    assert child.get_handlers(evt.EVT_C_ECHO) == (
        evt._c_echo_handler, None
    )
    assoc.release()
    scp.shutdown()
def test_abort(self):
    """Test starting with handler bound to EVT_ABORTED.

    The handler is only bound on the server (and inherited by its child
    association), not on the requestor, and fires once on abort.
    """
    triggered = []
    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ABORTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Only EVT_ABORTED is bound on the server
    assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    # Requestor side association has no handlers
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    # Acceptor's child association inherits the server's handler
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assert child.get_handlers(evt.EVT_REJECTED) == []
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    assoc.abort()
    # Wait until the acceptor has processed the abort
    while scp.active_associations:
        time.sleep(0.05)

    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ABORTED'
    scp.shutdown()
def test_abort_bind(self):
    """Test binding a handler to EVT_ABORTED.

    The server starts with no handlers; binding EVT_ABORTED after an
    association exists propagates the handler to the acceptor's child
    association, which then fires once on abort.
    """
    triggered = []
    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    # Fix: removed the unused ``handlers`` local - this test starts the
    # server without handlers and binds via ``scp.bind()`` below
    scp = ae.start_server(('', 11112), block=False)
    # No handlers bound anywhere yet
    assert scp.get_handlers(evt.EVT_ABORTED) == []
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ABORTED) == []
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ABORTED) == []
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assert child.get_handlers(evt.EVT_REJECTED) == []
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    # Bind after the association was made
    scp.bind(evt.EVT_ABORTED, handle)
    assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    # The requestor is unaffected by the server-side bind
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    # The child association picks up the newly bound handler
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assert child.get_handlers(evt.EVT_REJECTED) == []
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)

    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ABORTED'
    scp.shutdown()
def test_abort_unbind(self):
    """Test unbinding a handler bound to EVT_ABORTED.

    After unbinding, the handler is gone from the server and from the
    acceptor's child association, and an abort no longer triggers it.
    """
    triggered = []
    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ABORTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    # Fix: this line previously duplicated the EVT_RELEASED check and
    # EVT_REQUESTED was never asserted
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assert child.get_handlers(evt.EVT_REJECTED) == []
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    # Unbind while the association is active
    scp.unbind(evt.EVT_ABORTED, handle)
    assert scp.get_handlers(evt.EVT_ABORTED) == []
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    # The child association loses the handler too
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ABORTED) == []
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assert child.get_handlers(evt.EVT_REJECTED) == []
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)

    # Handler was unbound so the abort never reached it
    assert len(triggered) == 0
    scp.shutdown()
def test_abort_local(self):
    """EVT_ABORTED fires when the abort is requested locally by the
    acceptor's own child association."""
    events_seen = []

    def on_abort(event):
        events_seen.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    server = ae.start_server(
        ('', 11112), block=False,
        evt_handlers=[(evt.EVT_ABORTED, on_abort)]
    )
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert 1 == len(server.active_associations)
    # Abort from the acceptor (child association) side
    server.active_associations[0].abort()
    while server.active_associations:
        time.sleep(0.05)

    assert 1 == len(events_seen)
    event = events_seen[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert 'EVT_ABORTED' == event.event.name
    server.shutdown()
def test_abort_raises(self, caplog):
    """Test the handler for EVT_ABORTED raising an exception.

    An exception in a user's notification handler must not crash the
    association machinery; it is logged instead.
    """
    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ABORTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.abort()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

    # The raised exception is reported via the error log
    msg = (
        "Exception raised in user's 'evt.EVT_ABORTED' event handler"
        " 'handle'"
    )
    assert msg in caplog.text
    assert "Exception description" in caplog.text
def test_accept(self):
    """Test starting with handler bound to EVT_ACCEPTED.

    Only the server and its child association carry the handler; it
    fires once when the association is accepted.
    """
    triggered = []
    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ACCEPTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Only EVT_ACCEPTED is bound on the server
    assert scp.get_handlers(evt.EVT_ABORTED) == []
    assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ABORTED) == []
    assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    # Requestor side association has no handlers
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    # Acceptor's child association inherits the server's handler
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ABORTED) == []
    assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assert child.get_handlers(evt.EVT_REJECTED) == []
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)

    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ACCEPTED'
    scp.shutdown()
def test_accept_bind(self):
    """Test binding a handler to EVT_ACCEPTED after the server starts.

    Fix: removed the unused local ``handlers`` list — the server is
    deliberately started with no handlers so the later ``bind()`` call
    is what is under test.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    # Start with nothing bound; the handler is bound mid-test below
    scp = ae.start_server(('', 11112), block=False)
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    # Nothing bound at acceptance time, so nothing triggered yet
    assert len(triggered) == 0
    scp.bind(evt.EVT_ACCEPTED, handle)
    assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    # Binding on the server propagates to existing child associations
    assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    assoc2 = ae.associate('localhost', 11112)
    assoc.release()
    assoc2.release()
    while scp.active_associations:
        time.sleep(0.05)
    # Only assoc2 was accepted after the bind, so a single trigger
    assert len(triggered) == 1
    assert triggered[0].event.name == 'EVT_ACCEPTED'
    scp.shutdown()
def test_accept_unbind(self):
    """Test unbinding a handler from EVT_ACCEPTED.

    Fix: the docstring previously described the wrong scenario
    ("starting with handler bound") — this test unbinds mid-test.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ACCEPTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)]
    # The first association was accepted while the handler was bound
    assert len(triggered) == 1
    assert triggered[0].event.name == "EVT_ACCEPTED"
    scp.unbind(evt.EVT_ACCEPTED, handle)
    assert scp.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    # Unbinding propagates to existing child associations too
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ACCEPTED) == []
    assoc2 = ae.associate('localhost', 11112)
    assoc.release()
    assoc2.release()
    while scp.active_associations:
        time.sleep(0.05)
    # assoc2 was accepted after the unbind, so no further triggers
    assert len(triggered) == 1
    scp.shutdown()
def test_accept_raises(self, caplog):
    """Test that an exception in the EVT_ACCEPTED handler is logged."""
    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_ACCEPTED, handle)],
    )
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.abort()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The exception must be logged, not propagated to the caller
        expected = (
            "Exception raised in user's 'evt.EVT_ACCEPTED' event handler"
            " 'handle'"
        )
        assert expected in caplog.text
        assert "Exception description" in caplog.text
def test_release(self):
    """Test starting the server with a handler bound to EVT_RELEASED."""
    triggered = []

    def handle(event):
        triggered.append(event)

    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_RELEASED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Only EVT_RELEASED carries the handler on the server
    for event_type in notification_events:
        expected = [(handle, None)] if event_type == evt.EVT_RELEASED else []
        assert scp.get_handlers(event_type) == expected
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    for event_type in notification_events:
        expected = [(handle, None)] if event_type == evt.EVT_RELEASED else []
        assert scp.get_handlers(event_type) == expected
    # The requestor-side association never inherits server handlers
    for event_type in notification_events:
        assert assoc.get_handlers(event_type) == []
    # The child (acceptor) association inherits the server's handlers
    child = scp.active_associations[0]
    for event_type in notification_events:
        expected = [(handle, None)] if event_type == evt.EVT_RELEASED else []
        assert child.get_handlers(event_type) == expected
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_RELEASED'
    scp.shutdown()
def test_release_bind(self):
    """Test binding a handler to EVT_RELEASED after the server starts.

    Fix: removed the unused local ``handlers`` list — the server is
    deliberately started without handlers so ``bind()`` is what is
    under test.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_RELEASED) == []
    scp.bind(evt.EVT_RELEASED, handle)
    assert scp.get_handlers(evt.EVT_RELEASED) == [(handle, None)]
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    # Binding on the server propagates to existing child associations
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_RELEASED) == [(handle, None)]
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_RELEASED'
    scp.shutdown()
def test_release_unbind(self):
    """Test unbinding a handler from EVT_RELEASED.

    Fix: the docstring previously referred to EVT_ABORTED (copy-paste);
    this test binds and then unbinds an EVT_RELEASED handler.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_RELEASED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    scp.unbind(evt.EVT_RELEASED, handle)
    assert scp.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    # Unbinding propagates to existing child associations
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_RELEASED) == []
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    # Handler was removed before the release, so never triggered
    assert len(triggered) == 0
    scp.shutdown()
def test_release_local(self):
    """Test the EVT_RELEASED handler with a locally requested release.

    Fix: the docstring previously said "abort" (copy-paste from the
    abort tests); the acceptor-side association requests a release.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_RELEASED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # The acceptor (local) side initiates the release
    scp.active_associations[0].release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_RELEASED'
    scp.shutdown()
def test_release_raises(self, caplog):
    """Test that an exception in the EVT_RELEASED handler is logged."""
    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_RELEASED, handle)],
    )
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.release()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The exception must be logged, not propagated to the caller
        expected = (
            "Exception raised in user's 'evt.EVT_RELEASED' event handler"
            " 'handle'"
        )
        assert expected in caplog.text
        assert "Exception description" in caplog.text
def test_established(self):
    """Test starting with a handler bound to EVT_ESTABLISHED.

    Fix: one of the requestor-side assertions checked EVT_ESTABLISHED
    twice (copy-paste) and never checked EVT_RELEASED; the loop below
    covers each notification event exactly once.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ESTABLISHED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Only EVT_ESTABLISHED carries the handler on the server
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_ESTABLISHED else []
        )
        assert scp.get_handlers(event_type) == expected
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_ESTABLISHED else []
        )
        assert scp.get_handlers(event_type) == expected
    # The requestor-side association never inherits server handlers
    for event_type in notification_events:
        assert assoc.get_handlers(event_type) == []
    # The child (acceptor) association inherits the server's handlers
    child = scp.active_associations[0]
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_ESTABLISHED else []
        )
        assert child.get_handlers(event_type) == expected
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ESTABLISHED'
    scp.shutdown()
def test_established_bind(self):
    """Test binding a handler to EVT_ESTABLISHED after the server starts.

    Fix: removed the unused local ``handlers`` list — the server is
    deliberately started without handlers so ``bind()`` is what is
    under test.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    # Bind before the association so the handler fires on establishment
    scp.bind(evt.EVT_ESTABLISHED, handle)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == [(handle, None)]
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ESTABLISHED) == [(handle, None)]
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ESTABLISHED'
    scp.shutdown()
def test_established_unbind(self):
    """Test unbinding a handler from EVT_ESTABLISHED.

    Fix: the docstring previously referred to EVT_ABORTED (copy-paste);
    this test starts with an EVT_ESTABLISHED handler and unbinds it.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ESTABLISHED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Unbind before any association so the handler never fires
    scp.unbind(evt.EVT_ESTABLISHED, handle)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ESTABLISHED) == []
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 0
    scp.shutdown()
def test_established_raises(self, caplog):
    """Test that an exception in the EVT_ESTABLISHED handler is logged."""
    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_ESTABLISHED, handle)],
    )
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.release()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The exception must be logged, not propagated to the caller
        expected = (
            "Exception raised in user's 'evt.EVT_ESTABLISHED' event handler"
            " 'handle'"
        )
        assert expected in caplog.text
        assert "Exception description" in caplog.text
def test_requested(self):
    """Test starting with a handler bound to EVT_REQUESTED.

    Fix: the requestor-side assertions checked EVT_REQUESTED three
    times (copy-paste) and never checked EVT_ESTABLISHED or
    EVT_RELEASED; the loop below covers each event exactly once.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_REQUESTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Only EVT_REQUESTED carries the handler on the server
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_REQUESTED else []
        )
        assert scp.get_handlers(event_type) == expected
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_REQUESTED else []
        )
        assert scp.get_handlers(event_type) == expected
    # The requestor-side association never inherits server handlers
    for event_type in notification_events:
        assert assoc.get_handlers(event_type) == []
    # The child (acceptor) association inherits the server's handlers
    child = scp.active_associations[0]
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_REQUESTED else []
        )
        assert child.get_handlers(event_type) == expected
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_REQUESTED'
    scp.shutdown()
def test_requested_bind(self):
    """Test binding a handler to EVT_REQUESTED after the server starts.

    Fix: removed the unused local ``handlers`` list — the server is
    deliberately started without handlers so ``bind()`` is what is
    under test.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    # Bind before the association so the handler fires on the request
    scp.bind(evt.EVT_REQUESTED, handle)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_REQUESTED) == [(handle, None)]
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_REQUESTED) == [(handle, None)]
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_REQUESTED'
    scp.shutdown()
def test_requested_unbind(self):
    """Test unbinding a handler from EVT_REQUESTED.

    Fix: the docstring previously referred to EVT_ABORTED (copy-paste);
    this test starts with an EVT_REQUESTED handler and unbinds it.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_REQUESTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Unbind before any association so the handler never fires
    scp.unbind(evt.EVT_REQUESTED, handle)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert scp.get_handlers(evt.EVT_REQUESTED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_REQUESTED) == []
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 0
    scp.shutdown()
def test_requested_raises(self, caplog):
    """Test that an exception in the EVT_REQUESTED handler is logged."""
    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_REQUESTED, handle)],
    )
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.release()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The exception must be logged, not propagated to the caller
        expected = (
            "Exception raised in user's 'evt.EVT_REQUESTED' event handler"
            " 'handle'"
        )
        assert expected in caplog.text
        assert "Exception description" in caplog.text
def test_rejected(self):
    """Test starting with a handler bound to EVT_REJECTED.

    Fix: the requestor-side assertions checked EVT_REJECTED three times
    (copy-paste) and never checked EVT_ESTABLISHED or EVT_RELEASED; the
    loop below covers each notification event exactly once.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    # Requiring the called AE title forces the association to be rejected
    ae.require_called_aet = True
    ae.add_supported_context(CTImageStorage)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_REJECTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Only EVT_REJECTED carries the handler on the server
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_REJECTED else []
        )
        assert scp.get_handlers(event_type) == expected
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_rejected
    for event_type in notification_events:
        expected = (
            [(handle, None)] if event_type == evt.EVT_REJECTED else []
        )
        assert scp.get_handlers(event_type) == expected
    # The requestor-side association never inherits server handlers
    for event_type in notification_events:
        assert assoc.get_handlers(event_type) == []
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_REJECTED'
    scp.shutdown()
def test_rejected_bind(self):
    """Test binding a handler to EVT_REJECTED after the server starts.

    Fix: removed the unused local ``handlers`` list — the server is
    deliberately started without handlers so ``bind()`` is what is
    under test.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    # Requiring the called AE title forces the association to be rejected
    ae.require_called_aet = True
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    scp.bind(evt.EVT_REJECTED, handle)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_rejected
    assert scp.get_handlers(evt.EVT_REJECTED) == [(handle, None)]
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_REJECTED'
    scp.shutdown()
def test_rejected_unbind(self):
    """Test unbinding a handler from EVT_REJECTED.

    Fix: the docstring previously referred to EVT_ABORTED (copy-paste);
    this test starts with an EVT_REJECTED handler and unbinds it.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    # Requiring the called AE title forces the association to be rejected
    ae.require_called_aet = True
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_REJECTED, handle)]
    scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
    # Unbind before any association so the handler never fires
    scp.unbind(evt.EVT_REJECTED, handle)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_rejected
    assert scp.get_handlers(evt.EVT_REJECTED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == []
    assoc.release()
    assert len(triggered) == 0
    scp.shutdown()
def test_rejected_raises(self, caplog):
    """Test that an exception in the EVT_REJECTED handler is logged."""
    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    # Requiring the called AE title forces the association to be rejected
    ae.require_called_aet = True
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_REJECTED, handle)],
    )
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_rejected

        scp.shutdown()

        # The exception must be logged, not propagated to the caller
        expected = (
            "Exception raised in user's 'evt.EVT_REJECTED' event handler"
            " 'handle'"
        )
        assert expected in caplog.text
        assert "Exception description" in caplog.text
def test_optional_args(self):
    """Test that extra handler arguments are passed through on trigger."""
    arguments = []

    def handle(event, *args):
        arguments.append(args)

    args = ['a', 1, {'test': 1}]
    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_ACCEPTED, handle, args)],
    )
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # The (handler, args) pair is stored on the server and its child
    assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, args)]
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, args)]
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)
    scp.shutdown()
    # The handler was invoked once with the extra args unchanged
    assert len(arguments) == 1
    assert list(arguments[0]) == args
def test_optional_args_intervention(self):
    """Test extra handler arguments with an intervention event."""
    arguments = []

    def handle_echo(event, *args):
        arguments.append(args)
        return 0x0000

    args = ['a', 1, {'test': 1}]
    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(
        ('', 11112),
        block=False,
        evt_handlers=[(evt.EVT_C_ECHO, handle_echo, args)],
    )
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Intervention events hold a single (handler, args) tuple, not a list
    assert scp.get_handlers(evt.EVT_C_ECHO) == (handle_echo, args)
    child = scp.active_associations[0]
    assert child.get_handlers(evt.EVT_C_ECHO) == (handle_echo, args)
    status = assoc.send_c_echo()
    assert status.Status == 0x0000
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)
    scp.shutdown()
    # The handler was invoked once with the extra args unchanged
    assert len(arguments) == 1
    assert list(arguments[0]) == args
class TestEventHandlingRequestor(object):
"""Test the transport events and handling as acceptor."""
def setup(self):
    """Run before each test: silence the log handler and clear the AE."""
    _config.LOG_HANDLER_LEVEL = 'none'
    self.ae = None
def teardown(self):
    """Run after each test: shut down any AE and restore log handling."""
    ae = self.ae
    if ae:
        ae.shutdown()
    _config.LOG_HANDLER_LEVEL = 'standard'
def test_no_handlers(self):
    """Test that no association event handlers are bound by default."""
    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    for event_type in notification_events:
        assert scp.get_handlers(event_type) == []
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Neither side should have any handlers bound
    for event_type in notification_events:
        assert scp.get_handlers(event_type) == []
        assert assoc.get_handlers(event_type) == []
    child = scp.active_associations[0]
    for event_type in notification_events:
        assert child.get_handlers(event_type) == []
    assoc.release()
    scp.shutdown()
def test_unbind_not_event(self):
    """Test that unbinding from an event with no handlers is a no-op."""
    def no_op(event):
        pass

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Nothing bound, so unbinding must leave the handler list empty
    assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == []
    assoc.unbind(evt.EVT_DIMSE_SENT, no_op)
    assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == []
    assoc.release()
    scp.shutdown()
def test_unbind_notification_none(self):
    """Test unbinding a handler that was never bound."""
    def bound_handler(event):
        pass

    def other_handler(event):
        pass

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assoc.bind(evt.EVT_DIMSE_SENT, bound_handler)
    assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == [(bound_handler, None)]
    # Unbinding a different handler must not disturb the bound one
    assoc.unbind(evt.EVT_DIMSE_SENT, other_handler)
    assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == [(bound_handler, None)]
    assoc.release()
    scp.shutdown()
def test_unbind_intervention(self):
    """Test unbinding a user intervention handler restores the default."""
    def user_handler(event):
        pass

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assoc.bind(evt.EVT_C_ECHO, user_handler)
    assert assoc.get_handlers(evt.EVT_C_ECHO) == (user_handler, None)
    assoc.unbind(evt.EVT_C_ECHO, user_handler)
    # Unbinding an intervention handler reinstates the library default
    assert assoc.get_handlers(evt.EVT_C_ECHO) != (user_handler, None)
    assert assoc.get_handlers(evt.EVT_C_ECHO) == (
        evt._c_echo_handler, None
    )
    assoc.release()
    scp.shutdown()
def test_abort(self):
    """Test associating with a handler bound to EVT_ABORTED."""
    triggered = []

    def handle(event):
        triggered.append(event)

    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate(
        'localhost', 11112, evt_handlers=[(evt.EVT_ABORTED, handle)]
    )
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Only EVT_ABORTED carries the handler on the requestor side
    for event_type in notification_events:
        expected = [(handle, None)] if event_type == evt.EVT_ABORTED else []
        assert assoc.get_handlers(event_type) == expected
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ABORTED'
    scp.shutdown()
def test_abort_bind(self):
    """Test binding a handler to EVT_ABORTED on the requestor."""
    triggered = []

    def handle(event):
        triggered.append(event)

    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Nothing bound before the explicit bind() call
    for event_type in notification_events:
        assert assoc.get_handlers(event_type) == []
    assoc.bind(evt.EVT_ABORTED, handle)
    # Only EVT_ABORTED carries the handler after binding
    for event_type in notification_events:
        expected = [(handle, None)] if event_type == evt.EVT_ABORTED else []
        assert assoc.get_handlers(event_type) == expected
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ABORTED'
    scp.shutdown()
def test_abort_unbind(self):
    """Test unbinding a handler from EVT_ABORTED on the requestor."""
    triggered = []

    def handle(event):
        triggered.append(event)

    notification_events = [
        evt.EVT_ABORTED, evt.EVT_ACCEPTED, evt.EVT_ESTABLISHED,
        evt.EVT_REJECTED, evt.EVT_RELEASED, evt.EVT_REQUESTED,
    ]

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate(
        'localhost', 11112, evt_handlers=[(evt.EVT_ABORTED, handle)]
    )
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Only EVT_ABORTED carries the handler initially
    for event_type in notification_events:
        expected = [(handle, None)] if event_type == evt.EVT_ABORTED else []
        assert assoc.get_handlers(event_type) == expected
    assoc.unbind(evt.EVT_ABORTED, handle)
    # After unbinding nothing remains on any event
    for event_type in notification_events:
        assert assoc.get_handlers(event_type) == []
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 0
    scp.shutdown()
def test_abort_remote(self):
    """Test the EVT_ABORTED handler when the peer requests the abort.

    Fix: the docstring previously said "local requested abort" — here
    the abort is issued by the acceptor's child association, i.e. the
    remote peer from the handler-owning requestor's point of view.
    """
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ABORTED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # The acceptor side (remote peer) initiates the abort
    scp.active_associations[0].abort()
    while scp.active_associations:
        time.sleep(0.05)
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ABORTED'
    scp.shutdown()
def test_abort_raises(self, caplog):
    """Test the handler for EVT_ABORTED raising an exception.

    Fix: the docstring previously referred to EVT_ACCEPTED
    (copy-paste); the handler here is bound to EVT_ABORTED.
    """
    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ABORTED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
        assert assoc.is_established
        assoc.abort()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The exception must be logged, not propagated to the caller
        msg = (
            "Exception raised in user's 'evt.EVT_ABORTED' event handler"
            " 'handle'"
        )
        assert msg in caplog.text
        assert "Exception description" in caplog.text
def test_accept(self):
    """Test starting with handler bound to EVT_ACCEPTED."""
    captured = []

    def on_accepted(event):
        captured.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate(
        'localhost', 11112,
        evt_handlers=[(evt.EVT_ACCEPTED, on_accepted)],
    )
    assert assoc.is_established
    assert len(scp.active_associations) == 1

    # The handler should be bound to EVT_ACCEPTED and nothing else
    expected = {
        evt.EVT_ABORTED: [],
        evt.EVT_ACCEPTED: [(on_accepted, None)],
        evt.EVT_ESTABLISHED: [],
        evt.EVT_REJECTED: [],
        evt.EVT_RELEASED: [],
        evt.EVT_REQUESTED: [],
    }
    for event_type, bound in expected.items():
        assert assoc.get_handlers(event_type) == bound

    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)

    # Acceptance of the association should have triggered the handler once
    assert len(captured) == 1
    event = captured[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ACCEPTED'
    scp.shutdown()
def test_accept_raises(self, caplog):
    """Test the handler for EVT_ACCEPTED raising exception."""

    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ACCEPTED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
        assert assoc.is_established
        assoc.abort()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The handler's exception should be logged by pynetdicom rather
        # than propagated to the caller
        msg = (
            "Exception raised in user's 'evt.EVT_ACCEPTED' event handler"
            " 'handle'"
        )
        assert msg in caplog.text
        assert "Exception description" in caplog.text
def test_release(self):
    """Test starting with handler bound to EVT_RELEASED."""
    captured = []

    def on_released(event):
        captured.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate(
        'localhost', 11112,
        evt_handlers=[(evt.EVT_RELEASED, on_released)],
    )
    assert assoc.is_established
    assert len(scp.active_associations) == 1

    # The handler should be bound to EVT_RELEASED and nothing else
    expected = {
        evt.EVT_ABORTED: [],
        evt.EVT_ACCEPTED: [],
        evt.EVT_ESTABLISHED: [],
        evt.EVT_REJECTED: [],
        evt.EVT_RELEASED: [(on_released, None)],
        evt.EVT_REQUESTED: [],
    }
    for event_type, bound in expected.items():
        assert assoc.get_handlers(event_type) == bound

    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)

    # Releasing the association should have triggered the handler once
    assert len(captured) == 1
    event = captured[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_RELEASED'
    scp.shutdown()
def test_release_bind(self):
    """Test binding a handler to EVT_RELEASED on an established association."""
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    # Associate without any event handlers; the handler is bound afterwards
    assoc = ae.associate('localhost', 11112)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assoc.bind(evt.EVT_RELEASED, handle)
    assert assoc.get_handlers(evt.EVT_RELEASED) == [(handle, None)]
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)

    # The handler bound after establishment should have been triggered once
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_RELEASED'
    scp.shutdown()
def test_release_unbind(self):
    """Test unbinding a handler from EVT_RELEASED."""
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_RELEASED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert assoc.get_handlers(evt.EVT_RELEASED) == [(handle, None)]
    # Unbind before releasing; the handler must no longer be triggered
    assoc.unbind(evt.EVT_RELEASED, handle)
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)

    assert len(triggered) == 0
    scp.shutdown()
def test_release_remote(self):
    """Test the handler bound to EVT_RELEASED with remotely requested release."""
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_RELEASED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    # Release from the acceptor (remote) side; the requestor's bound
    # handler should still be triggered
    scp.active_associations[0].release()
    while scp.active_associations:
        time.sleep(0.05)

    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_RELEASED'
    scp.shutdown()
def test_release_raises(self, caplog):
    """Test the handler for EVT_RELEASED raising exception."""

    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_RELEASED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
        assert assoc.is_established
        assoc.release()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The handler's exception should be logged by pynetdicom rather
        # than propagated to the caller
        msg = (
            "Exception raised in user's 'evt.EVT_RELEASED' event handler"
            " 'handle'"
        )
        assert msg in caplog.text
        assert "Exception description" in caplog.text
def test_established(self):
    """Test starting with handler bound to EVT_ESTABLISHED."""
    captured = []

    def on_established(event):
        captured.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate(
        'localhost', 11112,
        evt_handlers=[(evt.EVT_ESTABLISHED, on_established)],
    )
    assert assoc.is_established
    assert len(scp.active_associations) == 1

    # The handler should be bound to EVT_ESTABLISHED and nothing else
    expected = {
        evt.EVT_ABORTED: [],
        evt.EVT_ACCEPTED: [],
        evt.EVT_ESTABLISHED: [(on_established, None)],
        evt.EVT_REJECTED: [],
        evt.EVT_RELEASED: [],
        evt.EVT_REQUESTED: [],
    }
    for event_type, bound in expected.items():
        assert assoc.get_handlers(event_type) == bound

    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)

    # Establishing the association should have triggered the handler once
    assert len(captured) == 1
    event = captured[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_ESTABLISHED'
    scp.shutdown()
def test_established_raises(self, caplog):
    """Test the handler for EVT_ESTABLISHED raising exception."""

    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ESTABLISHED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
        assert assoc.is_established
        assoc.release()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The handler's exception should be logged by pynetdicom rather
        # than propagated to the caller
        msg = (
            "Exception raised in user's 'evt.EVT_ESTABLISHED' event handler"
            " 'handle'"
        )
        assert msg in caplog.text
        assert "Exception description" in caplog.text
def test_requested(self):
    """Test starting with handler bound to EVT_REQUESTED."""
    captured = []

    def on_requested(event):
        captured.append(event)

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate(
        'localhost', 11112,
        evt_handlers=[(evt.EVT_REQUESTED, on_requested)],
    )
    assert assoc.is_established
    assert len(scp.active_associations) == 1

    # The handler should be bound to EVT_REQUESTED and nothing else
    expected = {
        evt.EVT_ABORTED: [],
        evt.EVT_ACCEPTED: [],
        evt.EVT_ESTABLISHED: [],
        evt.EVT_REJECTED: [],
        evt.EVT_RELEASED: [],
        evt.EVT_REQUESTED: [(on_requested, None)],
    }
    for event_type, bound in expected.items():
        assert assoc.get_handlers(event_type) == bound

    assoc.release()
    while scp.active_associations:
        time.sleep(0.05)

    # Requesting the association should have triggered the handler once
    assert len(captured) == 1
    event = captured[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_REQUESTED'
    scp.shutdown()
def test_requested_raises(self, caplog):
    """Test the handler for EVT_REQUESTED raising exception."""

    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_REQUESTED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
        assert assoc.is_established
        assoc.release()
        while scp.active_associations:
            time.sleep(0.05)

        scp.shutdown()

        # The handler's exception should be logged by pynetdicom rather
        # than propagated to the caller
        msg = (
            "Exception raised in user's 'evt.EVT_REQUESTED' event handler"
            " 'handle'"
        )
        assert msg in caplog.text
        assert "Exception description" in caplog.text
def test_rejected(self):
    """Test starting with handler bound to EVT_REJECTED."""
    triggered = []

    def handle(event):
        triggered.append(event)

    self.ae = ae = AE()
    # Configure the acceptor so the association request is rejected
    # (asserted via assoc.is_rejected below)
    ae.require_called_aet = True
    ae.add_supported_context(CTImageStorage)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_REJECTED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
    assert assoc.is_rejected
    # Only EVT_REJECTED should have a bound handler
    assert assoc.get_handlers(evt.EVT_ABORTED) == []
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
    assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
    assert assoc.get_handlers(evt.EVT_REJECTED) == [(handle, None)]
    assert assoc.get_handlers(evt.EVT_RELEASED) == []
    assert assoc.get_handlers(evt.EVT_REQUESTED) == []
    assert len(triggered) == 1
    event = triggered[0]
    assert isinstance(event, Event)
    assert isinstance(event.assoc, Association)
    assert isinstance(event.timestamp, datetime)
    assert event.event.name == 'EVT_REJECTED'
    scp.shutdown()
def test_rejected_raises(self, caplog):
    """Test the handler for EVT_REJECTED raising exception."""

    def handle(event):
        raise NotImplementedError("Exception description")

    self.ae = ae = AE()
    # Configure the acceptor so the association request is rejected
    ae.require_called_aet = True
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_REJECTED, handle)]
    scp = ae.start_server(('', 11112), block=False)
    with caplog.at_level(logging.ERROR, logger='pynetdicom'):
        assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
        assert assoc.is_rejected
        scp.shutdown()

        # The handler's exception should be logged by pynetdicom rather
        # than propagated to the caller
        msg = (
            "Exception raised in user's 'evt.EVT_REJECTED' event handler"
            " 'handle'"
        )
        assert msg in caplog.text
        assert "Exception description" in caplog.text
def test_optional_args(self):
    """Test passing optional arguments to the handler."""
    arguments = []

    def handle(event, *args):
        arguments.append(args)

    # Extra positional args are supplied as the third element of the
    # (event, handler, args) binding tuple
    args = ['a', 1, {'test': 1}]
    self.ae = ae = AE()
    ae.add_supported_context(VerificationSOPClass)
    ae.add_requested_context(VerificationSOPClass)
    handlers = [(evt.EVT_ACCEPTED, handle, args)]
    scp = ae.start_server(('', 11112), block=False)
    assoc = ae.associate('localhost', 11112, evt_handlers=handlers)
    assert assoc.is_established
    assert len(scp.active_associations) == 1
    assert assoc.get_handlers(evt.EVT_ACCEPTED) == [(handle, args)]
    assoc.abort()
    while scp.active_associations:
        time.sleep(0.05)

    scp.shutdown()
    # The handler should have been called once with the extra args
    assert len(arguments) == 1
    assert args == list(arguments[0])
| 34.029613 | 99 | 0.617813 | 25,842 | 233,273 | 5.382981 | 0.021515 | 0.025362 | 0.05334 | 0.048761 | 0.942009 | 0.931089 | 0.923138 | 0.91224 | 0.902744 | 0.893967 | 0 | 0.025057 | 0.284195 | 233,273 | 6,854 | 100 | 34.034578 | 0.808029 | 0.049603 | 0 | 0.877473 | 0 | 0 | 0.043533 | 0.001714 | 0 | 0 | 0.005433 | 0 | 0.219368 | 1 | 0.075533 | false | 0.002473 | 0.004186 | 0.009893 | 0.097983 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6abdddfcf91ea7eb291718507d5ba47b054f4c4f | 23,761 | py | Python | geniepy/tests/testdata.py | geralddzx/genie | a21d7e9397a24b200ac04806501ceb64685408ec | [
"MIT"
] | 1 | 2020-04-10T21:35:05.000Z | 2020-04-10T21:35:05.000Z | geniepy/tests/testdata.py | geralddzx/genie | a21d7e9397a24b200ac04806501ceb64685408ec | [
"MIT"
] | null | null | null | geniepy/tests/testdata.py | geralddzx/genie | a21d7e9397a24b200ac04806501ceb64685408ec | [
"MIT"
] | 2 | 2020-04-09T04:42:30.000Z | 2020-04-10T21:35:59.000Z | """Module with data used in tests."""
# flake8: noqa
# pylint: skip-file
import pandas as pd
PUBMED_INVALID_SCHEMA = [
pd.DataFrame(
{
# Missing required column
"date_completed": ["--"],
"pub_model": ["Print-Electronic"],
"title": ["Biochemia medica"],
"iso_abbreviation": ["Biochem Med (Zagreb)"],
"article_title": [
"High anion gap metabolic acidosis caused by D-lactate: mind the time of blood collection." # NOQA
],
"abstract": [
"D-lactic acidosis is an uncommon cause of high anion gap acidosis."
],
"authors": ["Weemaes, Matthias, Hiele, Martin, Vermeersch, Pieter"],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
}
),
pd.DataFrame(
{
"pmid": 31839729,
"not column": [""], # Invalid Column
"pub_model": ["Print-Electronic"],
"title": ["Biochemia medica"],
"iso_abbreviation": ["Biochem Med (Zagreb)"],
"article_title": [
"Unexpected abnormal coagulation test results in a 2-year-old child: A case report."
],
"abstract": [
"Rejection of the sample with repeated blood withdrawal is always an unwanted consequence of sample nonconformity and preanalytical errors, especially in the most vulnerable population - children. Here is presented a case with unexpected abnormal coagulation test results in a 2-year-old child with no previously documented coagulation disorder. Child is planned for tympanostomy tubes removal under the anaesthesia driven procedure, and preoperative coagulation tests revealed prolonged prothrombin time, activated partial thromboplastin time and thrombin time, with fibrinogen and antithrombin within reference intervals. From the anamnestic and clinical data, congenital coagulation disorder was excluded, and with further investigation, sample mismatch, clot presence and accidental ingestion of oral anticoagulant, heparin contamination or vitamin K deficiency were excluded too. Due to suspected EDTA carryover during blood sampling another sample was taken the same day and all tests were performed again. The results for all tests were within reference intervals confirming EDTA effect on falsely prolongation of the coagulation times in the first sample. This case can serve as alert to avoid unnecessary loss in terms of blood withdrawal repetitions and discomfort of the patients and their relatives, tests repeating, prolonging medical procedures, and probably delaying diagnosis or proper medical treatment. It is the responsibility of the laboratory specialists to continuously educate laboratory staff and other phlebotomists on the correct blood collection as well as on its importance for the patient's safety."
],
"authors": [
"Banković Radovanović, Patricija, Živković Mikulčić, Tanja, Simović Medica, Jasmina"
],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
}
),
]
PUBMED_INVALID_DF = [
pd.DataFrame(
{
"pmid": ["31839728"], # pmid as string
"date_completed": ["--"],
"pub_model": ["Print-Electronic"],
"title": ["Biochemia medica"],
"iso_abbreviation": ["Biochem Med (Zagreb)"],
"article_title": [
"High anion gap metabolic acidosis caused by D-lactate: mind the time of blood collection."
],
"abstract": [
"D-lactic acidosis is an uncommon cause of high anion gap acidosis."
],
"authors": ["Weemaes, Matthias, Hiele, Martin, Vermeersch, Pieter"],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
}
),
pd.DataFrame(
{
"pmid": 31839729,
# missing column
"pub_model": ["Print-Electronic"],
"title": ["Biochemia medica"],
"iso_abbreviation": ["Biochem Med (Zagreb)"],
"article_title": [
"Unexpected abnormal coagulation test results in a 2-year-old child: A case report."
],
"abstract": [
"Rejection of the sample with repeated blood withdrawal is always an unwanted consequence of sample nonconformity and preanalytical errors, especially in the most vulnerable population - children. Here is presented a case with unexpected abnormal coagulation test results in a 2-year-old child with no previously documented coagulation disorder. Child is planned for tympanostomy tubes removal under the anaesthesia driven procedure, and preoperative coagulation tests revealed prolonged prothrombin time, activated partial thromboplastin time and thrombin time, with fibrinogen and antithrombin within reference intervals. From the anamnestic and clinical data, congenital coagulation disorder was excluded, and with further investigation, sample mismatch, clot presence and accidental ingestion of oral anticoagulant, heparin contamination or vitamin K deficiency were excluded too. Due to suspected EDTA carryover during blood sampling another sample was taken the same day and all tests were performed again. The results for all tests were within reference intervals confirming EDTA effect on falsely prolongation of the coagulation times in the first sample. This case can serve as alert to avoid unnecessary loss in terms of blood withdrawal repetitions and discomfort of the patients and their relatives, tests repeating, prolonging medical procedures, and probably delaying diagnosis or proper medical treatment. It is the responsibility of the laboratory specialists to continuously educate laboratory staff and other phlebotomists on the correct blood collection as well as on its importance for the patient's safety."
],
"authors": [
"Banković Radovanović, Patricija, Živković Mikulčić, Tanja, Simović Medica, Jasmina"
],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
}
),
pd.DataFrame(
{
"pmid": 31839730,
"date_completed": ["--"],
"pub_model": ["Print-Electronic"],
"title": ["Narrative inquiry : NI"],
"iso_abbreviation": ["Narrat Inq"],
"article_title": [
"Narrative Assessments with First Grade Spanish-English Emergent Bilinguals: Spontaneous versus Retell Conditions."
],
"abstract": [
"This study used qualitative analyses to investigate similarities and differences in narrative production across two task conditions for four first grade Spanish-English emergent bilingual children. Task conditions were spontaneous story generation and retelling using the same story. Spanish stories from two children were compared on the basis of similarity in vocabulary, while English stories from two children were compared on the basis of similarity in overall discourse skills. Results show that when the total number of words used was similar across English narratives, the retell included more different words and higher quality story structure than the spontaneous story. When overall discourse scores in the Spanish examples were similar, the spontaneous story required more words than the retell, but also included more central events and greater detail. Yet, the retell included more advanced narrative components. This study contributes to our understanding of narrative skills in young Spanish-English bilinguals across task conditions."
],
"authors": ["Lucero, Audrey, Uchikoshi, Yuuko"],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
"other col": [""], # Extra column
}
),
pd.DataFrame(
{
"pmid": 31839731,
"date_completed": ["--"],
"pub_model": ["Print"],
"title": ["Acta ortopedica brasileira"],
"iso_abbreviation": ["Acta Ortop Bras"],
"article_title": [
"CHRONIC MONTEGGIA FRACTURE-DISLOCATION IN CHILDREN SURGICAL STRATEGY AND RESULTS."
],
"abstract": [
"To report surgical techniques and results in the treatment of chronic Monteggia fracture-dislocation in children."
],
"authors": [
"Soni, Jamil Faissal, Valenza, Weverley Rubele, Matsunaga, Carolina Umeta, Costa, Anna Carolina Pavelec, Faria, Fernando Ferraz"
],
"language": ["eng"],
# Missing column
"mesh_list": [""],
}
),
pd.DataFrame(
{
"pmid": 31839732,
"date_completed": ["--"],
"pub_model": ["Print"],
"title": ["Acta ortopedica brasileira"],
# Missing Column
# Missing Column
"abstract": [
"To evaluate the efficacy of platelet-rich plasma (PRP) and tranexamic acid (TXA) applied in total knee arthroplasty."
],
"authors": [
"Guerreiro, João Paulo Fernandes, Lima, Diogenes Rodrigues, Bordignon, Glaucia, Danieli, Marcus Vinicius, Queiroz, Alexandre Oliveira, Cataneo, Daniele Cristina"
],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
}
),
]
"""Array of invalid PubMed dataframes."""
PUBMED_VALID_DF = [
pd.DataFrame(
{
"pmid": 31839728,
"date_completed": ["--"],
"pub_model": ["Print-Electronic"],
"title": ["Biochemia medica"],
"iso_abbreviation": ["Biochem Med (Zagreb)"],
"article_title": [
"High anion gap metabolic acidosis caused by D-lactate: mind the time of blood collection."
],
"abstract": [
"D-lactic acidosis is an uncommon cause of high anion gap acidosis."
],
"authors": ["Weemaes, Matthias, Hiele, Martin, Vermeersch, Pieter"],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
"issn": ["1234-5678"],
"issn_type": ["Print"],
"citation_count": 0,
"citation_pmid": [""],
}
),
pd.DataFrame(
{
"pmid": 31839729,
"date_completed": ["--"],
"pub_model": ["Print-Electronic"],
"title": ["Biochemia medica"],
"iso_abbreviation": ["Biochem Med (Zagreb)"],
"article_title": [
"Unexpected abnormal coagulation test results in a 2-year-old child: A case report."
],
"abstract": [
"Rejection of the sample with repeated blood withdrawal is always an unwanted consequence of sample nonconformity and preanalytical errors, especially in the most vulnerable population - children. Here is presented a case with unexpected abnormal coagulation test results in a 2-year-old child with no previously documented coagulation disorder. Child is planned for tympanostomy tubes removal under the anaesthesia driven procedure, and preoperative coagulation tests revealed prolonged prothrombin time, activated partial thromboplastin time and thrombin time, with fibrinogen and antithrombin within reference intervals. From the anamnestic and clinical data, congenital coagulation disorder was excluded, and with further investigation, sample mismatch, clot presence and accidental ingestion of oral anticoagulant, heparin contamination or vitamin K deficiency were excluded too. Due to suspected EDTA carryover during blood sampling another sample was taken the same day and all tests were performed again. The results for all tests were within reference intervals confirming EDTA effect on falsely prolongation of the coagulation times in the first sample. This case can serve as alert to avoid unnecessary loss in terms of blood withdrawal repetitions and discomfort of the patients and their relatives, tests repeating, prolonging medical procedures, and probably delaying diagnosis or proper medical treatment. It is the responsibility of the laboratory specialists to continuously educate laboratory staff and other phlebotomists on the correct blood collection as well as on its importance for the patient's safety."
],
"authors": [
"Banković Radovanović, Patricija, Živković Mikulčić, Tanja, Simović Medica, Jasmina"
],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
"issn": ["1234-5678"],
"issn_type": ["Print"],
"citation_count": 0,
"citation_pmid": [""],
}
),
pd.DataFrame(
{
"pmid": 31839730,
"date_completed": ["--"],
"pub_model": ["Print-Electronic"],
"title": ["Narrative inquiry : NI"],
"iso_abbreviation": ["Narrat Inq"],
"article_title": [
"Narrative Assessments with First Grade Spanish-English Emergent Bilinguals: Spontaneous versus Retell Conditions."
],
"abstract": [
"This study used qualitative analyses to investigate similarities and differences in narrative production across two task conditions for four first grade Spanish-English emergent bilingual children. Task conditions were spontaneous story generation and retelling using the same story. Spanish stories from two children were compared on the basis of similarity in vocabulary, while English stories from two children were compared on the basis of similarity in overall discourse skills. Results show that when the total number of words used was similar across English narratives, the retell included more different words and higher quality story structure than the spontaneous story. When overall discourse scores in the Spanish examples were similar, the spontaneous story required more words than the retell, but also included more central events and greater detail. Yet, the retell included more advanced narrative components. This study contributes to our understanding of narrative skills in young Spanish-English bilinguals across task conditions."
],
"authors": ["Lucero, Audrey, Uchikoshi, Yuuko"],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
"issn": ["1234-5678"],
"issn_type": ["Print"],
"citation_count": 0,
"citation_pmid": [""],
}
),
pd.DataFrame(
{
"pmid": 31839731,
"date_completed": ["--"],
"pub_model": ["Print"],
"title": ["Acta ortopedica brasileira"],
"iso_abbreviation": ["Acta Ortop Bras"],
"article_title": [
"CHRONIC MONTEGGIA FRACTURE-DISLOCATION IN CHILDREN SURGICAL STRATEGY AND RESULTS."
],
"abstract": [
"To report surgical techniques and results in the treatment of chronic Monteggia fracture-dislocation in children."
],
"authors": [
"Soni, Jamil Faissal, Valenza, Weverley Rubele, Matsunaga, Carolina Umeta, Costa, Anna Carolina Pavelec, Faria, Fernando Ferraz"
],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
"issn": ["1234-5678"],
"issn_type": ["Print"],
"citation_count": 0,
"citation_pmid": [""],
}
),
pd.DataFrame(
{
"pmid": 31839732,
"date_completed": ["--"],
"pub_model": ["Print"],
"title": ["Acta ortopedica brasileira"],
"iso_abbreviation": ["Acta Ortop Bras"],
"article_title": [
"PLATELET-RICH PLASMA (PRP) AND TRANEXAMIC ACID (TXA) APPLIED IN TOTAL KNEE ARTHROPLASTY."
],
"abstract": [
"To evaluate the efficacy of platelet-rich plasma (PRP) and tranexamic acid (TXA) applied in total knee arthroplasty."
],
"authors": [
"Guerreiro, João Paulo Fernandes, Lima, Diogenes Rodrigues, Bordignon, Glaucia, Danieli, Marcus Vinicius, Queiroz, Alexandre Oliveira, Cataneo, Daniele Cristina"
],
"language": ["eng"],
"chemicals": [""],
"mesh_list": [""],
"issn": ["1234-5678"],
"issn_type": ["Print"],
"citation_count": 0,
"citation_pmid": [""],
}
),
]
"""Array of valid PubMed dataframes."""
CTD_INVALID_SCHEMA = [
pd.DataFrame(
{
# Missing Digest required field
"genesymbol": ["11-BETA-HSD3"],
"geneid": [100174880],
"diseasename": ["Abnormalities, Drug-Induced"],
"diseaseid": ["D000014"],
"pmids": ["22659286"],
}
),
pd.DataFrame(
{
"OtherField": [22659286], # Non-existent schema column
"digest": [22659286],
"genesymbol": ["11-BETA-HSD3"],
"geneid": [100174880],
"diseasename": ["Abnormalities, Drug-Induced"],
"diseaseid": ["D000014"],
"pmids": ["22659286"],
}
),
pd.DataFrame(
{
"digest": [22659286],
"genesymbol": ["11-BETA-HSD3"],
# Missing required GeneID
"diseasename": ["Abnormalities, Drug-Induced"],
"diseaseid": ["D000014"],
"pmids": ["22659286"],
}
),
pd.DataFrame(
{
"digest": [22659286],
"genesymbol": ["11-BETA-HSD3"],
"geneid": [100174880],
"diseasename": ["Abnormalities, Drug-Induced"],
# Missing required DiseaseID
"pmids": ["22659286"],
}
),
pd.DataFrame(
{
"digest": [22659286],
"genesymbol": ["11-BETA-HSD3"],
"geneid": [100174880],
"diseasename": ["Abnormalities, Drug-Induced"],
"diseaseid": ["D000014"],
# Missing required PubMed
}
),
]
"""Invalid SCHEMA record."""
CTD_INVALID_DF = [
None,
pd.DataFrame({"invalid": [1, 2]}),
pd.DataFrame(
{
"digest": [22659286],
"genesymbol": ["11-BETA-HSD3"],
"geneid": ["A100174880"], # Should be Int
"diseasename": ["Abnormalities, Drug-Induced"],
"diseaseid": ["D000014"],
"pmids": ["22659286"],
}
),
pd.DataFrame(
{
"digest": [22659286],
"genesymbol": ["11-BETA-HSD3"],
"geneid": [100174880.0], # Should be Int
"diseasename": ["Abnormalities, Drug-Induced"],
"diseaseid": ["D000014"],
"pmids": ["22659286"],
}
),
pd.DataFrame(
{
"digest": [22659286],
"genesymbol": ["11-BETA-HSD3"],
"geneid": [100174880],
"diseasename": ["Abnormalities, Drug-Induced"],
"diseaseid": ["MESH:D000014"], # Should not have "MESH:"
"pmids": ["22659286"],
}
),
] + CTD_INVALID_SCHEMA
"""Array of invalid CTD DataFrames because violate parser rules."""
# CTD dataframes that satisfy both the table schema and the parser rules.
CTD_VALID_DF = [
    pd.DataFrame(
        {
            "digest": [
                "b3834d9281286247e377c5700e9689c3660412df24fa0a4921c6e3c213d616aa"
            ],
            "genesymbol": ["11-BETA-HSD3"],
            "geneid": [10174880],
            "diseasename": ["Abnormalities, Drug-Induced"],
            "diseaseid": ["D000014"],
            "pmids": ["22659286"],
        }
    ),
    pd.DataFrame(
        {
            "digest": [
                # NOTE(review): 65 hex chars — one more than a SHA-256
                # digest (64); confirm this fixture value is intentional
                "e96cc1eb2423dad1fd6f4f341574fbb7fff0479a3339c9e2e4f814f2d970e3f00"
            ],
            "genesymbol": ["1-SF3"],
            "geneid": [1000494280],
            "diseasename": ["Infant Death"],
            "diseaseid": ["D0660884"],
            "pmids": ["283930756"],
        }
    ),
    pd.DataFrame(
        {
            "digest": [
                "e120bfecd61ee146bb3a4c61f8dbb93754b1db25f62aba505872ef5568dd155b"
            ],
            "genesymbol": ["11-BETA-HSD3"],
            "geneid": [100174880],
            "diseasename": ["Abnormalities, Drug-Induced"],
            "diseaseid": ["D000014"],
            "pmids": ["22659286"],
        }
    ),
    pd.DataFrame(
        {
            "digest": [
                "f847dcfeaaae8fefa96e1f6e97dd5998ffe07365d1254badae927d1769a2eebc"
            ],
            "genesymbol": ["A1BG3"],
            "geneid": [1],
            "diseasename": ["Muscle Weaknessd"],
            "diseaseid": ["D0189084"],
            # Multiple PMIDs are pipe-delimited in a single string
            "pmids": ["3515563|54800|62135|63766|6511338|7995496"],
        }
    ),
]
"""Array of valid CTD DataFrames."""
# Classifier-output dataframes that satisfy the classifier schema:
# a digest plus float pub_score/ct_score columns.
CLSFR_VALID_DF = [
    pd.DataFrame(
        {
            "digest": [
                "b3834d9281286247e377c5700e9689c3660412df24fa0a4921c6e3c213d616aa"
            ],
            "pub_score": [0.8],
            "ct_score": [0.7],
        }
    ),
    pd.DataFrame(
        {
            "digest": [
                "e96cc1eb2423dad1fd6f4f341574fbb7fff0479a3339c9e2e4f814f2d970e3f00"
            ],
            "pub_score": [0.8],
            "ct_score": [0.7],
        }
    ),
    pd.DataFrame(
        {
            "digest": [
                "e96cc1eb2423dad1fd6f4f341574fbb7fff0479a3339c9e2e4f814f2d970e3f00"
            ],
            "pub_score": [0.8],
            "ct_score": [0.7],
        }
    ),
]
"""Array of valid classifier dataframes."""
# Classifier dataframes whose column values violate the expected types.
CLSFR_INVALID_DF = [
    pd.DataFrame(
        {
            "digest": [
                "b3834d9281286247e377c5700e9689c3660412df24fa0a4921c6e3c213d616aa"
            ],
            "pub_score": ["1.2"],  # pub_score should be float, not str
            "ct_score": [0.7],
        }
    ),
    pd.DataFrame(
        {
            "digest": [
                "b3834d9281286247e377c5700e9689c3660412df24fa0a4921c6e3c213d616aa"
            ],
            # Missing pub_score
            "ct_score": [0.7],
        }
    ),
    pd.DataFrame(
        {
            "digest": [
                "e96cc1eb2423dad1fd6f4f341574fbb7fff0479a3339c9e2e4f814f2d970e3f00"
            ],
            "pub_score": [0.8],
            "ct_score": ["0.75"],  # ct_score should be float, not str
        }
    ),
]
"""Array of invalid classifier dataframes."""
CLSFR_INVALID_SCHEMA = [
pd.DataFrame(
{
"digest": [
"b3834d9281286247e377c5700e9689c3660412df24fa0a4921c6e3c213d616aa"
],
# Missing GeneID
"ct_score": [0.7],
}
),
pd.DataFrame(
{
"digest": [
"e96cc1eb2423dad1fd6f4f341574fbb7fff0479a3339c9e2e4f814f2d970e3f00"
],
"pub_score": [0.8],
"ct_score": [0.7],
"newcol": "newcol",
}
),
pd.DataFrame(
{
# missing digest
"pub_score": [0.8],
"ct_score": [0.7],
}
),
pd.DataFrame(
{
"digest": [
"e96cc1eb2423dad1fd6f4f341574fbb7fff0479a3339c9e2e4f814f2d970e3f00"
],
"pub_score": [0.8],
# Missing DiseaseID
}
),
]
"""Array of valid classifier SCHEMA table schema."""
| 45.087287 | 1,644 | 0.581289 | 2,211 | 23,761 | 6.197648 | 0.194934 | 0.028096 | 0.023571 | 0.019266 | 0.90856 | 0.886302 | 0.879515 | 0.86492 | 0.86492 | 0.861198 | 0 | 0.066287 | 0.313034 | 23,761 | 526 | 1,645 | 45.173004 | 0.773203 | 0.02037 | 0 | 0.765914 | 0 | 0.022587 | 0.611329 | 0.038391 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.008214 | 0 | 0.008214 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
6ad05a8399f3b7dac4b500f2f74262a98a3360a0 | 91 | py | Python | tests/md_module.py | SaidBySolo/dnspython | d4b33cefcf10d8f014c490263568d5f849c8350d | [
"ISC"
] | null | null | null | tests/md_module.py | SaidBySolo/dnspython | d4b33cefcf10d8f014c490263568d5f849c8350d | [
"ISC"
] | 6 | 2021-12-21T21:30:25.000Z | 2022-03-24T21:29:34.000Z | tests/md_module.py | SaidBySolo/dnspython | d4b33cefcf10d8f014c490263568d5f849c8350d | [
"ISC"
] | null | null | null | import dns.rdtypes.nsbase
class MD(dns.rdtypes.nsbase.NSBase):
    """Test MD record.

    The class body is empty, so all parsing/encoding behavior comes
    from NSBase.
    """
| 15.166667 | 36 | 0.692308 | 13 | 91 | 4.846154 | 0.615385 | 0.31746 | 0.507937 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 91 | 5 | 37 | 18.2 | 0.807692 | 0.164835 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6adf55fe03c63d4b687ae37eb3142128c4e92bc0 | 59,142 | py | Python | glance/tests/unit/v2/test_registry_api.py | qweraqq/glance | 65b62485dfa336d26b1eae2d26a7b5e6495109a7 | [
"Apache-2.0"
] | null | null | null | glance/tests/unit/v2/test_registry_api.py | qweraqq/glance | 65b62485dfa336d26b1eae2d26a7b5e6495109a7 | [
"Apache-2.0"
] | 1 | 2021-03-21T11:38:31.000Z | 2021-03-21T11:38:31.000Z | glance/tests/unit/v2/test_registry_api.py | qweraqq/glance | 65b62485dfa336d26b1eae2d26a7b5e6495109a7 | [
"Apache-2.0"
] | 1 | 2021-03-21T11:38:02.000Z | 2021-03-21T11:38:02.000Z | # -*- coding: utf-8 -*-
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
from oslo_config import cfg
from oslo_serialization import jsonutils
import routes
import six
import webob
import glance.api.common
import glance.common.config
from glance.common import timeutils
import glance.context
from glance.db.sqlalchemy import api as db_api
from glance.db.sqlalchemy import models as db_models
from glance.registry.api import v2 as rserver
from glance.tests.unit import base
from glance.tests import utils as test_utils
# Global config handle shared by the tests.
CONF = cfg.CONF


def _gen_uuid():
    """Return a fresh random UUID as a string."""
    # PEP 8 (E731): a named def is preferred over assigning a lambda.
    return str(uuid.uuid4())


# Ids of the two images seeded by TestRegistryRPC.setUp().
UUID1 = _gen_uuid()
UUID2 = _gen_uuid()
class TestRegistryRPC(base.IsolatedUnitTest):
def setUp(self):
    """Build the registry RPC API and seed the DB with two image fixtures."""
    super(TestRegistryRPC, self).setUp()
    self.mapper = routes.Mapper()
    # All requests go through fake auth middleware as an admin user.
    self.api = test_utils.FakeAuthMiddleware(rserver.API(self.mapper),
                                             is_admin=True)
    # UUID2's timestamps are 5s after UUID1's, so created_at ordering
    # between the two fixtures is deterministic.
    uuid1_time = timeutils.utcnow()
    uuid2_time = uuid1_time + datetime.timedelta(seconds=5)

    self.FIXTURES = [
        {'id': UUID1,
         'name': 'fake image #1',
         'status': 'active',
         'disk_format': 'ami',
         'container_format': 'ami',
         'is_public': False,
         'created_at': uuid1_time,
         'updated_at': uuid1_time,
         'deleted_at': None,
         'deleted': False,
         'checksum': None,
         'min_disk': 0,
         'min_ram': 0,
         'size': 13,
         'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID1),
                        'metadata': {}, 'status': 'active'}],
         'properties': {'type': 'kernel'}},
        {'id': UUID2,
         'name': 'fake image #2',
         'status': 'active',
         'disk_format': 'vhd',
         'container_format': 'ovf',
         'is_public': True,
         'created_at': uuid2_time,
         'updated_at': uuid2_time,
         'deleted_at': None,
         'deleted': False,
         'checksum': None,
         'min_disk': 5,
         'min_ram': 256,
         'size': 19,
         'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID2),
                        'metadata': {}, 'status': 'active'}],
         'properties': {}}]
    self.context = glance.context.RequestContext(is_admin=True)
    db_api.get_engine()
    # Start every test from a clean schema containing only the fixtures.
    self.destroy_fixtures()
    self.create_fixtures()
def tearDown(self):
    """Clear the test environment"""
    super(TestRegistryRPC, self).tearDown()
    # Leave an empty schema behind for the next test.
    self.destroy_fixtures()
def create_fixtures(self):
    """Insert every fixture into the DB and back it with a fake image file."""
    for fix in self.FIXTURES:
        db_api.image_create(self.context, fix)
        # Each registry record gets a small fake payload on disk.
        path = "%s/%s" % (self.test_dir, fix['id'])
        with open(path, 'wb') as image_file:
            image_file.write(b"chunk00000remainder")
            image_file.flush()
def destroy_fixtures(self):
    """Reset the DB schema by dropping and re-creating all models."""
    # Easiest to just drop the models and re-create them...
    db_models.unregister_models(db_api.get_engine())
    db_models.register_models(db_api.get_engine())
def _compare_images_and_uuids(self, uuids, images):
    """Assert that *images* carry exactly the ids in *uuids*, in order."""
    actual_ids = [image['id'] for image in images]
    self.assertListEqual(uuids, actual_ids)
def test_show(self):
    """Tests that registry API endpoint returns the expected image."""
    expected = {'id': UUID2,
                'name': 'fake image #2',
                'size': 19,
                'min_ram': 256,
                'min_disk': 5,
                'checksum': None}
    req = webob.Request.blank('/rpc')
    req.method = "POST"
    req.body = jsonutils.dump_as_bytes([{
        'command': 'image_get',
        'kwargs': {'image_id': UUID2},
    }])
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    # Every expected field must appear verbatim on the returned image.
    image = jsonutils.loads(res.body)[0]
    for key, value in six.iteritems(expected):
        self.assertEqual(value, image[key])
def test_show_unknown(self):
    """Tests the registry API endpoint returns 404 for an unknown id."""
    req = webob.Request.blank('/rpc')
    req.method = "POST"
    req.body = jsonutils.dump_as_bytes([{
        'command': 'image_get',
        'kwargs': {'image_id': _gen_uuid()},
    }])
    res = req.get_response(self.api)
    # The RPC layer serializes the exception class name into "_error".
    res_dict = jsonutils.loads(res.body)[0]
    self.assertEqual('glance.common.exception.ImageNotFound',
                     res_dict["_error"]["cls"])
def test_get_index(self):
    """Tests that the image_get_all command returns list of images."""
    filters = {'id': UUID2,
               'name': 'fake image #2',
               'size': 19,
               'checksum': None}
    req = webob.Request.blank('/rpc')
    req.method = "POST"
    req.body = jsonutils.dump_as_bytes([{
        'command': 'image_get_all',
        'kwargs': {'filters': filters},
    }])
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Only UUID2 matches the filters; check each filtered field round-trips.
    self.assertEqual(1, len(images))
    for key, value in six.iteritems(filters):
        self.assertEqual(value, images[0][key])
def test_get_index_marker(self):
    """Tests that the registry API returns list of public images.

    Must conform to a marker query param.
    """
    # Timestamps are staggered so creation order is UUID5, UUID4, UUID3.
    uuid5_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
    uuid4_time = uuid5_time + datetime.timedelta(seconds=5)
    uuid3_time = uuid4_time + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 19,
                     'checksum': None,
                     'created_at': uuid3_time,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid4_time,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    UUID5 = _gen_uuid()
    extra_fixture = {'id': UUID5,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid5_time,
                     'updated_at': uuid5_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID4, "is_public": True},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # should be sorted by created_at desc, id desc
    # page should start after marker 4
    uuid_list = [UUID5, UUID2]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_marker_and_name_asc(self):
    """Test marker and null name ascending

    Tests that the registry API returns 200
    when a marker and a null name are combined
    ascending order
    """
    UUID3 = _gen_uuid()
    # Marker image deliberately has a NULL name.
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': None,
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID3, 'sort_key': ['name'],
                   'sort_dir': ['asc']},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Both images seeded in setUp() follow the NULL-name marker.
    self.assertEqual(2, len(images))
def test_get_index_marker_and_name_desc(self):
    """Test marker and null name descending

    Tests that the registry API returns 200
    when a marker and a null name are combined
    descending order
    """
    UUID3 = _gen_uuid()
    # Marker image deliberately has a NULL name.
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': None,
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID3, 'sort_key': ['name'],
                   'sort_dir': ['desc']},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Nothing sorts after the NULL-name marker in descending order.
    self.assertEqual(0, len(images))
def test_get_index_marker_and_disk_format_asc(self):
    """Test marker and null disk format ascending

    Tests that the registry API returns 200
    when a marker and a null disk_format are combined
    ascending order
    """
    UUID3 = _gen_uuid()
    # Marker image deliberately has a NULL disk_format.
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': None,
                     'container_format': 'ovf',
                     'name': 'Fake image',
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID3, 'sort_key': ['disk_format'],
                   'sort_dir': ['asc']},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Both images seeded in setUp() follow the NULL-disk_format marker.
    self.assertEqual(2, len(images))
def test_get_index_marker_and_disk_format_desc(self):
    """Test marker and null disk format descending

    Tests that the registry API returns 200
    when a marker and a null disk_format are combined
    descending order
    """
    UUID3 = _gen_uuid()
    # Marker image deliberately has a NULL disk_format.
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': None,
                     'container_format': 'ovf',
                     'name': 'Fake image',
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID3, 'sort_key': ['disk_format'],
                   'sort_dir': ['desc']},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Nothing sorts after the NULL-disk_format marker in descending order.
    self.assertEqual(0, len(images))
def test_get_index_marker_and_container_format_asc(self):
    """Test marker and null container format ascending

    Tests that the registry API returns 200
    when a marker and a null container_format are combined
    ascending order
    """
    UUID3 = _gen_uuid()
    # Marker image deliberately has a NULL container_format.
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': None,
                     'name': 'Fake image',
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID3, 'sort_key': ['container_format'],
                   'sort_dir': ['asc']},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Both images seeded in setUp() follow the NULL-container_format marker.
    self.assertEqual(2, len(images))
def test_get_index_marker_and_container_format_desc(self):
    """Test marker and null container format descending

    Tests that the registry API returns 200
    when a marker and a null container_format are combined
    descending order
    """
    UUID3 = _gen_uuid()
    # Marker image deliberately has a NULL container_format.
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': None,
                     'name': 'Fake image',
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID3, 'sort_key': ['container_format'],
                   'sort_dir': ['desc']},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Nothing sorts after the NULL-container_format marker descending.
    self.assertEqual(0, len(images))
def test_get_index_unknown_marker(self):
    """Tests the registry API returns a NotFound with unknown marker."""
    req = webob.Request.blank('/rpc')
    req.method = "POST"
    req.body = jsonutils.dump_as_bytes([{
        'command': 'image_get_all',
        'kwargs': {'marker': _gen_uuid()},
    }])
    res = req.get_response(self.api)
    # Errors come back serialized under "_error" with the exception class.
    result = jsonutils.loads(res.body)[0]
    self.assertIn("_error", result)
    self.assertIn("NotFound", result["_error"]["cls"])
def test_get_index_limit(self):
    """Tests that the registry API returns list of public images.

    Must conform to a limit query param.
    """
    # UUID4 is created last, so it is first in created_at-desc order.
    uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
    uuid4_time = uuid3_time + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 19,
                     'checksum': None,
                     'created_at': uuid3_time,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid4_time,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'limit': 1},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    images = jsonutils.loads(res.body)[0]
    self.assertEqual(200, res.status_int)

    # With limit=1, only the newest image is returned.
    self._compare_images_and_uuids([UUID4], images)
def test_get_index_limit_marker(self):
    """Tests that the registry API returns list of public images.

    Must conform to limit and marker query params.
    """
    uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
    uuid4_time = uuid3_time + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 19,
                     'checksum': None,
                     'created_at': uuid3_time,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    extra_fixture = {'id': _gen_uuid(),
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid4_time,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'marker': UUID3, 'limit': 1},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    res_dict = jsonutils.loads(res.body)[0]
    self.assertEqual(200, res.status_int)

    images = res_dict
    # One page past the UUID3 marker is the next-older image, UUID2.
    self._compare_images_and_uuids([UUID2], images)
def test_get_index_filter_name(self):
    """Tests that the registry API returns list of public images.

    Use a specific name. This is really a sanity check, filtering is
    tested more in-depth using /images/detail
    """
    # Two extra images share the filtered name; the setUp() images do not.
    extra_fixture = {'id': _gen_uuid(),
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    extra_fixture = {'id': _gen_uuid(),
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'name': 'new name! #123'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    res_dict = jsonutils.loads(res.body)[0]
    self.assertEqual(200, res.status_int)

    images = res_dict
    self.assertEqual(2, len(images))

    for image in images:
        self.assertEqual('new name! #123', image['name'])
def test_get_index_filter_on_user_defined_properties(self):
    """Tests that the registry API returns list of public images.

    Use a specific user-defined properties.
    """
    properties = {'distro': 'ubuntu', 'arch': 'i386', 'type': 'kernel'}
    extra_id = _gen_uuid()
    extra_fixture = {'id': extra_id,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'image-extra-1',
                     'size': 19, 'properties': properties,
                     'checksum': None}
    db_api.image_create(self.context, extra_fixture)

    # testing with a common property.
    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'type': 'kernel'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    # Matches the extra image and the UUID1 fixture (both type=kernel).
    self.assertEqual(2, len(images))
    self.assertEqual(extra_id, images[0]['id'])
    self.assertEqual(UUID1, images[1]['id'])

    # testing with a non-existent value for a common property.
    # (A second identical request that appeared here was redundant and
    # has been removed.)
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'type': 'random'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    self.assertEqual(0, len(images))

    # testing with a non-existent property.
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'poo': 'random'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    self.assertEqual(0, len(images))

    # testing with multiple existing properties.
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'type': 'kernel', 'distro': 'ubuntu'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    self.assertEqual(1, len(images))
    self.assertEqual(extra_id, images[0]['id'])

    # testing with multiple existing properties but non-existent values.
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'type': 'random', 'distro': 'random'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    self.assertEqual(0, len(images))

    # testing with multiple non-existing properties.
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'typo': 'random', 'poo': 'random'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    self.assertEqual(0, len(images))

    # testing with one existing property and the other non-existing.
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'filters': {'type': 'kernel', 'poo': 'random'}},
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    images = jsonutils.loads(res.body)[0]
    self.assertEqual(0, len(images))
def test_get_index_sort_default_created_at_desc(self):
    """Tests that the registry API returns list of public images.

    Must conform to a default sort key/dir.
    """
    # Creation order (newest first) is UUID3, UUID4, UUID5.
    uuid5_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
    uuid4_time = uuid5_time + datetime.timedelta(seconds=5)
    uuid3_time = uuid4_time + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 19,
                     'checksum': None,
                     'created_at': uuid3_time,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid4_time,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    UUID5 = _gen_uuid()
    extra_fixture = {'id': UUID5,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid5_time,
                     'updated_at': uuid5_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    res_dict = jsonutils.loads(res.body)[0]
    self.assertEqual(200, res.status_int)

    images = res_dict
    # (flaper87)registry's v1 forced is_public to True
    # when no value was specified. This is not
    # the default behaviour anymore.
    uuid_list = [UUID3, UUID4, UUID5, UUID2, UUID1]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_name_asc(self):
    """Tests that the registry API returns list of public images.

    Must be sorted alphabetically by name in ascending order.
    """
    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'asdf',
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'xyz',
                     'size': 20,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    UUID5 = _gen_uuid()
    # NULL name: expected to sort before all non-null names ascending.
    extra_fixture = {'id': UUID5,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': None,
                     'size': 20,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['name'], 'sort_dir': ['asc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    uuid_list = [UUID5, UUID3, UUID1, UUID2, UUID4]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_status_desc(self):
    """Tests that the registry API returns list of public images.

    NOTE(review): despite the method name, the request below uses
    sort_dir ['asc'] and the expected id list is ascending by status
    ('active' images before the 'queued' one) — the original docstring
    claimed descending order.
    """
    uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'queued',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'asdf',
                     'size': 19,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'xyz',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid4_time,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['status'], 'sort_dir': ['asc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    uuid_list = [UUID1, UUID2, UUID4, UUID3]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_disk_format_asc(self):
    """Tests that the registry API returns list of public images.

    Must be sorted alphabetically by disk_format in ascending order.
    """
    uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'ami',
                     'container_format': 'ami',
                     'name': 'asdf',
                     'size': 19,
                     'checksum': None,
                     'created_at': uuid3_time,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vdi',
                     'container_format': 'ovf',
                     'name': 'xyz',
                     'size': 20,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['disk_format'], 'sort_dir': ['asc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    # ami (UUID1, UUID3), then vdi (UUID4), then vhd (UUID2).
    uuid_list = [UUID1, UUID3, UUID4, UUID2]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_container_format_desc(self):
    """Tests that the registry API returns list of public images.

    Must be sorted alphabetically by container_format in descending order.
    """
    uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'ami',
                     'container_format': 'ami',
                     'name': 'asdf',
                     'size': 19,
                     'checksum': None,
                     'created_at': uuid3_time,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'iso',
                     'container_format': 'bare',
                     'name': 'xyz',
                     'size': 20,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['container_format'],
                   'sort_dir': ['desc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    # ovf (UUID2), then bare (UUID4), then ami (UUID3, UUID1).
    uuid_list = [UUID2, UUID4, UUID3, UUID1]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_size_asc(self):
    """Tests that the registry API returns list of public images.

    Must be sorted by size in ascending order.
    """
    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'ami',
                     'container_format': 'ami',
                     'name': 'asdf',
                     'size': 100,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'iso',
                     'container_format': 'bare',
                     'name': 'xyz',
                     'size': 2,
                     'checksum': None}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['size'],
                   'sort_dir': ['asc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    # Sizes in ascending order: 2 (UUID4), 13 (UUID1), 19 (UUID2), 100 (UUID3).
    uuid_list = [UUID4, UUID1, UUID2, UUID3]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_created_at_asc(self):
    """Tests that the registry API returns list of public images.

    Must be sorted by created_at in ascending order.
    """
    # UUID3 is created 5s after UUID4; both are newer than the fixtures.
    uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
    uuid3_time = uuid4_time + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 19,
                     'checksum': None,
                     'created_at': uuid3_time,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': uuid4_time,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['created_at'],
                   'sort_dir': ['asc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    uuid_list = [UUID1, UUID2, UUID4, UUID3]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_updated_at_desc(self):
    """Tests that the registry API returns list of public images.

    Must be sorted by updated_at in descending order.
    """
    uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
    uuid3_time = uuid4_time + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    # created_at is left NULL on purpose; only updated_at drives the sort.
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 19,
                     'checksum': None,
                     'created_at': None,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'new name! #123',
                     'size': 20,
                     'checksum': None,
                     'created_at': None,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['updated_at'],
                   'sort_dir': ['desc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    uuid_list = [UUID3, UUID4, UUID2, UUID1]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_multiple_keys_one_sort_dir(self):
    """
    Tests that the registry API returns list of
    public images sorted by name-size and size-name with ascending
    sort direction.
    """
    uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
    uuid3_time = uuid4_time + datetime.timedelta(seconds=5)

    UUID3 = _gen_uuid()
    extra_fixture = {'id': UUID3,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'asdf',
                     'size': 19,
                     'checksum': None,
                     'created_at': None,
                     'updated_at': uuid3_time}

    db_api.image_create(self.context, extra_fixture)

    UUID4 = _gen_uuid()
    extra_fixture = {'id': UUID4,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'xyz',
                     'size': 20,
                     'checksum': None,
                     'created_at': None,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    UUID5 = _gen_uuid()
    # Same name as UUID3 but larger size — distinguishes the two key orders.
    extra_fixture = {'id': UUID5,
                     'status': 'active',
                     'is_public': True,
                     'disk_format': 'vhd',
                     'container_format': 'ovf',
                     'name': 'asdf',
                     'size': 20,
                     'checksum': None,
                     'created_at': None,
                     'updated_at': uuid4_time}

    db_api.image_create(self.context, extra_fixture)

    req = webob.Request.blank('/rpc')
    req.method = "POST"
    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['name', 'size'],
                   'sort_dir': ['asc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    # Primary key name, secondary key size, both ascending.
    uuid_list = [UUID3, UUID5, UUID1, UUID2, UUID4]
    self._compare_images_and_uuids(uuid_list, images)

    cmd = [{
        'command': 'image_get_all',
        'kwargs': {'sort_key': ['size', 'name'],
                   'sort_dir': ['asc']}
    }]
    req.body = jsonutils.dump_as_bytes(cmd)
    res = req.get_response(self.api)
    self.assertEqual(200, res.status_int)
    res_dict = jsonutils.loads(res.body)[0]

    images = res_dict
    # Primary key size, secondary key name, both ascending.
    uuid_list = [UUID1, UUID3, UUID2, UUID5, UUID4]
    self._compare_images_and_uuids(uuid_list, images)
def test_get_index_sort_multiple_keys_multiple_sort_dirs(self):
"""
Tests that the registry API returns list of
public images sorted by name-size and size-name
with ascending and descending directions.
"""
uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid3_time = uuid4_time + datetime.timedelta(seconds=5)
UUID3 = _gen_uuid()
extra_fixture = {'id': UUID3,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'asdf',
'size': 19,
'checksum': None,
'created_at': None,
'updated_at': uuid3_time}
db_api.image_create(self.context, extra_fixture)
UUID4 = _gen_uuid()
extra_fixture = {'id': UUID4,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'xyz',
'size': 20,
'checksum': None,
'created_at': None,
'updated_at': uuid4_time}
db_api.image_create(self.context, extra_fixture)
UUID5 = _gen_uuid()
extra_fixture = {'id': UUID5,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'asdf',
'size': 20,
'checksum': None,
'created_at': None,
'updated_at': uuid4_time}
db_api.image_create(self.context, extra_fixture)
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_get_all',
'kwargs': {'sort_key': ['name', 'size'],
'sort_dir': ['desc', 'asc']}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
images = res_dict
uuid_list = [UUID4, UUID2, UUID1, UUID3, UUID5]
self._compare_images_and_uuids(uuid_list, images)
cmd = [{
'command': 'image_get_all',
'kwargs': {'sort_key': ['size', 'name'],
'sort_dir': ['desc', 'asc']}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
images = res_dict
uuid_list = [UUID5, UUID4, UUID3, UUID2, UUID1]
self._compare_images_and_uuids(uuid_list, images)
cmd = [{
'command': 'image_get_all',
'kwargs': {'sort_key': ['name', 'size'],
'sort_dir': ['asc', 'desc']}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
images = res_dict
uuid_list = [UUID5, UUID3, UUID1, UUID2, UUID4]
self._compare_images_and_uuids(uuid_list, images)
cmd = [{
'command': 'image_get_all',
'kwargs': {'sort_key': ['size', 'name'],
'sort_dir': ['asc', 'desc']}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
images = res_dict
uuid_list = [UUID1, UUID2, UUID3, UUID4, UUID5]
self._compare_images_and_uuids(uuid_list, images)
def test_create_image(self):
"""Tests that the registry API creates the image"""
fixture = {'name': 'fake public image',
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf'}
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_create',
'kwargs': {'values': fixture}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
for k, v in six.iteritems(fixture):
self.assertEqual(v, res_dict[k])
# Test status was updated properly
self.assertEqual('active', res_dict['status'])
def test_create_image_with_min_disk(self):
"""Tests that the registry API creates the image"""
fixture = {'name': 'fake public image',
'is_public': True,
'status': 'active',
'min_disk': 5,
'disk_format': 'vhd',
'container_format': 'ovf'}
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_create',
'kwargs': {'values': fixture}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
self.assertEqual(fixture['min_disk'], res_dict['min_disk'])
def test_create_image_with_min_ram(self):
"""Tests that the registry API creates the image"""
fixture = {'name': 'fake public image',
'is_public': True,
'status': 'active',
'min_ram': 256,
'disk_format': 'vhd',
'container_format': 'ovf'}
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_create',
'kwargs': {'values': fixture}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
self.assertEqual(fixture['min_ram'], res_dict['min_ram'])
def test_create_image_with_min_ram_default(self):
"""Tests that the registry API creates the image"""
fixture = {'name': 'fake public image',
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf'}
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_create',
'kwargs': {'values': fixture}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
self.assertEqual(0, res_dict['min_ram'])
def test_create_image_with_min_disk_default(self):
"""Tests that the registry API creates the image"""
fixture = {'name': 'fake public image',
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf'}
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_create',
'kwargs': {'values': fixture}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
self.assertEqual(0, res_dict['min_disk'])
def test_update_image(self):
"""Tests that the registry API updates the image"""
fixture = {'name': 'fake public image #2',
'min_disk': 5,
'min_ram': 256,
'disk_format': 'raw'}
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_update',
'kwargs': {'values': fixture,
'image_id': UUID2}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
res_dict = jsonutils.loads(res.body)[0]
self.assertNotEqual(res_dict['created_at'],
res_dict['updated_at'])
for k, v in six.iteritems(fixture):
self.assertEqual(v, res_dict[k])
def _send_request(self, command, kwargs, method):
req = webob.Request.blank('/rpc')
req.method = method
cmd = [{'command': command, 'kwargs': kwargs}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
res_dict = jsonutils.loads(res.body)[0]
return res.status_int, res_dict
def _expect_fail(self, command, kwargs, error_cls, method='POST'):
# on any exception status_int is always 200, so have to check _error
# dict
code, res_dict = self._send_request(command, kwargs, method)
self.assertIn('_error', res_dict)
self.assertEqual(error_cls, res_dict['_error']['cls'])
return res_dict
def _expect_ok(self, command, kwargs, method, expected_status=200):
code, res_dict = self._send_request(command, kwargs)
self.assertEqual(expected_status, code)
return res_dict
def test_create_image_bad_name(self):
fixture = {'name': u'A bad name \U0001fff2', 'status': 'queued'}
self._expect_fail('image_create',
{'values': fixture},
'glance.common.exception.Invalid')
def test_create_image_bad_location(self):
fixture = {'status': 'queued',
'locations': [{'url': u'file:///tmp/tests/\U0001fee2',
'metadata': {},
'status': 'active'}]}
self._expect_fail('image_create',
{'values': fixture},
'glance.common.exception.Invalid')
def test_create_image_bad_property(self):
fixture = {'status': 'queued',
'properties': {'ok key': u' bad value \U0001f2aa'}}
self._expect_fail('image_create',
{'values': fixture},
'glance.common.exception.Invalid')
fixture = {'status': 'queued',
'properties': {u'invalid key \U00010020': 'ok value'}}
self._expect_fail('image_create',
{'values': fixture},
'glance.common.exception.Invalid')
def test_update_image_bad_tag(self):
self._expect_fail('image_tag_create',
{'value': u'\U0001fff2', 'image_id': UUID2},
'glance.common.exception.Invalid')
def test_update_image_bad_name(self):
fixture = {'name': u'A bad name \U0001fff2'}
self._expect_fail('image_update',
{'values': fixture, 'image_id': UUID1},
'glance.common.exception.Invalid')
def test_update_image_bad_location(self):
fixture = {'locations':
[{'url': u'file:///tmp/glance-tests/\U0001fee2',
'metadata': {},
'status': 'active'}]}
self._expect_fail('image_update',
{'values': fixture, 'image_id': UUID1},
'glance.common.exception.Invalid')
def test_update_bad_property(self):
fixture = {'properties': {'ok key': u' bad value \U0001f2aa'}}
self._expect_fail('image_update',
{'values': fixture, 'image_id': UUID2},
'glance.common.exception.Invalid')
fixture = {'properties': {u'invalid key \U00010020': 'ok value'}}
self._expect_fail('image_update',
{'values': fixture, 'image_id': UUID2},
'glance.common.exception.Invalid')
def test_delete_image(self):
"""Tests that the registry API deletes the image"""
# Grab the original number of images
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
'command': 'image_get_all',
'kwargs': {'filters': {'deleted': False}}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
res_dict = jsonutils.loads(res.body)[0]
self.assertEqual(200, res.status_int)
orig_num_images = len(res_dict)
# Delete image #2
cmd = [{
'command': 'image_destroy',
'kwargs': {'image_id': UUID2}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
# Verify one less image
cmd = [{
'command': 'image_get_all',
'kwargs': {'filters': {'deleted': False}}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
res_dict = jsonutils.loads(res.body)[0]
self.assertEqual(200, res.status_int)
new_num_images = len(res_dict)
self.assertEqual(new_num_images, orig_num_images - 1)
def test_delete_image_response(self):
"""Tests that the registry API delete returns the image metadata"""
image = self.FIXTURES[0]
req = webob.Request.blank('/rpc')
req.method = 'POST'
cmd = [{
'command': 'image_destroy',
'kwargs': {'image_id': image['id']}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
deleted_image = jsonutils.loads(res.body)[0]
self.assertEqual(image['id'], deleted_image['id'])
self.assertTrue(deleted_image['deleted'])
self.assertTrue(deleted_image['deleted_at'])
def test_get_image_members(self):
"""Tests members listing for existing images."""
req = webob.Request.blank('/rpc')
req.method = 'POST'
cmd = [{
'command': 'image_member_find',
'kwargs': {'image_id': UUID2}
}]
req.body = jsonutils.dump_as_bytes(cmd)
res = req.get_response(self.api)
self.assertEqual(200, res.status_int)
memb_list = jsonutils.loads(res.body)[0]
self.assertEqual(0, len(memb_list))
| 36.439926 | 79 | 0.503821 | 6,080 | 59,142 | 4.681086 | 0.057072 | 0.042163 | 0.026984 | 0.03373 | 0.852219 | 0.828045 | 0.816732 | 0.802607 | 0.79442 | 0.775342 | 0 | 0.020287 | 0.374894 | 59,142 | 1,622 | 80 | 36.462392 | 0.749554 | 0.081989 | 0 | 0.838164 | 0 | 0 | 0.152561 | 0.007073 | 0 | 0 | 0 | 0 | 0.070048 | 1 | 0.039453 | false | 0 | 0.012882 | 0 | 0.055556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6ae000bfa0f6eb1bfbc57ca00d182f0cd1c2b172 | 18,355 | py | Python | conans/test/unittests/client/build/cpp_std_flags_test.py | ssaavedra/conan | e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c | [
"MIT"
] | 1 | 2021-08-05T15:33:08.000Z | 2021-08-05T15:33:08.000Z | conans/test/unittests/client/build/cpp_std_flags_test.py | ssaavedra/conan | e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c | [
"MIT"
] | null | null | null | conans/test/unittests/client/build/cpp_std_flags_test.py | ssaavedra/conan | e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c | [
"MIT"
] | null | null | null | import unittest
from conans.client.build.cppstd_flags import cppstd_default
from conans.test.utils.mocks import MockSettings
from conans.tools import cppstd_flag
def _make_cppstd_flag(compiler, compiler_version, cppstd=None, compiler_base=None):
settings = MockSettings({"compiler": compiler,
"compiler.version": compiler_version,
"compiler.cppstd": cppstd})
if compiler_base:
settings.values["compiler.base"] = compiler_base
return cppstd_flag(settings)
def _make_cppstd_default(compiler, compiler_version, compiler_base=None):
settings = MockSettings({"compiler": compiler,
"compiler.version": compiler_version})
if compiler_base:
settings.values["compiler.base"] = compiler_base
return cppstd_default(settings)
class CompilerFlagsTest(unittest.TestCase):
def test_gcc_cppstd_flags(self):
self.assertEqual(_make_cppstd_flag("gcc", "4.2", "98"), "-std=c++98")
self.assertEqual(_make_cppstd_flag("gcc", "4.2", "gnu98"), "-std=gnu++98")
self.assertEqual(_make_cppstd_flag("gcc", "4.2", "11"), None)
self.assertEqual(_make_cppstd_flag("gcc", "4.2", "14"), None)
self.assertEqual(_make_cppstd_flag("gcc", "4.3", "98"), "-std=c++98")
self.assertEqual(_make_cppstd_flag("gcc", "4.3", "gnu98"), "-std=gnu++98")
self.assertEqual(_make_cppstd_flag("gcc", "4.3", "11"), "-std=c++0x")
self.assertEqual(_make_cppstd_flag("gcc", "4.3", "14"), None)
self.assertEqual(_make_cppstd_flag("gcc", "4.6", "11"), '-std=c++0x')
self.assertEqual(_make_cppstd_flag("gcc", "4.6", "14"), None)
self.assertEqual(_make_cppstd_flag("gcc", "4.7", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("gcc", "4.7", "14"), None)
self.assertEqual(_make_cppstd_flag("gcc", "4.8", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("gcc", "4.8", "14"), '-std=c++1y')
self.assertEqual(_make_cppstd_flag("gcc", "4.8", "17"), None)
self.assertEqual(_make_cppstd_flag("gcc", "4.9", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("gcc", "4.9", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("gcc", "4.9", "17"), None)
self.assertEqual(_make_cppstd_flag("gcc", "5", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("gcc", "5", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("gcc", "5", "gnu14"), '-std=gnu++14')
self.assertEqual(_make_cppstd_flag("gcc", "5", "17"), None)
self.assertEqual(_make_cppstd_flag("gcc", "5.1", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("gcc", "5.1", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("gcc", "5.1", "17"), '-std=c++1z')
self.assertEqual(_make_cppstd_flag("gcc", "7", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("gcc", "7", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("gcc", "7", "17"), '-std=c++17')
self.assertEqual(_make_cppstd_flag("gcc", "8", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("gcc", "8", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("gcc", "8", "17"), '-std=c++17')
self.assertEqual(_make_cppstd_flag("gcc", "8", "20"), '-std=c++2a')
def test_gcc_cppstd_defaults(self):
self.assertEqual(_make_cppstd_default("gcc", "4"), "gnu98")
self.assertEqual(_make_cppstd_default("gcc", "5"), "gnu98")
self.assertEqual(_make_cppstd_default("gcc", "6"), "gnu14")
self.assertEqual(_make_cppstd_default("gcc", "6.1"), "gnu14")
self.assertEqual(_make_cppstd_default("gcc", "7.3"), "gnu14")
self.assertEqual(_make_cppstd_default("gcc", "8.1"), "gnu14")
def test_clang_cppstd_flags(self):
self.assertEqual(_make_cppstd_flag("clang", "2.0", "98"), None)
self.assertEqual(_make_cppstd_flag("clang", "2.0", "gnu98"), None)
self.assertEqual(_make_cppstd_flag("clang", "2.0", "11"), None)
self.assertEqual(_make_cppstd_flag("clang", "2.0", "14"), None)
self.assertEqual(_make_cppstd_flag("clang", "2.1", "98"), "-std=c++98")
self.assertEqual(_make_cppstd_flag("clang", "2.1", "gnu98"), "-std=gnu++98")
self.assertEqual(_make_cppstd_flag("clang", "2.1", "11"), "-std=c++0x")
self.assertEqual(_make_cppstd_flag("clang", "2.1", "14"), None)
self.assertEqual(_make_cppstd_flag("clang", "3.0", "11"), '-std=c++0x')
self.assertEqual(_make_cppstd_flag("clang", "3.0", "14"), None)
self.assertEqual(_make_cppstd_flag("clang", "3.1", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("clang", "3.1", "14"), None)
self.assertEqual(_make_cppstd_flag("clang", "3.4", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("clang", "3.4", "14"), '-std=c++1y')
self.assertEqual(_make_cppstd_flag("clang", "3.4", "17"), None)
self.assertEqual(_make_cppstd_flag("clang", "3.5", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("clang", "3.5", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("clang", "3.5", "17"), '-std=c++1z')
self.assertEqual(_make_cppstd_flag("clang", "5", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("clang", "5", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("clang", "5", "gnu14"), '-std=gnu++14')
self.assertEqual(_make_cppstd_flag("clang", "5", "17"), '-std=c++17')
self.assertEqual(_make_cppstd_flag("clang", "5.1", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("clang", "5.1", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("clang", "5.1", "17"), '-std=c++17')
for version in ["6", "7", "8", "9", "10", "11"]:
self.assertEqual(_make_cppstd_flag("clang", version, "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("clang", version, "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("clang", version, "17"), '-std=c++17')
self.assertEqual(_make_cppstd_flag("clang", version, "20"), '-std=c++2a')
def test_clang_cppstd_defaults(self):
self.assertEqual(_make_cppstd_default("clang", "2"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "2.1"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "3.0"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "3.1"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "3.4"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "3.5"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "5"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "5.1"), "gnu98")
self.assertEqual(_make_cppstd_default("clang", "6"), "gnu14")
self.assertEqual(_make_cppstd_default("clang", "7"), "gnu14")
self.assertEqual(_make_cppstd_default("clang", "8"), "gnu14")
self.assertEqual(_make_cppstd_default("clang", "9"), "gnu14")
self.assertEqual(_make_cppstd_default("clang", "10"), "gnu14")
self.assertEqual(_make_cppstd_default("clang", "11"), "gnu14")
def test_apple_clang_cppstd_flags(self):
self.assertEqual(_make_cppstd_flag("apple-clang", "3.9", "98"), None)
self.assertEqual(_make_cppstd_flag("apple-clang", "3.9", "gnu98"), None)
self.assertEqual(_make_cppstd_flag("apple-clang", "3.9", "11"), None)
self.assertEqual(_make_cppstd_flag("apple-clang", "3.9", "14"), None)
self.assertEqual(_make_cppstd_flag("apple-clang", "4.0", "98"), "-std=c++98")
self.assertEqual(_make_cppstd_flag("apple-clang", "4.0", "gnu98"), "-std=gnu++98")
self.assertEqual(_make_cppstd_flag("apple-clang", "4.0", "11"), "-std=c++11")
self.assertEqual(_make_cppstd_flag("apple-clang", "4.0", "14"), None)
self.assertEqual(_make_cppstd_flag("apple-clang", "5.0", "98"), "-std=c++98")
self.assertEqual(_make_cppstd_flag("apple-clang", "5.0", "gnu98"), "-std=gnu++98")
self.assertEqual(_make_cppstd_flag("apple-clang", "5.0", "11"), "-std=c++11")
self.assertEqual(_make_cppstd_flag("apple-clang", "5.0", "14"), None)
self.assertEqual(_make_cppstd_flag("apple-clang", "5.1", "98"), "-std=c++98")
self.assertEqual(_make_cppstd_flag("apple-clang", "5.1", "gnu98"), "-std=gnu++98")
self.assertEqual(_make_cppstd_flag("apple-clang", "5.1", "11"), "-std=c++11")
self.assertEqual(_make_cppstd_flag("apple-clang", "5.1", "14"), "-std=c++1y")
self.assertEqual(_make_cppstd_flag("apple-clang", "6.1", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("apple-clang", "6.1", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("apple-clang", "6.1", "17"), "-std=c++1z")
self.assertEqual(_make_cppstd_flag("apple-clang", "7", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("apple-clang", "7", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("apple-clang", "7", "17"), "-std=c++1z")
self.assertEqual(_make_cppstd_flag("apple-clang", "8", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("apple-clang", "8", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("apple-clang", "8", "17"), "-std=c++1z")
self.assertEqual(_make_cppstd_flag("apple-clang", "9", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("apple-clang", "9", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("apple-clang", "9", "17"), "-std=c++1z")
self.assertEqual(_make_cppstd_flag("apple-clang", "9.1", "11"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("apple-clang", "9.1", "14"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("apple-clang", "9.1", "17"), "-std=c++17")
self.assertEqual(_make_cppstd_flag("apple-clang", "9.1", "20"), None)
self.assertEqual(_make_cppstd_flag("apple-clang", "10.0", "17"), "-std=c++17")
self.assertEqual(_make_cppstd_flag("apple-clang", "10.0", "20"), "-std=c++2a")
self.assertEqual(_make_cppstd_flag("apple-clang", "11.0", "17"), "-std=c++17")
self.assertEqual(_make_cppstd_flag("apple-clang", "11.0", "20"), "-std=c++2a")
self.assertEqual(_make_cppstd_flag("apple-clang", "12.0", "17"), "-std=c++17")
self.assertEqual(_make_cppstd_flag("apple-clang", "12.0", "20"), "-std=c++2a")
def test_apple_clang_cppstd_defaults(self):
self.assertEqual(_make_cppstd_default("apple-clang", "2"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "3"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "4"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "5"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "6"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "7"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "8"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "9"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "10"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "11"), "gnu98")
self.assertEqual(_make_cppstd_default("apple-clang", "12"), "gnu98")
def test_visual_cppstd_flags(self):
self.assertEqual(_make_cppstd_flag("Visual Studio", "12", "11"), None)
self.assertEqual(_make_cppstd_flag("Visual Studio", "12", "14"), None)
self.assertEqual(_make_cppstd_flag("Visual Studio", "12", "17"), None)
self.assertEqual(_make_cppstd_flag("Visual Studio", "14", "11"), None)
self.assertEqual(_make_cppstd_flag("Visual Studio", "14", "14"), '/std:c++14')
self.assertEqual(_make_cppstd_flag("Visual Studio", "14", "17"), '/std:c++latest')
self.assertEqual(_make_cppstd_flag("Visual Studio", "17", "11"), None)
self.assertEqual(_make_cppstd_flag("Visual Studio", "17", "14"), '/std:c++14')
self.assertEqual(_make_cppstd_flag("Visual Studio", "17", "17"), '/std:c++17')
self.assertEqual(_make_cppstd_flag("Visual Studio", "17", "20"), '/std:c++latest')
def test_visual_cppstd_defaults(self):
self.assertEqual(_make_cppstd_default("Visual Studio", "11"), None)
self.assertEqual(_make_cppstd_default("Visual Studio", "12"), None)
self.assertEqual(_make_cppstd_default("Visual Studio", "13"), None)
self.assertEqual(_make_cppstd_default("Visual Studio", "14"), "14")
self.assertEqual(_make_cppstd_default("Visual Studio", "15"), "14")
def test_intel_visual_cppstd_defaults(self):
self.assertEqual(_make_cppstd_default("intel", "19", "Visual Studio"), None)
def test_intel_gcc_cppstd_defaults(self):
self.assertEqual(_make_cppstd_default("intel", "19", "gcc"), 'gnu98')
def test_intel_visual_cppstd_flag(self):
self.assertEqual(_make_cppstd_flag("intel", "19.1", "gnu98", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "19.1", "11", "Visual Studio"), '/Qstd=c++11')
self.assertEqual(_make_cppstd_flag("intel", "19.1", "14", "Visual Studio"), '/Qstd=c++14')
self.assertEqual(_make_cppstd_flag("intel", "19.1", "17", "Visual Studio"), '/Qstd=c++17')
self.assertEqual(_make_cppstd_flag("intel", "19.1", "20", "Visual Studio"), '/Qstd=c++20')
self.assertEqual(_make_cppstd_flag("intel", "19", "gnu98", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "19", "11", "Visual Studio"), '/Qstd=c++11')
self.assertEqual(_make_cppstd_flag("intel", "19", "14", "Visual Studio"), '/Qstd=c++14')
self.assertEqual(_make_cppstd_flag("intel", "19", "17", "Visual Studio"), '/Qstd=c++17')
self.assertEqual(_make_cppstd_flag("intel", "19", "20", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "17", "gnu98", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "17", "11", "Visual Studio"), '/Qstd=c++11')
self.assertEqual(_make_cppstd_flag("intel", "17", "14", "Visual Studio"), '/Qstd=c++14')
self.assertEqual(_make_cppstd_flag("intel", "17", "17", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "17", "20", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "15", "gnu98", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "15", "11", "Visual Studio"), '/Qstd=c++11')
self.assertEqual(_make_cppstd_flag("intel", "15", "14", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "15", "17", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "15", "20", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "12", "gnu98", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "12", "11", "Visual Studio"), '/Qstd=c++0x')
self.assertEqual(_make_cppstd_flag("intel", "12", "14", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "12", "17", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "12", "20", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "gnu98", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "11", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "14", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "17", "Visual Studio"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "20", "Visual Studio"), None)
def test_intel_gcc_cppstd_flag(self):
self.assertEqual(_make_cppstd_flag("intel", "19.1", "gnu98", "gcc"), '-std=gnu++98')
self.assertEqual(_make_cppstd_flag("intel", "19.1", "11", "gcc"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("intel", "19.1", "14", "gcc"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("intel", "19.1", "17", "gcc"), '-std=c++17')
self.assertEqual(_make_cppstd_flag("intel", "19.1", "20", "gcc"), '-std=c++20')
self.assertEqual(_make_cppstd_flag("intel", "19", "gnu98", "gcc"), '-std=gnu++98')
self.assertEqual(_make_cppstd_flag("intel", "19", "11", "gcc"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("intel", "19", "14", "gcc"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("intel", "19", "17", "gcc"), '-std=c++17')
self.assertEqual(_make_cppstd_flag("intel", "19", "20", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "17", "gnu98", "gcc"), '-std=gnu++98')
self.assertEqual(_make_cppstd_flag("intel", "17", "11", "gcc"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("intel", "17", "14", "gcc"), '-std=c++14')
self.assertEqual(_make_cppstd_flag("intel", "17", "17", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "17", "20", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "15", "gnu98", "gcc"), '-std=gnu++98')
self.assertEqual(_make_cppstd_flag("intel", "15", "11", "gcc"), '-std=c++11')
self.assertEqual(_make_cppstd_flag("intel", "15", "14", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "15", "17", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "15", "20", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "12", "gnu98", "gcc"), '-std=gnu++98')
self.assertEqual(_make_cppstd_flag("intel", "12", "11", "gcc"), '-std=c++0x')
self.assertEqual(_make_cppstd_flag("intel", "12", "14", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "12", "17", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "12", "20", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "gnu98", "gcc"), '-std=gnu++98')
self.assertEqual(_make_cppstd_flag("intel", "11", "11", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "14", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "17", "gcc"), None)
self.assertEqual(_make_cppstd_flag("intel", "11", "20", "gcc"), None)
| 61.801347 | 98 | 0.626696 | 2,472 | 18,355 | 4.376618 | 0.028317 | 0.193179 | 0.363527 | 0.478325 | 0.955079 | 0.948332 | 0.94371 | 0.894353 | 0.777983 | 0.589426 | 0 | 0.06398 | 0.157832 | 18,355 | 296 | 99 | 62.010135 | 0.635917 | 0 | 0 | 0.02521 | 0 | 0 | 0.225661 | 0 | 0 | 0 | 0 | 0 | 0.869748 | 1 | 0.058824 | false | 0 | 0.016807 | 0 | 0.088235 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0a7299676ae1ebc52bbdd3dab3fcf3fa2006d7d9 | 16,470 | py | Python | deribit_api_python/test/test_transfer_item.py | jDally987/deribit-simple-gui | 60c91f8e11b541b0e59cbd23625639a9b9f0dd43 | [
"MIT"
] | 5 | 2019-06-06T04:48:34.000Z | 2019-10-14T00:31:21.000Z | python/test/test_transfer_item.py | HudsonStreet/hs-deribit-crawler | 243da95e4ce0c2145fb485552be49d812555c34e | [
"MIT"
] | 2 | 2019-07-16T06:11:44.000Z | 2020-05-03T19:03:03.000Z | deribit_api_python/test/test_transfer_item.py | jDally987/deribit-simple-gui | 60c91f8e11b541b0e59cbd23625639a9b9f0dd43 | [
"MIT"
] | 4 | 2019-07-27T16:50:14.000Z | 2019-11-13T21:03:50.000Z | # coding: utf-8
"""
Deribit API
#Overview Deribit provides three different interfaces to access the API: * [JSON-RPC over Websocket](#json-rpc) * [JSON-RPC over HTTP](#json-rpc) * [FIX](#fix-api) (Financial Information eXchange) With the API Console you can use and test the JSON-RPC API, both via HTTP and via Websocket. To visit the API console, go to __Account > API tab > API Console tab.__ ##Naming Deribit tradeable assets or instruments use the following system of naming: |Kind|Examples|Template|Comments| |----|--------|--------|--------| |Future|<code>BTC-25MAR16</code>, <code>BTC-5AUG16</code>|<code>BTC-DMMMYY</code>|<code>BTC</code> is currency, <code>DMMMYY</code> is expiration date, <code>D</code> stands for day of month (1 or 2 digits), <code>MMM</code> - month (3 first letters in English), <code>YY</code> stands for year.| |Perpetual|<code>BTC-PERPETUAL</code> ||Perpetual contract for currency <code>BTC</code>.| |Option|<code>BTC-25MAR16-420-C</code>, <code>BTC-5AUG16-580-P</code>|<code>BTC-DMMMYY-STRIKE-K</code>|<code>STRIKE</code> is option strike price in USD. Template <code>K</code> is option kind: <code>C</code> for call options or <code>P</code> for put options.| # JSON-RPC JSON-RPC is a light-weight remote procedure call (RPC) protocol. The [JSON-RPC specification](https://www.jsonrpc.org/specification) defines the data structures that are used for the messages that are exchanged between client and server, as well as the rules around their processing. JSON-RPC uses JSON (RFC 4627) as data format. JSON-RPC is transport agnostic: it does not specify which transport mechanism must be used. The Deribit API supports both Websocket (preferred) and HTTP (with limitations: subscriptions are not supported over HTTP). 
## Request messages > An example of a request message: ```json { \"jsonrpc\": \"2.0\", \"id\": 8066, \"method\": \"public/ticker\", \"params\": { \"instrument\": \"BTC-24AUG18-6500-P\" } } ``` According to the JSON-RPC sepcification the requests must be JSON objects with the following fields. |Name|Type|Description| |----|----|-----------| |jsonrpc|string|The version of the JSON-RPC spec: \"2.0\"| |id|integer or string|An identifier of the request. If it is included, then the response will contain the same identifier| |method|string|The method to be invoked| |params|object|The parameters values for the method. The field names must match with the expected parameter names. The parameters that are expected are described in the documentation for the methods, below.| <aside class=\"warning\"> The JSON-RPC specification describes two features that are currently not supported by the API: <ul> <li>Specification of parameter values by position</li> <li>Batch requests</li> </ul> </aside> ## Response messages > An example of a response message: ```json { \"jsonrpc\": \"2.0\", \"id\": 5239, \"testnet\": false, \"result\": [ { \"currency\": \"BTC\", \"currencyLong\": \"Bitcoin\", \"minConfirmation\": 2, \"txFee\": 0.0006, \"isActive\": true, \"coinType\": \"BITCOIN\", \"baseAddress\": null } ], \"usIn\": 1535043730126248, \"usOut\": 1535043730126250, \"usDiff\": 2 } ``` The JSON-RPC API always responds with a JSON object with the following fields. |Name|Type|Description| |----|----|-----------| |id|integer|This is the same id that was sent in the request.| |result|any|If successful, the result of the API call. The format for the result is described with each method.| |error|error object|Only present if there was an error invoking the method. The error object is described below.| |testnet|boolean|Indicates whether the API in use is actually the test API. 
<code>false</code> for production server, <code>true</code> for test server.| |usIn|integer|The timestamp when the requests was received (microseconds since the Unix epoch)| |usOut|integer|The timestamp when the response was sent (microseconds since the Unix epoch)| |usDiff|integer|The number of microseconds that was spent handling the request| <aside class=\"notice\"> The fields <code>testnet</code>, <code>usIn</code>, <code>usOut</code> and <code>usDiff</code> are not part of the JSON-RPC standard. <p>In order not to clutter the examples they will generally be omitted from the example code.</p> </aside> > An example of a response with an error: ```json { \"jsonrpc\": \"2.0\", \"id\": 8163, \"error\": { \"code\": 11050, \"message\": \"bad_request\" }, \"testnet\": false, \"usIn\": 1535037392434763, \"usOut\": 1535037392448119, \"usDiff\": 13356 } ``` In case of an error the response message will contain the error field, with as value an object with the following with the following fields: |Name|Type|Description |----|----|-----------| |code|integer|A number that indicates the kind of error.| |message|string|A short description that indicates the kind of error.| |data|any|Additional data about the error. This field may be omitted.| ## Notifications > An example of a notification: ```json { \"jsonrpc\": \"2.0\", \"method\": \"subscription\", \"params\": { \"channel\": \"deribit_price_index.btc_usd\", \"data\": { \"timestamp\": 1535098298227, \"price\": 6521.17, \"index_name\": \"btc_usd\" } } } ``` API users can subscribe to certain types of notifications. This means that they will receive JSON-RPC notification-messages from the server when certain events occur, such as changes to the index price or changes to the order book for a certain instrument. The API methods [public/subscribe](#public-subscribe) and [private/subscribe](#private-subscribe) are used to set up a subscription. 
Since HTTP does not support the sending of messages from server to client, these methods are only availble when using the Websocket transport mechanism. At the moment of subscription a \"channel\" must be specified. The channel determines the type of events that will be received. See [Subscriptions](#subscriptions) for more details about the channels. In accordance with the JSON-RPC specification, the format of a notification is that of a request message without an <code>id</code> field. The value of the <code>method</code> field will always be <code>\"subscription\"</code>. The <code>params</code> field will always be an object with 2 members: <code>channel</code> and <code>data</code>. The value of the <code>channel</code> member is the name of the channel (a string). The value of the <code>data</code> member is an object that contains data that is specific for the channel. ## Authentication > An example of a JSON request with token: ```json { \"id\": 5647, \"method\": \"private/get_subaccounts\", \"params\": { \"access_token\": \"67SVutDoVZSzkUStHSuk51WntMNBJ5mh5DYZhwzpiqDF\" } } ``` The API consists of `public` and `private` methods. The public methods do not require authentication. The private methods use OAuth 2.0 authentication. This means that a valid OAuth access token must be included in the request, which can get achived by calling method [public/auth](#public-auth). When the token was assigned to the user, it should be passed along, with other request parameters, back to the server: |Connection type|Access token placement |----|-----------| |**Websocket**|Inside request JSON parameters, as an `access_token` field| |**HTTP (REST)**|Header `Authorization: bearer ```Token``` ` value| ### Additional authorization method - basic user credentials <span style=\"color:red\"><b> ! 
Not recommended - however, it could be useful for quick testing API</b></span></br> Every `private` method could be accessed by providing, inside HTTP `Authorization: Basic XXX` header, values with user `ClientId` and assigned `ClientSecret` (both values can be found on the API page on the Deribit website) encoded with `Base64`: <code>Authorization: Basic BASE64(`ClientId` + `:` + `ClientSecret`)</code> ### Additional authorization method - Deribit signature credentials The Derbit service provides dedicated authorization method, which harness user generated signature to increase security level for passing request data. Generated value is passed inside `Authorization` header, coded as: <code>Authorization: deri-hmac-sha256 id=```ClientId```,ts=```Timestamp```,sig=```Signature```,nonce=```Nonce```</code> where: |Deribit credential|Description |----|-----------| |*ClientId*|Can be found on the API page on the Deribit website| |*Timestamp*|Time when the request was generated - given as **miliseconds**. It's valid for **60 seconds** since generation, after that time any request with an old timestamp will be rejected.| |*Signature*|Value for signature calculated as described below | |*Nonce*|Single usage, user generated initialization vector for the server token| The signature is generated by the following formula: <code> Signature = HEX_STRING( HMAC-SHA256( ClientSecret, StringToSign ) );</code></br> <code> StringToSign = Timestamp + \"\\n\" + Nonce + \"\\n\" + RequestData;</code></br> <code> RequestData = UPPERCASE(HTTP_METHOD()) + \"\\n\" + URI() + \"\\n\" + RequestBody + \"\\n\";</code></br> e.g. 
(using shell with ```openssl``` tool): <code> ClientId=AAAAAAAAAAA</code></br> <code> ClientSecret=ABCD</code></br> <code> Timestamp=$( date +%s000 )</code></br> <code> Nonce=$( cat /dev/urandom | tr -dc 'a-z0-9' | head -c8 )</code></br> <code> URI=\"/api/v2/private/get_account_summary?currency=BTC\"</code></br> <code> HttpMethod=GET</code></br> <code> Body=\"\"</code></br></br> <code> Signature=$( echo -ne \"${Timestamp}\\n${Nonce}\\n${HttpMethod}\\n${URI}\\n${Body}\\n\" | openssl sha256 -r -hmac \"$ClientSecret\" | cut -f1 -d' ' )</code></br></br> <code> echo $Signature</code></br></br> <code> shell output> ea40d5e5e4fae235ab22b61da98121fbf4acdc06db03d632e23c66bcccb90d2c (**WARNING**: Exact value depends on current timestamp and client credentials</code></br></br> <code> curl -s -X ${HttpMethod} -H \"Authorization: deri-hmac-sha256 id=${ClientId},ts=${Timestamp},nonce=${Nonce},sig=${Signature}\" \"https://www.deribit.com${URI}\"</code></br></br> ### Additional authorization method - signature credentials (WebSocket API) When connecting through Websocket, user can request for authorization using ```client_credential``` method, which requires providing following parameters (as a part of JSON request): |JSON parameter|Description |----|-----------| |*grant_type*|Must be **client_signature**| |*client_id*|Can be found on the API page on the Deribit website| |*timestamp*|Time when the request was generated - given as **miliseconds**. 
It's valid for **60 seconds** since generation, after that time any request with an old timestamp will be rejected.| |*signature*|Value for signature calculated as described below | |*nonce*|Single usage, user generated initialization vector for the server token| |*data*|**Optional** field, which contains any user specific value| The signature is generated by the following formula: <code> StringToSign = Timestamp + \"\\n\" + Nonce + \"\\n\" + Data;</code></br> <code> Signature = HEX_STRING( HMAC-SHA256( ClientSecret, StringToSign ) );</code></br> e.g. (using shell with ```openssl``` tool): <code> ClientId=AAAAAAAAAAA</code></br> <code> ClientSecret=ABCD</code></br> <code> Timestamp=$( date +%s000 ) # e.g. 1554883365000 </code></br> <code> Nonce=$( cat /dev/urandom | tr -dc 'a-z0-9' | head -c8 ) # e.g. fdbmmz79 </code></br> <code> Data=\"\"</code></br></br> <code> Signature=$( echo -ne \"${Timestamp}\\n${Nonce}\\n${Data}\\n\" | openssl sha256 -r -hmac \"$ClientSecret\" | cut -f1 -d' ' )</code></br></br> <code> echo $Signature</code></br></br> <code> shell output> e20c9cd5639d41f8bbc88f4d699c4baf94a4f0ee320e9a116b72743c449eb994 (**WARNING**: Exact value depends on current timestamp and client credentials</code></br></br> You can also check the signature value using some online tools like, e.g: [https://codebeautify.org/hmac-generator](https://codebeautify.org/hmac-generator) (but don't forget about adding *newline* after each part of the hashed text and remember that you **should use** it only with your **test credentials**). 
Here's a sample JSON request created using the values from the example above: <code> { </br> \"jsonrpc\" : \"2.0\", </br> \"id\" : 9929, </br> \"method\" : \"public/auth\", </br> \"params\" : </br> { </br> \"grant_type\" : \"client_signature\", </br> \"client_id\" : \"AAAAAAAAAAA\", </br> \"timestamp\": \"1554883365000\", </br> \"nonce\": \"fdbmmz79\", </br> \"data\": \"\", </br> \"signature\" : \"e20c9cd5639d41f8bbc88f4d699c4baf94a4f0ee320e9a116b72743c449eb994\" </br> } </br> } </br> </code> ### Access scope When asking for `access token` user can provide the required access level (called `scope`) which defines what type of functionality he/she wants to use, and whether requests are only going to check for some data or also to update them. Scopes are required and checked for `private` methods, so if you plan to use only `public` information you can stay with values assigned by default. |Scope|Description |----|-----------| |*account:read*|Access to **account** methods - read only data| |*account:read_write*|Access to **account** methods - allows to manage account settings, add subaccounts, etc.| |*trade:read*|Access to **trade** methods - read only data| |*trade:read_write*|Access to **trade** methods - required to create and modify orders| |*wallet:read*|Access to **wallet** methods - read only data| |*wallet:read_write*|Access to **wallet** methods - allows to withdraw, generate new deposit address, etc.| |*wallet:none*, *account:none*, *trade:none*|Blocked access to specified functionality| <span style=\"color:red\">**NOTICE:**</span> Depending on choosing an authentication method (```grant type```) some scopes could be narrowed by the server. e.g. 
when ```grant_type = client_credentials``` and ```scope = wallet:read_write``` it's modified by the server as ```scope = wallet:read```\" ## JSON-RPC over websocket Websocket is the prefered transport mechanism for the JSON-RPC API, because it is faster and because it can support [subscriptions](#subscriptions) and [cancel on disconnect](#private-enable_cancel_on_disconnect). The code examples that can be found next to each of the methods show how websockets can be used from Python or Javascript/node.js. ## JSON-RPC over HTTP Besides websockets it is also possible to use the API via HTTP. The code examples for 'shell' show how this can be done using curl. Note that subscriptions and cancel on disconnect are not supported via HTTP. #Methods # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import openapi_client
from openapi_client.models.transfer_item import TransferItem # noqa: E501
from openapi_client.rest import ApiException
class TestTransferItem(unittest.TestCase):
    """Unit test stubs for the TransferItem model."""

    def setUp(self):
        # No fixtures are needed for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testTransferItem(self):
        """Test TransferItem"""
        # FIXME: construct object with mandatory attributes with example values
        # model = openapi_client.models.transfer_item.TransferItem()  # noqa: E501
        pass
# Allow this test module to be run directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 411.75 | 15,697 | 0.691257 | 2,264 | 16,470 | 5.0053 | 0.243816 | 0.057183 | 0.052947 | 0.035298 | 0.257236 | 0.20173 | 0.187787 | 0.167755 | 0.157519 | 0.1494 | 0 | 0.026671 | 0.157681 | 16,470 | 39 | 15,698 | 422.307692 | 0.790168 | 0.958531 | 0 | 0.214286 | 0 | 0 | 0.018307 | 0 | 0 | 0 | 0 | 0.025641 | 0 | 1 | 0.214286 | false | 0.214286 | 0.357143 | 0 | 0.642857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
0a74731a6bf9fde2cf62733828442c1a0c72648b | 51,817 | py | Python | openstack/tests/unit/cloud/test_object.py | anton-sidelnikov/openstacksdk | 98f0c67120b65814c3bd1663415e302551a14536 | [
"Apache-2.0"
] | null | null | null | openstack/tests/unit/cloud/test_object.py | anton-sidelnikov/openstacksdk | 98f0c67120b65814c3bd1663415e302551a14536 | [
"Apache-2.0"
] | null | null | null | openstack/tests/unit/cloud/test_object.py | anton-sidelnikov/openstacksdk | 98f0c67120b65814c3bd1663415e302551a14536 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
from unittest import mock
import testtools
import openstack.cloud
from openstack.cloud import exc
import openstack.cloud.openstackcloud as oc_oc
from openstack import exceptions
from openstack.object_store.v1 import _proxy
from openstack.object_store.v1 import container
from openstack.object_store.v1 import obj
from openstack.tests.unit import base
from openstack import utils
class BaseTestObject(base.TestCase):
    """Shared fixtures for object-store tests.

    Provides unique container/object names, the endpoint URLs derived
    from them, and helpers to compare raw swift JSON with SDK resources.
    """

    def setUp(self):
        super(BaseTestObject, self).setUp()
        self.container = self.getUniqueString()
        self.object = self.getUniqueString()
        self.endpoint = self.cloud._object_store_client.get_endpoint()
        self.container_endpoint = '/'.join([self.endpoint, self.container])
        self.object_endpoint = '/'.join([self.container_endpoint, self.object])

    def _compare_containers(self, exp, real):
        # Compare via the SDK resource representation, ignoring computed
        # fields that never come from the server payload.
        expected = container.Container(**exp).to_dict(computed=False)
        self.assertDictEqual(expected, real.to_dict(computed=False))

    def _compare_objects(self, exp, real):
        expected = obj.Object(**exp).to_dict(computed=False)
        self.assertDictEqual(expected, real.to_dict(computed=False))
class TestObject(BaseTestObject):
def test_create_container(self):
    """Test creating a (private) container"""
    # Expected HTTP conversation: a HEAD that reports the container
    # missing (404), the PUT that creates it, then a HEAD fetching its
    # metadata.  register_uris() order is significant: assert_calls()
    # verifies the calls happened in exactly this order.
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint, status_code=404),
        dict(method='PUT', uri=self.container_endpoint,
             status_code=201,
             headers={
                 'Date': 'Fri, 16 Dec 2016 18:21:20 GMT',
                 'Content-Length': '0',
                 'Content-Type': 'text/html; charset=UTF-8',
             }),
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'Content-Length': '0',
                 'X-Container-Object-Count': '0',
                 'Accept-Ranges': 'bytes',
                 'X-Storage-Policy': 'Policy-0',
                 'Date': 'Fri, 16 Dec 2016 18:29:05 GMT',
                 'X-Timestamp': '1481912480.41664',
                 'X-Trans-Id': 'tx60ec128d9dbf44b9add68-0058543271dfw1',
                 'X-Container-Bytes-Used': '0',
                 'Content-Type': 'text/plain; charset=utf-8'})
    ])
    self.cloud.create_container(self.container)
    self.assert_calls()
def test_create_container_public(self):
    """Test creating a public container"""
    # Same conversation as the private case, but the PUT is expected to
    # carry (and here echoes) the public read ACL that
    # create_container(public=True) sets.
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             status_code=404),
        dict(method='PUT', uri=self.container_endpoint,
             status_code=201,
             headers={
                 'Date': 'Fri, 16 Dec 2016 18:21:20 GMT',
                 'Content-Length': '0',
                 'Content-Type': 'text/html; charset=UTF-8',
                 'x-container-read':
                     oc_oc.OBJECT_CONTAINER_ACLS[
                         'public'],
             }),
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'Content-Length': '0',
                 'X-Container-Object-Count': '0',
                 'Accept-Ranges': 'bytes',
                 'X-Storage-Policy': 'Policy-0',
                 'Date': 'Fri, 16 Dec 2016 18:29:05 GMT',
                 'X-Timestamp': '1481912480.41664',
                 'X-Trans-Id': 'tx60ec128d9dbf44b9add68-0058543271dfw1',
                 'X-Container-Bytes-Used': '0',
                 'Content-Type': 'text/plain; charset=utf-8'})
    ])
    self.cloud.create_container(self.container, public=True)
    self.assert_calls()
def test_create_container_exists(self):
    """Test creating a container that exists."""
    # A single HEAD that succeeds means the container already exists,
    # so create_container issues no PUT.
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'Content-Length': '0',
                 'X-Container-Object-Count': '0',
                 'Accept-Ranges': 'bytes',
                 'X-Storage-Policy': 'Policy-0',
                 'Date': 'Fri, 16 Dec 2016 18:29:05 GMT',
                 'X-Timestamp': '1481912480.41664',
                 'X-Trans-Id': 'tx60ec128d9dbf44b9add68-0058543271dfw1',
                 'X-Container-Bytes-Used': '0',
                 'Content-Type': 'text/plain; charset=utf-8'})
    ])
    # NOTE: named ``ret`` to avoid shadowing the module-level
    # ``openstack.object_store.v1.container`` import used elsewhere in
    # this file.
    ret = self.cloud.create_container(self.container)
    self.assert_calls()
    self.assertIsNotNone(ret)
def test_delete_container(self):
    """A successful DELETE makes delete_container return True."""
    self.register_uris(
        [dict(method='DELETE', uri=self.container_endpoint)])
    deleted = self.cloud.delete_container(self.container)
    self.assertTrue(deleted)
    self.assert_calls()
def test_delete_container_404(self):
    """No exception when deleting a container that does not exist"""
    self.register_uris(
        [dict(method='DELETE', uri=self.container_endpoint,
              status_code=404)])
    deleted = self.cloud.delete_container(self.container)
    self.assertFalse(deleted)
    self.assert_calls()
def test_delete_container_error(self):
    """Non-404 swift error re-raised as OSCE"""
    # 409 is what swift returns when the container is not empty.
    self.register_uris(
        [dict(method='DELETE', uri=self.container_endpoint,
              status_code=409)])
    self.assertRaises(
        openstack.cloud.OpenStackCloudException,
        self.cloud.delete_container,
        self.container)
    self.assert_calls()
def test_update_container(self):
    """Headers passed to update_container are POSTed verbatim."""
    acl = oc_oc.OBJECT_CONTAINER_ACLS['public']
    headers = {'x-container-read': acl}
    self.register_uris([
        dict(method='POST', uri=self.container_endpoint,
             status_code=204,
             validate=dict(headers=headers)),
    ])
    self.cloud.update_container(self.container, headers)
    self.assert_calls()
def test_update_container_error(self):
    """Swift error re-raised as OSCE"""
    # Of questionable value: the swift API docs declare no error codes
    # (beyond 404 for the container) for this call, and a synthetic
    # failure with a real code cannot be produced.  This mostly
    # exercises the shade adapter's error-raising logic.
    self.register_uris([
        dict(method='POST', uri=self.container_endpoint,
             status_code=409),
    ])
    self.assertRaises(
        openstack.cloud.OpenStackCloudException,
        self.cloud.update_container,
        self.container,
        {'foo': 'bar'})
    self.assert_calls()
def test_set_container_access_public(self):
    """Setting 'public' POSTs the mapped x-container-read ACL header."""
    self.register_uris([
        dict(method='POST', uri=self.container_endpoint,
             status_code=204,
             validate=dict(
                 headers={
                     'x-container-read':
                         oc_oc.OBJECT_CONTAINER_ACLS[
                             'public']}))])
    self.cloud.set_container_access(self.container, 'public')
    self.assert_calls()
def test_set_container_access_private(self):
    """Setting 'private' POSTs the mapped x-container-read ACL header."""
    self.register_uris([
        dict(method='POST', uri=self.container_endpoint,
             status_code=204,
             validate=dict(
                 headers={
                     'x-container-read':
                         oc_oc.OBJECT_CONTAINER_ACLS[
                             'private']}))])
    self.cloud.set_container_access(self.container, 'private')
    self.assert_calls()
def test_set_container_access_invalid(self):
    """An unknown access level is rejected client-side (no HTTP call)."""
    self.assertRaises(
        openstack.cloud.OpenStackCloudException,
        self.cloud.set_container_access,
        self.container,
        'invalid')
def test_get_container_access(self):
    """A known ACL header maps back to its symbolic access level."""
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'x-container-read':
                     str(oc_oc.OBJECT_CONTAINER_ACLS[
                         'public'])})])
    access = self.cloud.get_container_access(self.container)
    self.assertEqual('public', access)
    # Verify the registered HEAD was actually issued — matches the
    # pattern used by every other test in this class that registers
    # URIs.
    self.assert_calls()
def test_get_container_invalid(self):
    """An unrecognized ACL value raises with a descriptive message."""
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             headers={'x-container-read': 'invalid'})])
    # ExpectedException matches the message as a regex against the
    # raised exception's text.
    with testtools.ExpectedException(
            exc.OpenStackCloudException,
            "Could not determine container access for ACL: invalid"
    ):
        self.cloud.get_container_access(self.container)
def test_get_container_access_not_found(self):
    """A 404 on the container HEAD raises a 'not found' error."""
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             status_code=404)])
    with testtools.ExpectedException(
            exc.OpenStackCloudException,
            "Container not found: %s" % self.container
    ):
        self.cloud.get_container_access(self.container)
def test_list_containers(self):
    """list_containers returns Container resources built from the JSON."""
    endpoint = self.endpoint + '/'
    expected = [
        {u'count': 0, u'bytes': 0, u'name': self.container}]
    self.register_uris([
        dict(method='GET', uri=endpoint, complete_qs=True,
             json=expected),
    ])

    listed = self.cloud.list_containers()

    self.assert_calls()
    for raw, resource in zip(expected, listed):
        self._compare_containers(raw, resource)
def test_list_containers_exception(self):
    """An unexpected HTTP error surfaces as OpenStackCloudException."""
    endpoint = self.endpoint + '/'
    self.register_uris([
        dict(method='GET', uri=endpoint, complete_qs=True,
             status_code=416),
    ])
    self.assertRaises(
        exc.OpenStackCloudException, self.cloud.list_containers)
    self.assert_calls()
@mock.patch('time.time', autospec=True)
def test_generate_form_signature_container_key(self, mock_time):
    """The temp-url key from the container metadata is used to sign."""
    # Freeze time so the expiry (12345 + 1000s timeout = 13345) and the
    # resulting HMAC signature are deterministic.
    mock_time.return_value = 12345
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'Content-Length': '0',
                 'X-Container-Object-Count': '0',
                 'Accept-Ranges': 'bytes',
                 'X-Storage-Policy': 'Policy-0',
                 'Date': 'Fri, 16 Dec 2016 18:29:05 GMT',
                 'X-Timestamp': '1481912480.41664',
                 'X-Trans-Id': 'tx60ec128d9dbf44b9add68-0058543271dfw1',
                 'X-Container-Bytes-Used': '0',
                 'X-Container-Meta-Temp-Url-Key': 'amazingly-secure-key',
                 'Content-Type': 'text/plain; charset=utf-8'})
    ])
    self.assertEqual(
        (13345, '60731fb66d46c97cdcb79b6154363179c500b9d9'),
        self.cloud.object_store.generate_form_signature(
            self.container,
            object_prefix='prefix/location',
            redirect_url='https://example.com/location',
            max_file_size=1024 * 1024 * 1024,
            max_upload_count=10, timeout=1000, temp_url_key=None))
    self.assert_calls()
@mock.patch('time.time', autospec=True)
def test_generate_form_signature_account_key(self, mock_time):
    """Falls back to the account temp-url key when the container has none."""
    # Freeze time so the expiry and HMAC signature are deterministic.
    mock_time.return_value = 12345
    # The container HEAD carries no temp-url key, so a second HEAD on
    # the account root is expected.
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'Content-Length': '0',
                 'X-Container-Object-Count': '0',
                 'Accept-Ranges': 'bytes',
                 'X-Storage-Policy': 'Policy-0',
                 'Date': 'Fri, 16 Dec 2016 18:29:05 GMT',
                 'X-Timestamp': '1481912480.41664',
                 'X-Trans-Id': 'tx60ec128d9dbf44b9add68-0058543271dfw1',
                 'X-Container-Bytes-Used': '0',
                 'Content-Type': 'text/plain; charset=utf-8'}),
        dict(method='HEAD', uri=self.endpoint + '/',
             headers={
                 'X-Account-Meta-Temp-Url-Key': 'amazingly-secure-key'}),
    ])
    self.assertEqual(
        (13345, '3cb9bc83d5a4136421bb2c1f58b963740566646f'),
        self.cloud.object_store.generate_form_signature(
            self.container,
            object_prefix='prefix/location',
            redirect_url='https://example.com/location',
            max_file_size=1024 * 1024 * 1024,
            max_upload_count=10, timeout=1000, temp_url_key=None))
    self.assert_calls()
# autospec=True for consistency with the two sibling signature tests.
@mock.patch('time.time', autospec=True)
def test_generate_form_signature_key_argument(self, mock_time):
    """An explicitly passed temp_url_key wins; no HEAD requests are made."""
    # Freeze time so the expiry and HMAC signature are deterministic.
    mock_time.return_value = 12345
    self.assertEqual(
        (13345, '1c283a05c6628274b732212d9a885265e6f67b63'),
        self.cloud.object_store.generate_form_signature(
            self.container,
            object_prefix='prefix/location',
            redirect_url='https://example.com/location',
            max_file_size=1024 * 1024 * 1024,
            max_upload_count=10, timeout=1000,
            temp_url_key='amazingly-secure-key'))
    self.assert_calls()
def test_generate_form_signature_no_key(self):
    """With no key on the container, the account, or the call itself,
    generate_form_signature raises SDKException."""
    self.register_uris([
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'Content-Length': '0',
                 'X-Container-Object-Count': '0',
                 'Accept-Ranges': 'bytes',
                 'X-Storage-Policy': 'Policy-0',
                 'Date': 'Fri, 16 Dec 2016 18:29:05 GMT',
                 'X-Timestamp': '1481912480.41664',
                 'X-Trans-Id': 'tx60ec128d9dbf44b9add68-0058543271dfw1',
                 'X-Container-Bytes-Used': '0',
                 'Content-Type': 'text/plain; charset=utf-8'}),
        dict(method='HEAD', uri=self.endpoint + '/',
             headers={}),
    ])
    self.assertRaises(
        exceptions.SDKException,
        self.cloud.object_store.generate_form_signature,
        self.container,
        object_prefix='prefix/location',
        redirect_url='https://example.com/location',
        max_file_size=1024 * 1024 * 1024,
        max_upload_count=10, timeout=1000, temp_url_key=None)
    self.assert_calls()
def test_set_account_temp_url_key(self):
    """The key is POSTed to the account root and read back to confirm."""
    key = 'super-secure-key'
    self.register_uris([
        dict(method='POST', uri=self.endpoint + '/',
             status_code=204,
             validate=dict(
                 headers={
                     'x-account-meta-temp-url-key': key})),
        dict(method='HEAD', uri=self.endpoint + '/',
             headers={
                 'x-account-meta-temp-url-key': key}),
    ])
    self.cloud.object_store.set_account_temp_url_key(key)
    self.assert_calls()
def test_set_account_temp_url_key_secondary(self):
    """secondary=True targets the ...-temp-url-key-2 account header."""
    key = 'super-secure-key'
    self.register_uris([
        dict(method='POST', uri=self.endpoint + '/',
             status_code=204,
             validate=dict(
                 headers={
                     'x-account-meta-temp-url-key-2': key})),
        dict(method='HEAD', uri=self.endpoint + '/',
             headers={
                 'x-account-meta-temp-url-key-2': key}),
    ])
    self.cloud.object_store.set_account_temp_url_key(key, secondary=True)
    self.assert_calls()
def test_set_container_temp_url_key(self):
    """The key is POSTed to the container and read back to confirm."""
    key = 'super-secure-key'
    self.register_uris([
        dict(method='POST', uri=self.container_endpoint,
             status_code=204,
             validate=dict(
                 headers={
                     'x-container-meta-temp-url-key': key})),
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'x-container-meta-temp-url-key': key}),
    ])
    self.cloud.object_store.set_container_temp_url_key(self.container, key)
    self.assert_calls()
def test_set_container_temp_url_key_secondary(self):
    """secondary=True targets the ...-temp-url-key-2 container header."""
    key = 'super-secure-key'
    self.register_uris([
        dict(method='POST', uri=self.container_endpoint,
             status_code=204,
             validate=dict(
                 headers={
                     'x-container-meta-temp-url-key-2': key})),
        dict(method='HEAD', uri=self.container_endpoint,
             headers={
                 'x-container-meta-temp-url-key-2': key}),
    ])
    self.cloud.object_store.set_container_temp_url_key(
        self.container, key, secondary=True)
    self.assert_calls()
def test_list_objects(self):
    """list_objects returns Object resources built from the JSON."""
    endpoint = self.container_endpoint + '?format=json'
    expected = [{
        u'bytes': 20304400896,
        u'last_modified': u'2016-12-15T13:34:13.650090',
        u'hash': u'daaf9ed2106d09bba96cf193d866445e',
        u'name': self.object,
        u'content_type': u'application/octet-stream'}]
    self.register_uris([
        dict(method='GET', uri=endpoint, complete_qs=True,
             json=expected),
    ])

    listed = self.cloud.list_objects(self.container)

    self.assert_calls()
    for raw, resource in zip(expected, listed):
        self._compare_objects(raw, resource)
def test_list_objects_with_prefix(self):
    """The prefix argument is forwarded as a query parameter."""
    # complete_qs=True means the registered URI must match the query
    # string exactly, so this verifies prefix=test is actually sent.
    endpoint = '{endpoint}?format=json&prefix=test'.format(
        endpoint=self.container_endpoint)
    objects = [{
        u'bytes': 20304400896,
        u'last_modified': u'2016-12-15T13:34:13.650090',
        u'hash': u'daaf9ed2106d09bba96cf193d866445e',
        u'name': self.object,
        u'content_type': u'application/octet-stream'}]
    self.register_uris([dict(method='GET', uri=endpoint, complete_qs=True,
                             json=objects)])
    ret = self.cloud.list_objects(self.container, prefix='test')
    self.assert_calls()
    for a, b in zip(objects, ret):
        self._compare_objects(a, b)
def test_list_objects_exception(self):
    """An unexpected HTTP error surfaces as OpenStackCloudException."""
    endpoint = self.container_endpoint + '?format=json'
    self.register_uris([
        dict(method='GET', uri=endpoint, complete_qs=True,
             status_code=416),
    ])
    self.assertRaises(
        exc.OpenStackCloudException,
        self.cloud.list_objects,
        self.container)
    self.assert_calls()
def test_delete_object(self):
    """Deleting an existing object returns True."""
    # delete_object HEADs the object first, then issues the DELETE.
    self.register_uris([
        dict(method='HEAD', uri=self.object_endpoint,
             headers={'X-Object-Meta': 'foo'}),
        dict(method='DELETE', uri=self.object_endpoint, status_code=204),
    ])
    deleted = self.cloud.delete_object(self.container, self.object)
    self.assertTrue(deleted)
    self.assert_calls()
def test_delete_object_not_found(self):
    """Deleting a missing object returns False instead of raising."""
    self.register_uris([
        dict(method='HEAD', uri=self.object_endpoint, status_code=404),
    ])
    deleted = self.cloud.delete_object(self.container, self.object)
    self.assertFalse(deleted)
    self.assert_calls()
def test_get_object(self):
    """get_object returns a (lower-cased headers, body) tuple."""
    headers = {
        'Content-Length': '20304400896',
        'Content-Type': 'application/octet-stream',
        'Accept-Ranges': 'bytes',
        'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
        'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
        'X-Timestamp': '1481808853.65009',
        'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
        'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
        'X-Static-Large-Object': 'True',
        'X-Object-Meta-Mtime': '1481513709.168512',
    }
    # get_object reports header names lower-cased.
    response_headers = {k.lower(): v for k, v in headers.items()}
    text = 'test body'
    # Reuse the fixture dict and body rather than repeating them as
    # literals, so the stubbed response and the expected value cannot
    # silently drift apart.
    self.register_uris([
        dict(method='GET', uri=self.object_endpoint,
             headers=headers,
             text=text)])
    resp = self.cloud.get_object(self.container, self.object)
    self.assert_calls()
    self.assertEqual((response_headers, text), resp)
def test_stream_object(self):
text = b'test body'
self.register_uris([
dict(method='GET', uri=self.object_endpoint,
headers={
'Content-Length': '20304400896',
'Content-Type': 'application/octet-stream',
'Accept-Ranges': 'bytes',
'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
'X-Timestamp': '1481808853.65009',
'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
'X-Static-Large-Object': 'True',
'X-Object-Meta-Mtime': '1481513709.168512',
},
text='test body')])
response_text = b''
for data in self.cloud.stream_object(self.container, self.object):
response_text += data
self.assert_calls()
self.assertEqual(text, response_text)
def test_stream_object_not_found(self):
self.register_uris([
dict(method='GET', uri=self.object_endpoint, status_code=404),
])
response_text = b''
for data in self.cloud.stream_object(self.container, self.object):
response_text += data
self.assert_calls()
self.assertEqual(b'', response_text)
def test_get_object_not_found(self):
self.register_uris([dict(method='GET',
uri=self.object_endpoint, status_code=404)])
self.assertIsNone(self.cloud.get_object(self.container, self.object))
self.assert_calls()
def test_get_object_exception(self):
self.register_uris([dict(method='GET', uri=self.object_endpoint,
status_code=416)])
self.assertRaises(
openstack.cloud.OpenStackCloudException,
self.cloud.get_object,
self.container, self.object)
self.assert_calls()
    def test_get_object_segment_size_below_min(self):
        """Segment sizes are clamped to the server's advertised limits."""
        # Register directly because we make multiple calls. The number
        # of calls we make isn't interesting - what we do with the return
        # values is. Don't run assert_calls for the same reason.
        self.register_uris([
            dict(method='GET', uri='https://object-store.example.com/info',
                 json=dict(
                     swift={'max_file_size': 1000},
                     slo={'min_segment_size': 500}),
                 headers={'Content-Type': 'application/json'})])
        # Requests below min_segment_size are raised to it ...
        self.assertEqual(500, self.cloud.get_object_segment_size(400))
        self.assertEqual(900, self.cloud.get_object_segment_size(900))
        # ... and requests above max_file_size are capped at it.
        self.assertEqual(1000, self.cloud.get_object_segment_size(1000))
        self.assertEqual(1000, self.cloud.get_object_segment_size(1100))
def test_get_object_segment_size_http_404(self):
self.register_uris([
dict(method='GET', uri='https://object-store.example.com/info',
status_code=404, reason='Not Found')])
self.assertEqual(_proxy.DEFAULT_OBJECT_SEGMENT_SIZE,
self.cloud.get_object_segment_size(None))
self.assert_calls()
def test_get_object_segment_size_http_412(self):
self.register_uris([
dict(method='GET', uri='https://object-store.example.com/info',
status_code=412, reason='Precondition failed')])
self.assertEqual(
_proxy.DEFAULT_OBJECT_SEGMENT_SIZE,
self.cloud.get_object_segment_size(None))
self.assert_calls()
class TestObjectUploads(BaseTestObject):
    def setUp(self):
        """Create a throwaway file with unique content for upload tests."""
        super(TestObjectUploads, self).setUp()
        # latin-1 keeps the encoded byte length equal to the string length.
        self.content = self.getUniqueString().encode('latin-1')
        # delete=False so the closed file can be re-opened by the code
        # under test.
        self.object_file = tempfile.NamedTemporaryFile(delete=False)
        self.object_file.write(self.content)
        self.object_file.close()
        # Pre-compute the checksums the SDK is expected to send as the
        # x-object-meta-x-sdk-md5 / -sha256 headers.
        (self.md5, self.sha256) = utils._get_file_hashes(
            self.object_file.name)
        self.endpoint = self.cloud._object_store_client.get_endpoint()
    def test_create_object(self):
        """A file below max_file_size uploads as one PUT with checksums."""
        self.register_uris([
            dict(method='GET',
                 uri='https://object-store.example.com/info',
                 json=dict(
                     swift={'max_file_size': 1000},
                     slo={'min_segment_size': 500})),
            # Object does not exist yet, so the upload proceeds.
            dict(method='HEAD',
                 uri='{endpoint}/{container}/{object}'.format(
                     endpoint=self.endpoint, container=self.container,
                     object=self.object),
                 status_code=404),
            dict(method='PUT',
                 uri='{endpoint}/{container}/{object}'.format(
                     endpoint=self.endpoint,
                     container=self.container, object=self.object),
                 status_code=201,
                 validate=dict(
                     headers={
                         'x-object-meta-x-sdk-md5': self.md5,
                         'x-object-meta-x-sdk-sha256': self.sha256,
                     }))
        ])
        self.cloud.create_object(
            container=self.container, name=self.object,
            filename=self.object_file.name)
        self.assert_calls()
def test_create_directory_marker_object(self):
self.register_uris([
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=201,
validate=dict(
headers={
'content-type': 'application/directory',
}))
])
self.cloud.create_directory_marker_object(
container=self.container, name=self.object)
self.assert_calls()
    def test_create_dynamic_large_object(self):
        """use_slo=False uploads numbered segments plus an x-object-manifest."""
        max_file_size = 2
        min_file_size = 1
        uris_to_mock = [
            dict(method='GET',
                 uri='https://object-store.example.com/info',
                 json=dict(
                     swift={'max_file_size': max_file_size},
                     slo={'min_segment_size': min_file_size})),
            dict(method='HEAD',
                 uri='{endpoint}/{container}/{object}'.format(
                     endpoint=self.endpoint, container=self.container,
                     object=self.object),
                 status_code=404)
        ]
        # One PUT per max_file_size-byte slice of the content, named with a
        # zero-padded six digit index suffix.
        uris_to_mock.extend(
            [dict(method='PUT',
                  uri='{endpoint}/{container}/{object}/{index:0>6}'.format(
                      endpoint=self.endpoint,
                      container=self.container,
                      object=self.object,
                      index=index),
                  status_code=201)
             for index, offset in enumerate(
                 range(0, len(self.content), max_file_size))]
        )
        # Final PUT creates the DLO manifest pointing at the segments.
        uris_to_mock.append(
            dict(method='PUT',
                 uri='{endpoint}/{container}/{object}'.format(
                     endpoint=self.endpoint,
                     container=self.container, object=self.object),
                 status_code=201,
                 validate=dict(
                     headers={
                         'x-object-manifest': '{container}/{object}'.format(
                             container=self.container, object=self.object),
                         'x-object-meta-x-sdk-md5': self.md5,
                         'x-object-meta-x-sdk-sha256': self.sha256,
                     })))
        self.register_uris(uris_to_mock)
        self.cloud.create_object(
            container=self.container, name=self.object,
            filename=self.object_file.name, use_slo=False)
        # After call 3, order becomes indeterminate because of thread pool
        self.assert_calls(stop_after=3)
        for key, value in self.calls[-1]['headers'].items():
            self.assertEqual(
                value, self.adapter.request_history[-1].headers[key],
                'header mismatch in manifest call')
def test_create_static_large_object(self):
max_file_size = 25
min_file_size = 1
uris_to_mock = [
dict(method='GET', uri='https://object-store.example.com/info',
json=dict(
swift={'max_file_size': max_file_size},
slo={'min_segment_size': min_file_size})),
dict(method='HEAD',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=404)
]
uris_to_mock.extend([
dict(method='PUT',
uri='{endpoint}/{container}/{object}/{index:0>6}'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object,
index=index),
status_code=201,
headers=dict(Etag='etag{index}'.format(index=index)))
for index, offset in enumerate(
range(0, len(self.content), max_file_size))
])
uris_to_mock.append(
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=201,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
})))
self.register_uris(uris_to_mock)
self.cloud.create_object(
container=self.container, name=self.object,
filename=self.object_file.name, use_slo=True)
# After call 3, order become indeterminate because of thread pool
self.assert_calls(stop_after=3)
for key, value in self.calls[-1]['headers'].items():
self.assertEqual(
value, self.adapter.request_history[-1].headers[key],
'header mismatch in manifest call')
base_object = '/{container}/{object}'.format(
container=self.container,
object=self.object)
self.assertEqual([
{
'path': "{base_object}/000000".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag0',
},
{
'path': "{base_object}/000001".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag1',
},
{
'path': "{base_object}/000002".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag2',
},
{
'path': "{base_object}/000003".format(
base_object=base_object),
'size_bytes': len(self.object) - 75,
'etag': 'etag3',
},
], self.adapter.request_history[-1].json())
def test_slo_manifest_retry(self):
"""
Uploading the SLO manifest file should be retried up to 3 times before
giving up. This test should succeed on the 3rd and final attempt.
"""
max_file_size = 25
min_file_size = 1
uris_to_mock = [
dict(method='GET', uri='https://object-store.example.com/info',
json=dict(
swift={'max_file_size': max_file_size},
slo={'min_segment_size': min_file_size})),
dict(method='HEAD',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=404)
]
uris_to_mock.extend([
dict(method='PUT',
uri='{endpoint}/{container}/{object}/{index:0>6}'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object,
index=index),
status_code=201,
headers=dict(Etag='etag{index}'.format(index=index)))
for index, offset in enumerate(
range(0, len(self.content), max_file_size))
])
# manifest file upload calls
uris_to_mock.extend([
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=400,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
})),
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=400,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
})),
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=201,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
})),
])
self.register_uris(uris_to_mock)
self.cloud.create_object(
container=self.container, name=self.object,
filename=self.object_file.name, use_slo=True)
# After call 3, order become indeterminate because of thread pool
self.assert_calls(stop_after=3)
for key, value in self.calls[-1]['headers'].items():
self.assertEqual(
value, self.adapter.request_history[-1].headers[key],
'header mismatch in manifest call')
base_object = '/{container}/{object}'.format(
container=self.container,
object=self.object)
self.assertEqual([
{
'path': "{base_object}/000000".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag0',
},
{
'path': "{base_object}/000001".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag1',
},
{
'path': "{base_object}/000002".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag2',
},
{
'path': "{base_object}/000003".format(
base_object=base_object),
'size_bytes': len(self.object) - 75,
'etag': 'etag3',
},
], self.adapter.request_history[-1].json())
def test_slo_manifest_fail(self):
"""
Uploading the SLO manifest file should be retried up to 3 times before
giving up. This test fails all 3 attempts and should verify that we
delete uploaded segments that begin with the object prefix.
"""
max_file_size = 25
min_file_size = 1
uris_to_mock = [
dict(method='GET', uri='https://object-store.example.com/info',
json=dict(
swift={'max_file_size': max_file_size},
slo={'min_segment_size': min_file_size})),
dict(method='HEAD',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=404)
]
uris_to_mock.extend([
dict(method='PUT',
uri='{endpoint}/{container}/{object}/{index:0>6}'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object,
index=index),
status_code=201,
headers=dict(Etag='etag{index}'.format(index=index)))
for index, offset in enumerate(
range(0, len(self.content), max_file_size))
])
# manifest file upload calls
uris_to_mock.extend([
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=400,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
})),
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=400,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
})),
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=400,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
})),
])
# Cleaning up image upload segments involves calling the
# delete_autocreated_image_objects() API method which will list
# objects (LIST), get the object metadata (HEAD), then delete the
# object (DELETE).
uris_to_mock.extend([
dict(method='GET',
uri='{endpoint}/images?format=json&prefix={prefix}'.format(
endpoint=self.endpoint,
prefix=self.object),
complete_qs=True,
json=[{
'content_type': 'application/octet-stream',
'bytes': 1437258240,
'hash': '249219347276c331b87bf1ac2152d9af',
'last_modified': '2015-02-16T17:50:05.289600',
'name': self.object
}]),
dict(method='HEAD',
uri='{endpoint}/images/{object}'.format(
endpoint=self.endpoint,
object=self.object),
headers={
'X-Timestamp': '1429036140.50253',
'X-Trans-Id': 'txbbb825960a3243b49a36f-005a0dadaedfw1',
'Content-Length': '1290170880',
'Last-Modified': 'Tue, 14 Apr 2015 18:29:01 GMT',
'X-Object-Meta-x-sdk-autocreated': 'true',
'X-Object-Meta-X-Shade-Sha256': 'does not matter',
'X-Object-Meta-X-Shade-Md5': 'does not matter',
'Date': 'Thu, 16 Nov 2017 15:24:30 GMT',
'Accept-Ranges': 'bytes',
'X-Static-Large-Object': 'false',
'Content-Type': 'application/octet-stream',
'Etag': '249219347276c331b87bf1ac2152d9af',
}),
dict(method='DELETE',
uri='{endpoint}/images/{object}'.format(
endpoint=self.endpoint, object=self.object))
])
self.register_uris(uris_to_mock)
# image_api_use_tasks needs to be set to True in order for the API
# method delete_autocreated_image_objects() to do the cleanup.
self.cloud.image_api_use_tasks = True
self.assertRaises(
exc.OpenStackCloudException,
self.cloud.create_object,
container=self.container, name=self.object,
filename=self.object_file.name, use_slo=True)
# After call 3, order become indeterminate because of thread pool
self.assert_calls(stop_after=3)
    def test_object_segment_retry_failure(self):
        """A segment PUT that keeps failing aborts the whole upload."""
        max_file_size = 25
        min_file_size = 1
        self.register_uris([
            dict(method='GET', uri='https://object-store.example.com/info',
                 json=dict(
                     swift={'max_file_size': max_file_size},
                     slo={'min_segment_size': min_file_size})),
            dict(method='HEAD',
                 uri='{endpoint}/{container}/{object}'.format(
                     endpoint=self.endpoint,
                     container=self.container, object=self.object),
                 status_code=404),
            dict(method='PUT',
                 uri='{endpoint}/{container}/{object}/000000'.format(
                     endpoint=self.endpoint,
                     container=self.container,
                     object=self.object),
                 status_code=201),
            dict(method='PUT',
                 uri='{endpoint}/{container}/{object}/000001'.format(
                     endpoint=self.endpoint,
                     container=self.container,
                     object=self.object),
                 status_code=201),
            dict(method='PUT',
                 uri='{endpoint}/{container}/{object}/000002'.format(
                     endpoint=self.endpoint,
                     container=self.container,
                     object=self.object),
                 status_code=201),
            # The last segment keeps failing with a 501 (the mock replays
            # for every retry), so create_object must raise.
            dict(method='PUT',
                 uri='{endpoint}/{container}/{object}/000003'.format(
                     endpoint=self.endpoint,
                     container=self.container,
                     object=self.object),
                 status_code=501),
            dict(method='PUT',
                 uri='{endpoint}/{container}/{object}'.format(
                     endpoint=self.endpoint,
                     container=self.container, object=self.object),
                 status_code=201)
        ])
        self.assertRaises(
            exc.OpenStackCloudException,
            self.cloud.create_object,
            container=self.container, name=self.object,
            filename=self.object_file.name, use_slo=True)
        # After call 3, order becomes indeterminate because of thread pool
        self.assert_calls(stop_after=3)
def test_object_segment_retries(self):
max_file_size = 25
min_file_size = 1
self.register_uris([
dict(method='GET', uri='https://object-store.example.com/info',
json=dict(
swift={'max_file_size': max_file_size},
slo={'min_segment_size': min_file_size})),
dict(method='HEAD',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=404),
dict(method='PUT',
uri='{endpoint}/{container}/{object}/000000'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object),
headers={'etag': 'etag0'},
status_code=201),
dict(method='PUT',
uri='{endpoint}/{container}/{object}/000001'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object),
headers={'etag': 'etag1'},
status_code=201),
dict(method='PUT',
uri='{endpoint}/{container}/{object}/000002'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object),
headers={'etag': 'etag2'},
status_code=201),
dict(method='PUT',
uri='{endpoint}/{container}/{object}/000003'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object),
status_code=501),
dict(method='PUT',
uri='{endpoint}/{container}/{object}/000003'.format(
endpoint=self.endpoint,
container=self.container,
object=self.object),
status_code=201,
headers={'etag': 'etag3'}),
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=201,
validate=dict(
params={
'multipart-manifest', 'put'
},
headers={
'x-object-meta-x-sdk-md5': self.md5,
'x-object-meta-x-sdk-sha256': self.sha256,
}))
])
self.cloud.create_object(
container=self.container, name=self.object,
filename=self.object_file.name, use_slo=True)
# After call 3, order become indeterminate because of thread pool
self.assert_calls(stop_after=3)
for key, value in self.calls[-1]['headers'].items():
self.assertEqual(
value, self.adapter.request_history[-1].headers[key],
'header mismatch in manifest call')
base_object = '/{container}/{object}'.format(
container=self.container,
object=self.object)
self.assertEqual([
{
'path': "{base_object}/000000".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag0',
},
{
'path': "{base_object}/000001".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag1',
},
{
'path': "{base_object}/000002".format(
base_object=base_object),
'size_bytes': 25,
'etag': 'etag2',
},
{
'path': "{base_object}/000003".format(
base_object=base_object),
'size_bytes': len(self.object) - 75,
'etag': 'etag3',
},
], self.adapter.request_history[-1].json())
def test_create_object_skip_checksum(self):
self.register_uris([
dict(method='GET',
uri='https://object-store.example.com/info',
json=dict(
swift={'max_file_size': 1000},
slo={'min_segment_size': 500})),
dict(method='HEAD',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint, container=self.container,
object=self.object),
status_code=200),
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=201,
validate=dict(headers={})),
])
self.cloud.create_object(
container=self.container, name=self.object,
filename=self.object_file.name,
generate_checksums=False)
self.assert_calls()
def test_create_object_data(self):
self.register_uris([
dict(method='PUT',
uri='{endpoint}/{container}/{object}'.format(
endpoint=self.endpoint,
container=self.container, object=self.object),
status_code=201,
validate=dict(
headers={},
data=self.content,
)),
])
self.cloud.create_object(
container=self.container, name=self.object,
data=self.content)
self.assert_calls()
| 39.225587 | 79 | 0.522628 | 5,107 | 51,817 | 5.14862 | 0.089289 | 0.055374 | 0.048528 | 0.040542 | 0.844223 | 0.821784 | 0.801894 | 0.774131 | 0.764547 | 0.740739 | 0 | 0.050857 | 0.360596 | 51,817 | 1,320 | 80 | 39.255303 | 0.742756 | 0.047398 | 0 | 0.765082 | 0 | 0 | 0.172473 | 0.074657 | 0 | 0 | 0 | 0 | 0.072212 | 1 | 0.046618 | false | 0 | 0.010969 | 0 | 0.060329 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0aefc1ac37d3d6b7f458f753ec03090413f30871 | 1,232 | py | Python | player_if.py | MrFlow23/PiMusicDisplay | c1db640cb5dfaaa1725e26225295c31bfa6d9358 | [
"MIT"
] | null | null | null | player_if.py | MrFlow23/PiMusicDisplay | c1db640cb5dfaaa1725e26225295c31bfa6d9358 | [
"MIT"
] | null | null | null | player_if.py | MrFlow23/PiMusicDisplay | c1db640cb5dfaaa1725e26225295c31bfa6d9358 | [
"MIT"
] | null | null | null |
class Player_IF():
    """Abstract interface that concrete music players must implement.

    Every method raises NotImplementedError so a subclass that forgets to
    override one fails loudly at call time.
    """

    def isPlaying(self):
        """Return True while the player is actively playing a track."""
        raise NotImplementedError("Subclass must implement abstract method")

    def getSongAttributes(self):
        """Return artistName, albumName, songName, songLength_ms in this order."""
        raise NotImplementedError("Subclass must implement abstract method")

    # currently not used
    def getSongPosition(self):
        """Return the playback position of the current song."""
        # Bug fix: 'self' was missing, so calling this on an instance raised
        # TypeError instead of the intended NotImplementedError.
        raise NotImplementedError("Subclass must implement abstract method")

    def getCoverImage(self, coverLoader):
        """Return the current track's cover art via *coverLoader*."""
        raise NotImplementedError("Subclass must implement abstract method")

    def play(self):
        """Start or resume playback."""
        raise NotImplementedError("Subclass must implement abstract method")

    def pause(self):
        """Pause playback."""
        raise NotImplementedError("Subclass must implement abstract method")

    def stop(self):
        """Stop playback."""
        raise NotImplementedError("Subclass must implement abstract method")

    def next(self):
        """Skip to the next track."""
        raise NotImplementedError("Subclass must implement abstract method")

    def prev(self):
        """Skip to the previous track."""
        raise NotImplementedError("Subclass must implement abstract method")

    def getPlayerNamer(self):
        """Return the human-readable player name.

        NOTE(review): the name looks like a typo for ``getPlayerName`` but
        is kept unchanged for caller compatibility.
        """
        raise NotImplementedError("Subclass must implement abstract method")
| 32.421053 | 81 | 0.709416 | 122 | 1,232 | 7.114754 | 0.303279 | 0.276498 | 0.368664 | 0.414747 | 0.739631 | 0.739631 | 0.739631 | 0.671659 | 0.456221 | 0 | 0 | 0 | 0.219968 | 1,232 | 37 | 82 | 33.297297 | 0.903226 | 0.106331 | 0 | 0.47619 | 0 | 0 | 0.368272 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.47619 | false | 0 | 0 | 0 | 0.52381 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
7c7ae0b954f68444bcc5df10a81f8f446b565d9c | 19,926 | py | Python | tests/components/test_proximity.py | davidedmundson/home-assistant | cd02563552ffc28239fa17c79a5d9bc0013bd5ac | [
"MIT"
] | null | null | null | tests/components/test_proximity.py | davidedmundson/home-assistant | cd02563552ffc28239fa17c79a5d9bc0013bd5ac | [
"MIT"
] | null | null | null | tests/components/test_proximity.py | davidedmundson/home-assistant | cd02563552ffc28239fa17c79a5d9bc0013bd5ac | [
"MIT"
] | 1 | 2018-11-20T17:44:08.000Z | 2018-11-20T17:44:08.000Z | """
tests.components.test_proximity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests proximity component.
"""
from homeassistant.components import proximity
from tests.common import get_test_home_assistant
class TestProximity:
""" Test the Proximity component. """
def setup_method(self, method):
self.hass = get_test_home_assistant()
self.hass.states.set(
'zone.home', 'zoning',
{
'name': 'home',
'latitude': 2.1,
'longitude': 1.1,
'radius': 10
})
    def teardown_method(self, method):
        """Stop everything that was started for the test."""
        self.hass.stop()
def test_proximity(self):
assert proximity.setup(self.hass, {
'proximity': {
'zone': 'home',
'ignored_zones': {
'work'
},
'devices': {
'device_tracker.test1',
'device_tracker.test2'
},
'tolerance': '1'
}
})
state = self.hass.states.get('proximity.home')
assert state.state == 'not set'
assert state.attributes.get('nearest') == 'not set'
assert state.attributes.get('dir_of_travel') == 'not set'
self.hass.states.set('proximity.home', '0')
self.hass.pool.block_till_done()
state = self.hass.states.get('proximity.home')
assert state.state == '0'
def test_no_devices_in_config(self):
assert not proximity.setup(self.hass, {
'proximity': {
'zone': 'home',
'ignored_zones': {
'work'
},
'tolerance': '1'
}
})
def test_no_tolerance_in_config(self):
assert proximity.setup(self.hass, {
'proximity': {
'zone': 'home',
'ignored_zones': {
'work'
},
'devices': {
'device_tracker.test1',
'device_tracker.test2'
}
}
})
def test_no_ignored_zones_in_config(self):
assert proximity.setup(self.hass, {
'proximity': {
'zone': 'home',
'devices': {
'device_tracker.test1',
'device_tracker.test2'
},
'tolerance': '1'
}
})
def test_no_zone_in_config(self):
assert proximity.setup(self.hass, {
'proximity': {
'ignored_zones': {
'work'
},
'devices': {
'device_tracker.test1',
'device_tracker.test2'
},
'tolerance': '1'
}
})
    def test_device_tracker_test1_in_zone(self):
        """A tracker inside the zone reports distance 0 and 'arrived'."""
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1'
                },
                'tolerance': '1'
            }
        })
        # Place the tracker at exactly the zone's coordinates.
        self.hass.states.set(
            'device_tracker.test1', 'home',
            {
                'friendly_name': 'test1',
                'latitude': 2.1,
                'longitude': 1.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.state == '0'
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'arrived'
    def test_device_trackers_in_zone(self):
        """Two trackers in the zone are both listed as nearest."""
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1',
                    'device_tracker.test2'
                },
                'tolerance': '1'
            }
        })
        self.hass.states.set(
            'device_tracker.test1', 'home',
            {
                'friendly_name': 'test1',
                'latitude': 2.1,
                'longitude': 1.1
            })
        self.hass.pool.block_till_done()
        self.hass.states.set(
            'device_tracker.test2', 'home',
            {
                'friendly_name': 'test2',
                'latitude': 2.1,
                'longitude': 1.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.state == '0'
        # Ordering of the two equally-near devices is not deterministic.
        assert ((state.attributes.get('nearest') == 'test1, test2') or
                (state.attributes.get('nearest') == 'test2, test1'))
        assert state.attributes.get('dir_of_travel') == 'arrived'
    def test_device_tracker_test1_away(self):
        """With one position sample the travel direction is still unknown."""
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1'
                },
                'tolerance': '1'
            }
        })
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 20.1,
                'longitude': 10.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'unknown'
    def test_device_tracker_test1_awayfurther(self):
        """A second, more distant sample makes the direction 'away_from'."""
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1'
                }
            }
        })
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 20.1,
                'longitude': 10.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'unknown'
        # Second update is further from the home zone.
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 40.1,
                'longitude': 20.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'away_from'
    def test_device_tracker_test1_awaycloser(self):
        """A second, closer sample makes the direction 'towards'."""
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1'
                }
            }
        })
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 40.1,
                'longitude': 20.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'unknown'
        # Second update is closer to the home zone.
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 20.1,
                'longitude': 10.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'towards'
    def test_all_device_trackers_in_ignored_zone(self):
        """Devices inside an ignored zone leave the proximity unset."""
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1'
                }
            }
        })
        self.hass.states.set(
            'device_tracker.test1', 'work',
            {
                'friendly_name': 'test1'
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.state == 'not set'
        assert state.attributes.get('nearest') == 'not set'
        assert state.attributes.get('dir_of_travel') == 'not set'
    def test_device_tracker_test1_no_coordinates(self):
        """A tracker without GPS coordinates cannot update the proximity."""
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1'
                },
                'tolerance': '1'
            }
        })
        # No latitude/longitude attributes on the tracker state.
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1'
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'not set'
        assert state.attributes.get('dir_of_travel') == 'not set'
    def test_device_tracker_test1_awayfurther_than_test2_first_test1(self):
        """The closer device stays 'nearest' when the farther one updates."""
        # Register both trackers (without coordinates) before setup.
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1'
            })
        self.hass.pool.block_till_done()
        self.hass.states.set(
            'device_tracker.test2', 'not_home',
            {
                'friendly_name': 'test2'
            })
        self.hass.pool.block_till_done()
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1',
                    'device_tracker.test2'
                }
            }
        })
        # test1 reports first, closer to home ...
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 20.1,
                'longitude': 10.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'unknown'
        # ... then test2 reports farther away, so test1 remains nearest.
        self.hass.states.set(
            'device_tracker.test2', 'not_home',
            {
                'friendly_name': 'test2',
                'latitude': 40.1,
                'longitude': 20.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'unknown'
    def test_device_tracker_test1_awayfurther_than_test2_first_test2(self):
        """'nearest' switches to the closer device when it reports later."""
        # Register both trackers (without coordinates) before setup.
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1'
            })
        self.hass.pool.block_till_done()
        self.hass.states.set(
            'device_tracker.test2', 'not_home',
            {
                'friendly_name': 'test2'
            })
        self.hass.pool.block_till_done()
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1',
                    'device_tracker.test2'
                }
            }
        })
        # test2 reports first, far from home ...
        self.hass.states.set(
            'device_tracker.test2', 'not_home',
            {
                'friendly_name': 'test2',
                'latitude': 40.1,
                'longitude': 20.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test2'
        assert state.attributes.get('dir_of_travel') == 'unknown'
        # ... then test1 reports closer, taking over as nearest.
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 20.1,
                'longitude': 10.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'unknown'
    def test_device_tracker_test1_awayfurther_test2_in_ignored_zone(self):
        """A device parked in an ignored zone is excluded from 'nearest'."""
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1'
            })
        self.hass.pool.block_till_done()
        # test2 sits in the ignored 'work' zone for the whole test.
        self.hass.states.set(
            'device_tracker.test2', 'work',
            {
                'friendly_name': 'test2'
            })
        self.hass.pool.block_till_done()
        assert proximity.setup(self.hass, {
            'proximity': {
                'zone': 'home',
                'ignored_zones': {
                    'work'
                },
                'devices': {
                    'device_tracker.test1',
                    'device_tracker.test2'
                }
            }
        })
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {
                'friendly_name': 'test1',
                'latitude': 20.1,
                'longitude': 10.1
            })
        self.hass.pool.block_till_done()
        state = self.hass.states.get('proximity.home')
        assert state.attributes.get('nearest') == 'test1'
        assert state.attributes.get('dir_of_travel') == 'unknown'
def test_device_tracker_test1_awayfurther_test2_first(self):
    """After test1 finally enters the ignored 'work' zone, test2
    becomes the nearest device even though test1 reported first.
    """
    def set_state(entity_id, status, attributes):
        # Update a tracker and wait for the state change to propagate.
        self.hass.states.set(entity_id, status, attributes)
        self.hass.pool.block_till_done()

    set_state('device_tracker.test1', 'not_home', {'friendly_name': 'test1'})
    set_state('device_tracker.test2', 'not_home', {'friendly_name': 'test2'})

    assert proximity.setup(self.hass, {
        'proximity': {
            'zone': 'home',
            'ignored_zones': {'work'},
            'devices': {'device_tracker.test1', 'device_tracker.test2'},
        }
    })

    # test1 starts closest to home, test2 further out.
    set_state('device_tracker.test1', 'not_home',
              {'friendly_name': 'test1', 'latitude': 10.1, 'longitude': 5.1})
    set_state('device_tracker.test2', 'not_home',
              {'friendly_name': 'test2', 'latitude': 20.1, 'longitude': 10.1})

    # test1 wanders further away in two steps ...
    set_state('device_tracker.test1', 'not_home',
              {'friendly_name': 'test1', 'latitude': 40.1, 'longitude': 20.1})
    set_state('device_tracker.test1', 'not_home',
              {'friendly_name': 'test1', 'latitude': 35.1, 'longitude': 15.1})

    # ... and finally parks in the ignored 'work' zone, leaving test2
    # as the nearest tracked device.
    set_state('device_tracker.test1', 'work', {'friendly_name': 'test1'})

    state = self.hass.states.get('proximity.home')
    assert state.attributes.get('nearest') == 'test2'
    assert state.attributes.get('dir_of_travel') == 'unknown'
def test_device_tracker_test1_awayfurther_a_bit(self):
    """A position change smaller than the configured tolerance is
    reported as 'stationary' rather than a direction of travel.
    """
    assert proximity.setup(self.hass, {
        'proximity': {
            'zone': 'home',
            'ignored_zones': {'work'},
            'devices': {'device_tracker.test1'},
            'tolerance': 1000,
        }
    })

    def set_position(latitude, longitude):
        # Move test1 and wait for the state change to propagate.
        self.hass.states.set(
            'device_tracker.test1', 'not_home',
            {'friendly_name': 'test1',
             'latitude': latitude,
             'longitude': longitude})
        self.hass.pool.block_till_done()

    set_position(20.1000001, 10.1000001)
    state = self.hass.states.get('proximity.home')
    assert state.attributes.get('nearest') == 'test1'
    assert state.attributes.get('dir_of_travel') == 'unknown'

    # Nudge the device by a distance well below the tolerance.
    set_position(20.1000002, 10.1000002)
    state = self.hass.states.get('proximity.home')
    assert state.attributes.get('nearest') == 'test1'
    assert state.attributes.get('dir_of_travel') == 'stationary'
def test_device_tracker_test1_nearest_after_test2_in_ignored_zone(self):
    """test1 becomes nearest again as soon as the closer test2 enters
    an ignored zone.
    """
    def set_state(entity_id, status, attributes):
        # Update a tracker and wait for the state change to propagate.
        self.hass.states.set(entity_id, status, attributes)
        self.hass.pool.block_till_done()

    set_state('device_tracker.test1', 'not_home', {'friendly_name': 'test1'})
    set_state('device_tracker.test2', 'not_home', {'friendly_name': 'test2'})

    assert proximity.setup(self.hass, {
        'proximity': {
            'zone': 'home',
            'ignored_zones': {'work'},
            'devices': {'device_tracker.test1', 'device_tracker.test2'},
        }
    })

    set_state('device_tracker.test1', 'not_home',
              {'friendly_name': 'test1', 'latitude': 20.1, 'longitude': 10.1})
    state = self.hass.states.get('proximity.home')
    assert state.attributes.get('nearest') == 'test1'
    assert state.attributes.get('dir_of_travel') == 'unknown'

    # test2 reports a position closer to home and takes over.
    set_state('device_tracker.test2', 'not_home',
              {'friendly_name': 'test2', 'latitude': 10.1, 'longitude': 5.1})
    state = self.hass.states.get('proximity.home')
    assert state.attributes.get('nearest') == 'test2'
    assert state.attributes.get('dir_of_travel') == 'unknown'

    # Once test2 moves into the ignored 'work' zone it no longer
    # counts, so test1 is nearest again.
    set_state('device_tracker.test2', 'work',
              {'friendly_name': 'test2', 'latitude': 12.6, 'longitude': 7.6})
    state = self.hass.states.get('proximity.home')
    assert state.attributes.get('nearest') == 'test1'
    assert state.attributes.get('dir_of_travel') == 'unknown'
| 32.19063 | 76 | 0.46497 | 1,779 | 19,926 | 5.005059 | 0.05059 | 0.103324 | 0.092767 | 0.113208 | 0.924079 | 0.907008 | 0.90465 | 0.90465 | 0.90465 | 0.894766 | 0 | 0.025754 | 0.405651 | 19,926 | 618 | 77 | 32.242718 | 0.726083 | 0.007578 | 0 | 0.764499 | 0 | 0 | 0.22202 | 0 | 0 | 0 | 0 | 0 | 0.114236 | 1 | 0.035149 | false | 0 | 0.003515 | 0 | 0.040422 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7c7e2c79ad0a633d5cdd49f82df01c75d43f7844 | 18,248 | py | Python | tests/test_StdErrToHTMLConverter.py | eoyilmaz/stalker_pyramid | e7a35a7d4ce786ac9795e25662c130c1b1303848 | [
"MIT"
] | 13 | 2017-05-21T17:44:07.000Z | 2021-12-18T15:26:13.000Z | tests/test_StdErrToHTMLConverter.py | eoyilmaz/stalker_pyramid | e7a35a7d4ce786ac9795e25662c130c1b1303848 | [
"MIT"
] | 5 | 2017-06-14T04:21:22.000Z | 2021-01-29T14:20:28.000Z | tests/test_StdErrToHTMLConverter.py | eoyilmaz/stalker_pyramid | e7a35a7d4ce786ac9795e25662c130c1b1303848 | [
"MIT"
] | 3 | 2017-06-10T09:23:36.000Z | 2018-10-16T17:41:22.000Z | # -*- coding: utf-8 -*-
import unittest
from stalker_pyramid.views import StdErrToHTMLConverter
class StdErrToHTMLConverterTestCase(unittest.TestCase):
    """tests the stalker_pyramid.views.StdErrToHTMLConverter class
    """

    # The expected HTML strings below are very long; always show the
    # full diff when an assertion fails.
    maxDiff = None

    @staticmethod
    def _tj_stderr_output():
        """Return a fresh copy of captured TaskJuggler stderr lines.

        Both tests convert exactly the same ANSI-colored Warning/Info
        output; keeping the fixture in one place removes the previous
        verbatim duplication, and returning a new list on every call
        prevents one test from mutating another test's input.
        """
        return [
            '/tmp/Stalker_3coLKi.tjp:1909: \x1b[35mWarning: The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1359.Task_1356.Task_1357 exceeds the specified effort of 0.1111111111111111d or 1.0h.\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1949: \x1b[35mWarning: The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1367.Task_1370.Task_1371 exceeds the specified effort of 0.1111111111111111d or 1.0h.\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1989: \x1b[35mWarning: The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1368.Task_1369.Task_1375 exceeds the specified effort of 0.1111111111111111d or 1.0h.\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:2029: \x1b[35mWarning: The total effort (2.0d or 18.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1381.Task_1391.Task_1394 exceeds the specified effort of 0.1111111111111111d or 1.0h.\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:2070: \x1b[35mWarning: The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1382.Task_1392.Task_1393 exceeds the specified effort of 0.1111111111111111d or 1.0h.\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1320: \x1b[35mWarning: Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following tasks stole resources from Task_108.Task_109.Asset_130.Task_605 despite having a lower priority:\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1735: \x1b[34mInfo: Task Task_108.Task_109.Asset_581.Task_583\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1762: \x1b[34mInfo: Task Task_108.Task_109.Asset_585.Task_587\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1797: \x1b[34mInfo: Task Task_108.Task_109.Asset_589.Task_591\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1442: \x1b[35mWarning: Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following task stole resources from Task_108.Task_109.Asset_135.Task_552 despite having a lower priority:\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1398: \x1b[34mInfo: Task Task_108.Task_109.Asset_133.Task_545\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1450: \x1b[35mWarning: Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following tasks stole resources from Task_108.Task_109.Asset_135.Task_553 despite having a lower priority:\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1743: \x1b[34mInfo: Task Task_108.Task_109.Asset_581.Task_584\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1485: \x1b[34mInfo: Task Task_108.Task_109.Asset_136.Task_558\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1770: \x1b[34mInfo: Task Task_108.Task_109.Asset_585.Task_588\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1805: \x1b[34mInfo: Task Task_108.Task_109.Asset_589.Task_598\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1485: \x1b[35mWarning: Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following task stole resources from Task_108.Task_109.Asset_136.Task_558 despite having a lower priority:\x1b[0m\n',
            '/tmp/Stalker_3coLKi.tjp:1743: \x1b[34mInfo: Task Task_108.Task_109.Asset_581.Task_584\x1b[0m\n'
        ]

    def test_list_input(self):
        """testing if the class is working with lists as the error message
        """
        test_data = self._tj_stderr_output()
        expected_result = \
            '<p>/tmp/Stalker_3coLKi.tjp:1909: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1359.Task_1356.Task_1357 exceeds the specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1949: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1367.Task_1370.Task_1371 exceeds the specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1989: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1368.Task_1369.Task_1375 exceeds the specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:2029: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> The total effort (2.0d or 18.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1381.Task_1391.Task_1394 exceeds the specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:2070: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> The total effort (1.0d or 9.0h) of the provided bookings for task Task_108.Task_1350.Task_1351.Task_1353.Asset_1382.Task_1392.Task_1393 exceeds the specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1320: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following tasks stole resources from Task_108.Task_109.Asset_130.Task_605 despite having a lower priority:</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1735: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_581.Task_583</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1762: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_585.Task_587</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1797: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_589.Task_591</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1442: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following task stole resources from Task_108.Task_109.Asset_135.Task_552 despite having a lower priority:</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1398: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_133.Task_545</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1450: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following tasks stole resources from Task_108.Task_109.Asset_135.Task_553 despite having a lower priority:</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1743: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_581.Task_584</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1485: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_136.Task_558</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1770: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_585.Task_588</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1805: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_589.Task_598</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1485: <span class="alert alert-warning" style="overflow-wrap: break-word"><strong>Warning:</strong> Due to a mix of ALAP and ASAP scheduled tasks or a dependency on a lower priority tasks the following task stole resources from Task_108.Task_109.Asset_136.Task_558 despite having a lower priority:</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1743: <span class="alert alert-info" style="overflow-wrap: break-word"><strong>Info:</strong> Task Task_108.Task_109.Asset_581.Task_584</span></p>'

        c = StdErrToHTMLConverter(test_data)
        self.assertEqual(
            expected_result,
            c.html()
        )

    def test_replace_links(self):
        """testing if the task names in the error messages are replaced
        with HTML anchors when html() is called with replace_links=True
        """
        test_data = self._tj_stderr_output()
        expected_result = \
            '<p>/tmp/Stalker_3coLKi.tjp:1909: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'The total effort (1.0d or 9.0h) of the provided bookings for ' \
            'task <a href="/tasks/1357/view">Task_1357</a> exceeds the ' \
            'specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1949: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'The total effort (1.0d or 9.0h) of the provided bookings for ' \
            'task <a href="/tasks/1371/view">Task_1371</a> exceeds the ' \
            'specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1989: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'The total effort (1.0d or 9.0h) of the provided bookings for ' \
            'task <a href="/tasks/1375/view">Task_1375</a> exceeds the ' \
            'specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:2029: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'The total effort (2.0d or 18.0h) of the provided bookings for ' \
            'task <a href="/tasks/1394/view">Task_1394</a> exceeds the ' \
            'specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:2070: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'The total effort (1.0d or 9.0h) of the provided bookings for ' \
            'task <a href="/tasks/1393/view">Task_1393</a> exceeds the ' \
            'specified effort of 0.1111111111111111d or 1.0h.</span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1320: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'Due to a mix of ALAP and ASAP scheduled tasks or a dependency ' \
            'on a lower priority tasks the following tasks stole resources ' \
            'from <a href="/tasks/605/view">Task_605</a> despite having a ' \
            'lower priority:</span><br>/tmp/Stalker_3coLKi.tjp:1735: ' \
            '<span class="alert alert-info" style="overflow-wrap: ' \
            'break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/583/view">Task_583</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1762: <span class="alert alert-info" ' \
            'style="overflow-wrap: break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/587/view">Task_587</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1797: <span class="alert alert-info" ' \
            'style="overflow-wrap: break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/591/view">Task_591</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1442: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> Due ' \
            'to a mix of ALAP and ASAP scheduled tasks or a dependency on a ' \
            'lower priority tasks the following task stole resources from ' \
            '<a href="/tasks/552/view">Task_552</a> despite having a lower ' \
            'priority:</span><br>/tmp/Stalker_3coLKi.tjp:1398: ' \
            '<span class="alert alert-info" style="overflow-wrap: ' \
            'break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/545/view">Task_545</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1450: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'Due to a mix of ALAP and ASAP scheduled tasks or a dependency ' \
            'on a lower priority tasks the following tasks stole resources ' \
            'from <a href="/tasks/553/view">Task_553</a> despite having a ' \
            'lower priority:</span><br>/tmp/Stalker_3coLKi.tjp:1743: ' \
            '<span class="alert alert-info" style="overflow-wrap: ' \
            'break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/584/view">Task_584</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1485: <span class="alert alert-info" ' \
            'style="overflow-wrap: break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/558/view">Task_558</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1770: <span class="alert alert-info" ' \
            'style="overflow-wrap: break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/588/view">Task_588</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1805: <span class="alert alert-info" ' \
            'style="overflow-wrap: break-word"><strong>Info:</strong> Task ' \
            '<a href="/tasks/598/view">Task_598</a></span><br>' \
            '/tmp/Stalker_3coLKi.tjp:1485: <span class="alert alert-warning" ' \
            'style="overflow-wrap: break-word"><strong>Warning:</strong> ' \
            'Due to a mix of ALAP and ASAP scheduled tasks or a dependency ' \
            'on a lower priority tasks the following task stole resources ' \
            'from <a href="/tasks/558/view">Task_558</a> despite having a ' \
            'lower priority:</span><br>/tmp/Stalker_3coLKi.tjp:1743: <span ' \
            'class="alert alert-info" style="overflow-wrap: break-word">' \
            '<strong>Info:</strong> Task ' \
            '<a href="/tasks/584/view">Task_584</a>' \
            '</span></p>'

        c = StdErrToHTMLConverter(test_data)
        self.assertEqual(
            expected_result,
            c.html(replace_links=True)
        )
| 107.976331 | 350 | 0.686431 | 2,818 | 18,248 | 4.315117 | 0.054294 | 0.059211 | 0.094737 | 0.1125 | 0.963405 | 0.963405 | 0.958141 | 0.951974 | 0.949589 | 0.946711 | 0 | 0.133431 | 0.180239 | 18,248 | 168 | 351 | 108.619048 | 0.679457 | 0.012769 | 0 | 0.51634 | 0 | 0.352941 | 0.842325 | 0.423579 | 0 | 0 | 0 | 0 | 0.013072 | 1 | 0.013072 | false | 0 | 0.013072 | 0 | 0.03268 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
7c835d25a6e7f52cfba04d2cd728950f8780d5bd | 27,338 | py | Python | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_isis_act.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_isis_act.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_isis_act.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | """ Cisco_IOS_XR_isis_act
This module contains a collection of YANG definitions
for Cisco IOS\-XR ISIS action package configuration.
Copyright (c) 2016\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class ClearIsisProcess(Entity):
    """
    Clear all IS\-IS data structures

    .. attribute:: input

        **type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisProcess.Input>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'isis-act'
    _revision = '2016-06-30'

    def __init__(self):
        super(ClearIsisProcess, self).__init__()
        self._top_entity = None

        # Static YANG metadata used by the YDK runtime.
        self.yang_name = "clear-isis-process"
        self.yang_parent_name = "Cisco-IOS-XR-isis-act"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict()

        # Single child container holding the RPC input.
        self.input = ClearIsisProcess.Input()
        self.input.parent = self
        self._children_name_map["input"] = "input"
        self._segment_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-process"
        # Freeze last, after all attributes above have been created.
        self._is_frozen = True

    class Input(Entity):
        """
        .. attribute:: instance

            Clear data from single IS\-IS instance

            **type**\: :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisProcess.Input.Instance>`

        .. attribute:: process

            Clear all IS\-IS data structures

            **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'isis-act'
        _revision = '2016-06-30'

        def __init__(self):
            super(ClearIsisProcess.Input, self).__init__()

            self.yang_name = "input"
            self.yang_parent_name = "clear-isis-process"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("instance", ("instance", ClearIsisProcess.Input.Instance))])
            self._leafs = OrderedDict([
                ('process', (YLeaf(YType.empty, 'process'), ['Empty'])),
            ])
            self.process = None

            self.instance = ClearIsisProcess.Input.Instance()
            self.instance.parent = self
            self._children_name_map["instance"] = "instance"
            self._segment_path = lambda: "input"
            self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-process/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route assignments through the YDK setattr validator.
            self._perform_setattr(ClearIsisProcess.Input, ['process'], name, value)

        class Instance(Entity):
            """
            Clear data from single IS\-IS instance

            .. attribute:: instance_identifier

                IS\-IS process instance identifier

                **type**\: str
            """

            _prefix = 'isis-act'
            _revision = '2016-06-30'

            def __init__(self):
                super(ClearIsisProcess.Input.Instance, self).__init__()

                self.yang_name = "instance"
                self.yang_parent_name = "input"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('instance_identifier', (YLeaf(YType.str, 'instance-identifier'), ['str'])),
                ])
                self.instance_identifier = None
                self._segment_path = lambda: "instance"
                self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-process/input/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Route assignments through the YDK setattr validator.
                self._perform_setattr(ClearIsisProcess.Input.Instance, ['instance_identifier'], name, value)

    def clone_ptr(self):
        # Create a fresh top-level entity, cache it and return it.
        self._top_entity = ClearIsisProcess()
        return self._top_entity
class ClearIsisRoute(Entity):
    """
    Clear IS\-IS routes

    .. attribute:: input

        **type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisRoute.Input>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'isis-act'
    _revision = '2016-06-30'

    def __init__(self):
        super(ClearIsisRoute, self).__init__()
        self._top_entity = None

        # Static YANG metadata used by the YDK runtime.
        self.yang_name = "clear-isis-route"
        self.yang_parent_name = "Cisco-IOS-XR-isis-act"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict()

        # Single child container holding the RPC input.
        self.input = ClearIsisRoute.Input()
        self.input.parent = self
        self._children_name_map["input"] = "input"
        self._segment_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-route"
        # Freeze last, after all attributes above have been created.
        self._is_frozen = True

    class Input(Entity):
        """
        .. attribute:: instance

            Clear data from single IS\-IS instance

            **type**\: :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisRoute.Input.Instance>`

        .. attribute:: route

            Clear IS\-IS routes

            **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'isis-act'
        _revision = '2016-06-30'

        def __init__(self):
            super(ClearIsisRoute.Input, self).__init__()

            self.yang_name = "input"
            self.yang_parent_name = "clear-isis-route"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("instance", ("instance", ClearIsisRoute.Input.Instance))])
            self._leafs = OrderedDict([
                ('route', (YLeaf(YType.empty, 'route'), ['Empty'])),
            ])
            self.route = None

            self.instance = ClearIsisRoute.Input.Instance()
            self.instance.parent = self
            self._children_name_map["instance"] = "instance"
            self._segment_path = lambda: "input"
            self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-route/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route assignments through the YDK setattr validator.
            self._perform_setattr(ClearIsisRoute.Input, ['route'], name, value)

        class Instance(Entity):
            """
            Clear data from single IS\-IS instance

            .. attribute:: instance_identifier

                IS\-IS process instance identifier

                **type**\: str
            """

            _prefix = 'isis-act'
            _revision = '2016-06-30'

            def __init__(self):
                super(ClearIsisRoute.Input.Instance, self).__init__()

                self.yang_name = "instance"
                self.yang_parent_name = "input"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('instance_identifier', (YLeaf(YType.str, 'instance-identifier'), ['str'])),
                ])
                self.instance_identifier = None
                self._segment_path = lambda: "instance"
                self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-route/input/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Route assignments through the YDK setattr validator.
                self._perform_setattr(ClearIsisRoute.Input.Instance, ['instance_identifier'], name, value)

    def clone_ptr(self):
        # Create a fresh top-level entity, cache it and return it.
        self._top_entity = ClearIsisRoute()
        return self._top_entity
class ClearIsisStat(Entity):
    """
    Clear IS\-IS protocol statistics

    .. attribute:: input

        **type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisStat.Input>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'isis-act'
    _revision = '2016-06-30'

    def __init__(self):
        super(ClearIsisStat, self).__init__()
        self._top_entity = None

        # Static YANG metadata used by the YDK runtime.
        self.yang_name = "clear-isis-stat"
        self.yang_parent_name = "Cisco-IOS-XR-isis-act"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict()

        # Single child container holding the RPC input.
        self.input = ClearIsisStat.Input()
        self.input.parent = self
        self._children_name_map["input"] = "input"
        self._segment_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-stat"
        # Freeze last, after all attributes above have been created.
        self._is_frozen = True

    class Input(Entity):
        """
        .. attribute:: instance

            Clear data from single IS\-IS instance

            **type**\: :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisStat.Input.Instance>`

        .. attribute:: statistics

            Clear IS\-IS protocol statistics

            **type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisStat.Input.Statistics>`
        """

        _prefix = 'isis-act'
        _revision = '2016-06-30'

        def __init__(self):
            super(ClearIsisStat.Input, self).__init__()

            self.yang_name = "input"
            self.yang_parent_name = "clear-isis-stat"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # Two child containers here (no leafs), unlike the other RPCs.
            self._child_classes = OrderedDict([("instance", ("instance", ClearIsisStat.Input.Instance)), ("statistics", ("statistics", ClearIsisStat.Input.Statistics))])
            self._leafs = OrderedDict()

            self.instance = ClearIsisStat.Input.Instance()
            self.instance.parent = self
            self._children_name_map["instance"] = "instance"

            self.statistics = ClearIsisStat.Input.Statistics()
            self.statistics.parent = self
            self._children_name_map["statistics"] = "statistics"
            self._segment_path = lambda: "input"
            self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-stat/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route assignments through the YDK setattr validator.
            self._perform_setattr(ClearIsisStat.Input, [], name, value)

        class Instance(Entity):
            """
            Clear data from single IS\-IS instance

            .. attribute:: instance_identifier

                IS\-IS process instance identifier

                **type**\: str
            """

            _prefix = 'isis-act'
            _revision = '2016-06-30'

            def __init__(self):
                super(ClearIsisStat.Input.Instance, self).__init__()

                self.yang_name = "instance"
                self.yang_parent_name = "input"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('instance_identifier', (YLeaf(YType.str, 'instance-identifier'), ['str'])),
                ])
                self.instance_identifier = None
                self._segment_path = lambda: "instance"
                self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-stat/input/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Route assignments through the YDK setattr validator.
                self._perform_setattr(ClearIsisStat.Input.Instance, ['instance_identifier'], name, value)

        class Statistics(Entity):
            """
            Clear IS\-IS protocol statistics

            .. attribute:: interface_name

                Interface name

                **type**\: str

                **pattern:** [a\-zA\-Z0\-9.\_/\-]+

                **mandatory**\: True
            """

            _prefix = 'isis-act'
            _revision = '2016-06-30'

            def __init__(self):
                super(ClearIsisStat.Input.Statistics, self).__init__()

                self.yang_name = "statistics"
                self.yang_parent_name = "input"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
                ])
                self.interface_name = None
                self._segment_path = lambda: "statistics"
                self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-stat/input/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Route assignments through the YDK setattr validator.
                self._perform_setattr(ClearIsisStat.Input.Statistics, ['interface_name'], name, value)

    def clone_ptr(self):
        # Create a fresh top-level entity, cache it and return it.
        self._top_entity = ClearIsisStat()
        return self._top_entity
class ClearIsisDist(Entity):
"""
Reset BGP\-LS topology distribution
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisDist.Input>`
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsisDist, self).__init__()
self._top_entity = None
self.yang_name = "clear-isis-dist"
self.yang_parent_name = "Cisco-IOS-XR-isis-act"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self.input = ClearIsisDist.Input()
self.input.parent = self
self._children_name_map["input"] = "input"
self._segment_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-dist"
self._is_frozen = True
class Input(Entity):
"""
.. attribute:: instance
Reset BGP\-LS topology from single IS\-IS instance
**type**\: :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisDist.Input.Instance>`
.. attribute:: distribution
Reset BGP\-LS topology distribution
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsisDist.Input, self).__init__()
self.yang_name = "input"
self.yang_parent_name = "clear-isis-dist"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("instance", ("instance", ClearIsisDist.Input.Instance))])
self._leafs = OrderedDict([
('distribution', (YLeaf(YType.empty, 'distribution'), ['Empty'])),
])
self.distribution = None
self.instance = ClearIsisDist.Input.Instance()
self.instance.parent = self
self._children_name_map["instance"] = "instance"
self._segment_path = lambda: "input"
self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-dist/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ClearIsisDist.Input, ['distribution'], name, value)
class Instance(Entity):
"""
Reset BGP\-LS topology from single IS\-IS instance
.. attribute:: instance_identifier
IS\-IS process instance identifier
**type**\: str
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsisDist.Input.Instance, self).__init__()
self.yang_name = "instance"
self.yang_parent_name = "input"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('instance_identifier', (YLeaf(YType.str, 'instance-identifier'), ['str'])),
])
self.instance_identifier = None
self._segment_path = lambda: "instance"
self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-dist/input/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ClearIsisDist.Input.Instance, ['instance_identifier'], name, value)
def clone_ptr(self):
self._top_entity = ClearIsisDist()
return self._top_entity
class ClearIsisLocalLsp(Entity):
"""
Clean and regenerate local LSPs
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisLocalLsp.Input>`
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsisLocalLsp, self).__init__()
self._top_entity = None
self.yang_name = "clear-isis-local-lsp"
self.yang_parent_name = "Cisco-IOS-XR-isis-act"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self.input = ClearIsisLocalLsp.Input()
self.input.parent = self
self._children_name_map["input"] = "input"
self._segment_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-local-lsp"
self._is_frozen = True
class Input(Entity):
"""
.. attribute:: instance
Clean and regenerate local LSPs from single IS\-IS instance
**type**\: :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsisLocalLsp.Input.Instance>`
.. attribute:: local_lsp
Clean and regenerate local LSPs
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsisLocalLsp.Input, self).__init__()
self.yang_name = "input"
self.yang_parent_name = "clear-isis-local-lsp"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("instance", ("instance", ClearIsisLocalLsp.Input.Instance))])
self._leafs = OrderedDict([
('local_lsp', (YLeaf(YType.empty, 'local-lsp'), ['Empty'])),
])
self.local_lsp = None
self.instance = ClearIsisLocalLsp.Input.Instance()
self.instance.parent = self
self._children_name_map["instance"] = "instance"
self._segment_path = lambda: "input"
self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-local-lsp/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ClearIsisLocalLsp.Input, ['local_lsp'], name, value)
class Instance(Entity):
"""
Clean and regenerate local LSPs from single IS\-IS instance
.. attribute:: instance_identifier
IS\-IS process instance identifier
**type**\: str
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsisLocalLsp.Input.Instance, self).__init__()
self.yang_name = "instance"
self.yang_parent_name = "input"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('instance_identifier', (YLeaf(YType.str, 'instance-identifier'), ['str'])),
])
self.instance_identifier = None
self._segment_path = lambda: "instance"
self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis-local-lsp/input/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ClearIsisLocalLsp.Input.Instance, ['instance_identifier'], name, value)
def clone_ptr(self):
self._top_entity = ClearIsisLocalLsp()
return self._top_entity
class ClearIsis(Entity):
"""
Clear IS\-IS data structures
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsis.Input>`
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsis, self).__init__()
self._top_entity = None
self.yang_name = "clear-isis"
self.yang_parent_name = "Cisco-IOS-XR-isis-act"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self.input = ClearIsis.Input()
self.input.parent = self
self._children_name_map["input"] = "input"
self._segment_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis"
self._is_frozen = True
class Input(Entity):
"""
.. attribute:: instance
Clear data from single IS\-IS instance
**type**\: :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsis.Input.Instance>`
.. attribute:: rt_type
Clear data for these route types
**type**\: :py:class:`RtType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act.ClearIsis.Input.RtType>`
.. attribute:: route
Clear IS\-IS routes
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: topology
Topology table information
**type**\: str
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsis.Input, self).__init__()
self.yang_name = "input"
self.yang_parent_name = "clear-isis"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("instance", ("instance", ClearIsis.Input.Instance))])
self._leafs = OrderedDict([
('rt_type', (YLeaf(YType.enumeration, 'rt-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsis', 'Input.RtType')])),
('route', (YLeaf(YType.empty, 'route'), ['Empty'])),
('topology', (YLeaf(YType.str, 'topology'), ['str'])),
])
self.rt_type = None
self.route = None
self.topology = None
self.instance = ClearIsis.Input.Instance()
self.instance.parent = self
self._children_name_map["instance"] = "instance"
self._segment_path = lambda: "input"
self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ClearIsis.Input, ['rt_type', 'route', 'topology'], name, value)
class RtType(Enum):
"""
RtType (Enum Class)
Clear data for these route types
.. data:: AFI_ALL_MULTICAST = 0
.. data:: AFI_ALL_SAFI_ALL = 1
.. data:: AFI_ALL_UNICAST = 2
.. data:: IPv4_MULTICAST = 3
.. data:: IPv4_SAFI_ALL = 4
.. data:: IPv4_UNICAST = 5
.. data:: IPv6_MULTICAST = 6
.. data:: IPv6_SAFI_ALL = 7
.. data:: IPv6_UNICAST = 8
"""
AFI_ALL_MULTICAST = Enum.YLeaf(0, "AFI-ALL-MULTICAST")
AFI_ALL_SAFI_ALL = Enum.YLeaf(1, "AFI-ALL-SAFI-ALL")
AFI_ALL_UNICAST = Enum.YLeaf(2, "AFI-ALL-UNICAST")
IPv4_MULTICAST = Enum.YLeaf(3, "IPv4-MULTICAST")
IPv4_SAFI_ALL = Enum.YLeaf(4, "IPv4-SAFI-ALL")
IPv4_UNICAST = Enum.YLeaf(5, "IPv4-UNICAST")
IPv6_MULTICAST = Enum.YLeaf(6, "IPv6-MULTICAST")
IPv6_SAFI_ALL = Enum.YLeaf(7, "IPv6-SAFI-ALL")
IPv6_UNICAST = Enum.YLeaf(8, "IPv6-UNICAST")
class Instance(Entity):
"""
Clear data from single IS\-IS instance
.. attribute:: instance_identifier
IS\-IS process instance identifier
**type**\: str
"""
_prefix = 'isis-act'
_revision = '2016-06-30'
def __init__(self):
super(ClearIsis.Input.Instance, self).__init__()
self.yang_name = "instance"
self.yang_parent_name = "input"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('instance_identifier', (YLeaf(YType.str, 'instance-identifier'), ['str'])),
])
self.instance_identifier = None
self._segment_path = lambda: "instance"
self._absolute_path = lambda: "Cisco-IOS-XR-isis-act:clear-isis/input/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(ClearIsis.Input.Instance, ['instance_identifier'], name, value)
def clone_ptr(self):
self._top_entity = ClearIsis()
return self._top_entity
| 32.276269 | 169 | 0.560465 | 2,836 | 27,338 | 5.088505 | 0.055007 | 0.029104 | 0.039498 | 0.040746 | 0.855242 | 0.798489 | 0.779988 | 0.77479 | 0.77479 | 0.762941 | 0 | 0.010825 | 0.324164 | 27,338 | 846 | 170 | 32.314421 | 0.770242 | 0.172068 | 0 | 0.706856 | 0 | 0.002364 | 0.131822 | 0.04654 | 0 | 0 | 0 | 0 | 0 | 1 | 0.089835 | false | 0 | 0.01182 | 0 | 0.191489 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7c9c15336779984d3aa25ef853f6173464f33f99 | 113 | py | Python | gym_turtlebot3/envs/__init__.py | EmanuelSamir/gym-turtlebot3 | 674d432310337f91ee01407247dd1e80a7f7413b | [
"MIT"
] | 6 | 2019-08-07T06:35:24.000Z | 2022-03-18T08:05:50.000Z | gym_turtlebot3/envs/__init__.py | EmanuelSamir/gym-turtlebot3 | 674d432310337f91ee01407247dd1e80a7f7413b | [
"MIT"
] | null | null | null | gym_turtlebot3/envs/__init__.py | EmanuelSamir/gym-turtlebot3 | 674d432310337f91ee01407247dd1e80a7f7413b | [
"MIT"
] | 4 | 2019-12-30T03:08:41.000Z | 2021-09-14T13:49:05.000Z | from gym_turtlebot3.envs.respawnGoal import Respawn
from gym_turtlebot3.envs.turtlebot3_env import TurtleBot3Env
| 37.666667 | 60 | 0.893805 | 15 | 113 | 6.533333 | 0.6 | 0.142857 | 0.346939 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.038095 | 0.070796 | 113 | 2 | 61 | 56.5 | 0.895238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7ccedd44f33d2ab01951fa9ea5119d63981b69d5 | 89 | py | Python | test/tokenize/t28.py | timmartin/skulpt | 2e3a3fbbaccc12baa29094a717ceec491a8a6750 | [
"MIT"
] | 2,671 | 2015-01-03T08:23:25.000Z | 2022-03-31T06:15:48.000Z | test/tokenize/t28.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | [
"MIT"
] | 972 | 2015-01-05T08:11:00.000Z | 2022-03-29T13:47:15.000Z | test/tokenize/t28.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | [
"MIT"
] | 845 | 2015-01-03T19:53:36.000Z | 2022-03-29T18:34:22.000Z | if 1 < 1 > 1 == 1 >= 5 <= 0x15 <= 0x12 != 1 and 5 in 1 not in 1 is 1 or 5 is not 1: pass
| 44.5 | 88 | 0.505618 | 24 | 89 | 1.875 | 0.458333 | 0.133333 | 0.133333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.310345 | 0.348315 | 89 | 1 | 89 | 89 | 0.465517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089888 | 0 | 0 | 1 | 0 | true | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
7cd09937d21007f8c8dad071d9716398354910b6 | 7,120 | py | Python | emotions/anime.py | csitsociety/corporeal-bit | 727a133836aa582f9afb9558986ed4db678f78cb | [
"MIT"
] | null | null | null | emotions/anime.py | csitsociety/corporeal-bit | 727a133836aa582f9afb9558986ed4db678f78cb | [
"MIT"
] | null | null | null | emotions/anime.py | csitsociety/corporeal-bit | 727a133836aa582f9afb9558986ed4db678f78cb | [
"MIT"
] | null | null | null | import numpy as np
from PIL import Image
from luma.core.sprite_system import framerate_regulator
_frames = [
np.asarray(dtype=np.dtype('uint8'), a=[
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,1,1,1,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,0,1,1,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
]),
np.asarray(dtype=np.dtype('uint8'), a=[
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,1,1,1,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,0,1,1,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
]),
]
def get_frames():
frames = []
for i in range(len(_frames)):
frames.append(Image.fromarray(_frames[i]*255, 'L').convert('1'))
return frames
def play(d, event):
while not event.is_set():
for i in range(len(frames)):
if not event.is_set():
with regulator:
d.display(frames[i])
frames = get_frames()
regulator = framerate_regulator(fps=2)
| 120.677966 | 204 | 0.503652 | 3,287 | 7,120 | 1.087922 | 0.01369 | 1.180089 | 1.676175 | 2.114094 | 0.921141 | 0.921141 | 0.909955 | 0.909955 | 0.909955 | 0.909955 | 0 | 0.46217 | 0.025421 | 7,120 | 58 | 205 | 122.758621 | 0.053178 | 0 | 0 | 0.185185 | 0 | 0 | 0.001685 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037037 | false | 0 | 0.055556 | 0 | 0.111111 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 |
6b306e99d01b029d73291dc62ac11711738c437b | 97,519 | py | Python | sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_mongo_db_resources_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 1 | 2021-09-07T18:35:49.000Z | 2021-09-07T18:35:49.000Z | sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_mongo_db_resources_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 4 | 2019-04-17T17:57:49.000Z | 2020-04-24T21:11:22.000Z | sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_mongo_db_resources_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 2 | 2021-05-23T16:46:31.000Z | 2021-05-26T23:51:09.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class MongoDBResourcesOperations(object):
    """MongoDBResourcesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.cosmosdb.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Alias so callers can reach the generated model classes through the
    # operations group (e.g. client.mongo_db_resources.models.X).
    models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    def list_mongo_db_databases(
        self,
        resource_group_name,  # type: str
        account_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.MongoDBDatabaseListResult"]
        """Lists the MongoDB databases under an existing Azure Cosmos DB database account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param account_name: Cosmos DB database account name.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either MongoDBDatabaseListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.cosmosdb.models.MongoDBDatabaseListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MongoDBDatabaseListResult"]
        # Map auth/not-found/conflict status codes to their azure-core exception
        # types; callers may extend or override via the 'error_map' keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-15"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for one page: the first page is formatted from
            # the templated operation URL; continuation pages reuse the raw
            # next_link returned by the service (which already embeds its query
            # string, so no 'api-version' is appended in that branch).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_mongo_db_databases.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                    'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and hand its items to ItemPaged. The first
            # tuple element is the continuation token; it is always None here —
            # this list result only exposes a 'value' collection, no next link.
            deserialized = self._deserialize('MongoDBDatabaseListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page and raise on any non-200 response.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        # Lazily-evaluated pager; no request is sent until iteration starts.
        return ItemPaged(
            get_next, extract_data
        )
    list_mongo_db_databases.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases'}  # type: ignore
def get_mongo_db_database(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.MongoDBDatabaseGetResults"
"""Gets the MongoDB databases under an existing Azure Cosmos DB database account with the provided
name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MongoDBDatabaseGetResults, or the result of cls(response)
:rtype: ~azure.mgmt.cosmosdb.models.MongoDBDatabaseGetResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MongoDBDatabaseGetResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-15"
accept = "application/json"
# Construct URL
url = self.get_mongo_db_database.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('MongoDBDatabaseGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_mongo_db_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'} # type: ignore
def _create_update_mongo_db_database_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
create_update_mongo_db_database_parameters, # type: "_models.MongoDBDatabaseCreateUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.MongoDBDatabaseGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.MongoDBDatabaseGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-15"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_update_mongo_db_database_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(create_update_mongo_db_database_parameters, 'MongoDBDatabaseCreateUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('MongoDBDatabaseGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_update_mongo_db_database_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'} # type: ignore
def begin_create_update_mongo_db_database(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    create_update_mongo_db_database_parameters,  # type: "_models.MongoDBDatabaseCreateUpdateParameters"
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.MongoDBDatabaseGetResults"]
    """Create or updates Azure Cosmos DB MongoDB database.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param create_update_mongo_db_database_parameters: The parameters to provide for the current
     MongoDB database.
    :type create_update_mongo_db_database_parameters: ~azure.mgmt.cosmosdb.models.MongoDBDatabaseCreateUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either MongoDBDatabaseGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.MongoDBDatabaseGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.MongoDBDatabaseGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved poller state: issue the initial PUT.  The cls override
        # makes the helper hand back the raw pipeline response so the poller
        # can deserialize the terminal result itself.
        raw_result = self._create_update_mongo_db_database_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            create_update_mongo_db_database_parameters=create_update_mongo_db_database_parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Drop request-only kwargs so they are not forwarded to the polling method.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response of the long-running operation.
        deserialized = self._deserialize('MongoDBDatabaseGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Given to ARMPolling so it can expand templated operation-status URLs
    # during polling (presumably Azure-AsyncOperation/Location links — confirm
    # against azure-core ARMPolling docs).
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously-saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_update_mongo_db_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'}  # type: ignore
def _delete_mongo_db_database_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Issue the initial DELETE request of the delete-MongoDB-database LRO.

    Succeeds on 202 (accepted) or 204 (no content); any other status raises
    :class:`~azure.core.exceptions.HttpResponseError`.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"

    # Expand the templated URL from the operation metadata in a single call.
    request_url = self._client.format_url(
        self._delete_mongo_db_database_initial.metadata['url'],  # type: ignore
        subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        accountName=self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
    )

    # Query string and (empty) headers for the DELETE call.
    query_params = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_params = {}  # type: Dict[str, Any]

    request = self._client.delete(request_url, query_params, header_params)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (202, 204):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
_delete_mongo_db_database_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'}  # type: ignore
def begin_delete_mongo_db_database(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller[None]
    """Deletes an existing Azure Cosmos DB MongoDB database.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved poller state: issue the initial DELETE.  The cls override
        # returns the raw pipeline response for the poller to track.
        raw_result = self._delete_mongo_db_database_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Drop request-only kwargs so they are not forwarded to the polling method.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Delete has no body to deserialize; only invoke a custom cls hook.
        if cls:
            return cls(pipeline_response, None, {})

    # Given to ARMPolling to expand templated operation-status URLs while
    # polling (assumption from the generated pattern — confirm in azure-core).
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously-saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_mongo_db_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'}  # type: ignore
def get_mongo_db_database_throughput(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> "_models.ThroughputSettingsGetResults"
    """Gets the RUs per second of the MongoDB database under an existing Azure Cosmos DB database
    account with the provided name.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ThroughputSettingsGetResults, or the result of cls(response)
    :rtype: ~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"
    accept = "application/json"

    # Expand the templated URL from the operation metadata in a single call.
    request_url = self._client.format_url(
        self.get_mongo_db_database_throughput.metadata['url'],  # type: ignore
        subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        accountName=self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
    )

    query_params = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_params = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, header_params)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    result = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
    return cls(pipeline_response, result, {}) if cls else result
get_mongo_db_database_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default'}  # type: ignore
def _update_mongo_db_database_throughput_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    update_throughput_parameters,  # type: "_models.ThroughputSettingsUpdateParameters"
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ThroughputSettingsGetResults"]
    """Initial PUT of the update-throughput long-running operation.

    Returns the deserialized ThroughputSettingsGetResults on 200, or None on
    202 (accepted; the operation is still running).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ThroughputSettingsGetResults"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self._update_mongo_db_database_throughput_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the request body and send the PUT.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(update_throughput_parameters, 'ThroughputSettingsUpdateParameters')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        # This operation returns a richer error body than the generic ARM format.
        error = self._deserialize(_models.ErrorResponseUpdatedFormat, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_update_mongo_db_database_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default'}  # type: ignore
def begin_update_mongo_db_database_throughput(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    update_throughput_parameters,  # type: "_models.ThroughputSettingsUpdateParameters"
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ThroughputSettingsGetResults"]
    """Update RUs per second of the an Azure Cosmos DB MongoDB database.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param update_throughput_parameters: The RUs per second of the parameters to provide for the
     current MongoDB database.
    :type update_throughput_parameters: ~azure.mgmt.cosmosdb.models.ThroughputSettingsUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved poller state: issue the initial PUT.  The cls override
        # returns the raw pipeline response for the poller to track.
        raw_result = self._update_mongo_db_database_throughput_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            update_throughput_parameters=update_throughput_parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Drop request-only kwargs so they are not forwarded to the polling method.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response of the long-running operation.
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Given to ARMPolling to expand templated operation-status URLs while
    # polling (assumption from the generated pattern — confirm in azure-core).
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously-saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_mongo_db_database_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default'}  # type: ignore
def _migrate_mongo_db_database_to_autoscale_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ThroughputSettingsGetResults"]
    """Initial POST of the migrate-to-autoscale long-running operation.

    Returns the deserialized ThroughputSettingsGetResults on 200, or None on
    202 (accepted; the operation is still running).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ThroughputSettingsGetResults"]]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"
    accept = "application/json"

    # Expand the templated URL from the operation metadata in a single call.
    request_url = self._client.format_url(
        self._migrate_mongo_db_database_to_autoscale_initial.metadata['url'],  # type: ignore
        subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        accountName=self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
    )

    query_params = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_params = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.post(request_url, query_params, header_params)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (200, 202):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        # This operation reports errors in a richer format than plain ARM.
        error = self._deserialize(_models.ErrorResponseUpdatedFormat, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    outcome = None
    if response.status_code == 200:
        outcome = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
    return cls(pipeline_response, outcome, {}) if cls else outcome
_migrate_mongo_db_database_to_autoscale_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default/migrateToAutoscale'}  # type: ignore
def begin_migrate_mongo_db_database_to_autoscale(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ThroughputSettingsGetResults"]
    """Migrate an Azure Cosmos DB MongoDB database from manual throughput to autoscale.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved poller state: issue the initial POST.  The cls override
        # returns the raw pipeline response for the poller to track.
        raw_result = self._migrate_mongo_db_database_to_autoscale_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Drop request-only kwargs so they are not forwarded to the polling method.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response of the long-running operation.
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Given to ARMPolling to expand templated operation-status URLs while
    # polling (assumption from the generated pattern — confirm in azure-core).
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously-saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_mongo_db_database_to_autoscale.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default/migrateToAutoscale'}  # type: ignore
def _migrate_mongo_db_database_to_manual_throughput_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ThroughputSettingsGetResults"]
    """Initial POST of the migrate-to-manual-throughput long-running operation.

    Returns the deserialized ThroughputSettingsGetResults on 200, or None on
    202 (accepted; the operation is still running).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ThroughputSettingsGetResults"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"
    accept = "application/json"

    # Construct URL
    url = self._migrate_mongo_db_database_to_manual_throughput_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        # This operation returns a richer error body than the generic ARM format.
        error = self._deserialize(_models.ErrorResponseUpdatedFormat, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_migrate_mongo_db_database_to_manual_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default/migrateToManualThroughput'}  # type: ignore
def begin_migrate_mongo_db_database_to_manual_throughput(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ThroughputSettingsGetResults"]
    """Migrate an Azure Cosmos DB MongoDB database from autoscale to manual throughput.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved poller state: issue the initial POST.  The cls override
        # returns the raw pipeline response for the poller to track.
        raw_result = self._migrate_mongo_db_database_to_manual_throughput_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Drop request-only kwargs so they are not forwarded to the polling method.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response of the long-running operation.
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Given to ARMPolling to expand templated operation-status URLs while
    # polling (assumption from the generated pattern — confirm in azure-core).
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously-saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_mongo_db_database_to_manual_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default/migrateToManualThroughput'}  # type: ignore
def list_mongo_db_collections(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.MongoDBCollectionListResult"]
    """Lists the MongoDB collection under an existing Azure Cosmos DB database account.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either MongoDBCollectionListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.cosmosdb.models.MongoDBCollectionListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.MongoDBCollectionListResult"]
    # Well-known status codes mapped to typed azure-core exceptions; callers
    # may extend/override the mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request.  The first page expands the operation's URL
        # template; follow-up pages reuse the service-provided next_link as-is
        # (it already embeds the query string, hence empty query parameters).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_mongo_db_collections.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
                'databaseName': self._serialize.url("database_name", database_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Turn one page response into (continuation_token, iterator-of-items).
        # The continuation token is always None here: this operation's list
        # result carries no next-link field, so a single page is returned.
        deserialized = self._deserialize('MongoDBCollectionListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page and fail fast on any non-200 status.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    # ItemPaged drives get_next/extract_data lazily as the caller iterates.
    return ItemPaged(
        get_next, extract_data
    )
list_mongo_db_collections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections'}  # type: ignore
def get_mongo_db_collection(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> "_models.MongoDBCollectionGetResults"
    """Gets the MongoDB collection under an existing Azure Cosmos DB database account.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param collection_name: Cosmos DB collection name.
    :type collection_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MongoDBCollectionGetResults, or the result of cls(response)
    :rtype: ~azure.mgmt.cosmosdb.models.MongoDBCollectionGetResults
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.MongoDBCollectionGetResults"]
    # Base status-code -> exception mapping, overridable via 'error_map'.
    error_map = dict(
        {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError},
        **kwargs.pop('error_map', {})
    )
    api_version = "2021-03-15"
    accept = "application/json"

    # Expand the URL template with serialized (and validated) path arguments.
    path_args = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    request_url = self._client.format_url(
        self.get_mongo_db_collection.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_params = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, header_params)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 is a success for this GET.
    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MongoDBCollectionGetResults', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized
get_mongo_db_collection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'}  # type: ignore
def _create_update_mongo_db_collection_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    create_update_mongo_db_collection_parameters,  # type: "_models.MongoDBCollectionCreateUpdateParameters"
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.MongoDBCollectionGetResults"]
    """Issue the initial PUT of the create/update-collection long-running operation.

    Returns the deserialized body on 200, or None on 202 (operation accepted;
    the final result is obtained by polling).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.MongoDBCollectionGetResults"]]
    # Base status-code -> exception mapping, overridable via 'error_map'.
    error_map = dict(
        {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError},
        **kwargs.pop('error_map', {})
    )
    api_version = "2021-03-15"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with serialized (and validated) path arguments.
    path_args = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    request_url = self._client.format_url(
        self._create_update_mongo_db_collection_initial.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_params = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content = self._serialize.body(create_update_mongo_db_collection_parameters, 'MongoDBCollectionCreateUpdateParameters')
    request = self._client.put(request_url, query_params, header_params, content=body_content)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (200, 202):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # A 202 carries no body worth deserializing; only 200 does.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('MongoDBCollectionGetResults', pipeline_response)

    return cls(pipeline_response, deserialized, {}) if cls else deserialized
_create_update_mongo_db_collection_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'}  # type: ignore
def begin_create_update_mongo_db_collection(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    create_update_mongo_db_collection_parameters,  # type: "_models.MongoDBCollectionCreateUpdateParameters"
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.MongoDBCollectionGetResults"]
    """Create or update an Azure Cosmos DB MongoDB Collection.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param collection_name: Cosmos DB collection name.
    :type collection_name: str
    :param create_update_mongo_db_collection_parameters: The parameters to provide for the current
     MongoDB Collection.
    :type create_update_mongo_db_collection_parameters: ~azure.mgmt.cosmosdb.models.MongoDBCollectionCreateUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either MongoDBCollectionGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.MongoDBCollectionGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.MongoDBCollectionGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial PUT.  cls returns the raw pipeline
        # response so deserialization happens in get_long_running_output,
        # after polling completes, rather than on the initial response.
        raw_result = self._create_update_mongo_db_collection_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            collection_name=collection_name,
            create_update_mongo_db_collection_parameters=create_update_mongo_db_collection_parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Remove kwargs already consumed by the initial request so they are not
    # forwarded again to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response of the LRO.
        deserialized = self._deserialize('MongoDBCollectionGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    # polling=True -> standard ARM polling; False -> no polling; otherwise a
    # caller-supplied PollingMethod instance is used verbatim.
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller; no initial request was made.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_update_mongo_db_collection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'}  # type: ignore
def _delete_mongo_db_collection_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Issue the initial DELETE of the delete-collection long-running operation.

    Success is 202 (accepted) or 204 (already gone); there is no response body.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Base status-code -> exception mapping, overridable via 'error_map'.
    error_map = dict(
        {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError},
        **kwargs.pop('error_map', {})
    )
    api_version = "2021-03-15"

    # Expand the URL template with serialized (and validated) path arguments.
    path_args = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    request_url = self._client.format_url(
        self._delete_mongo_db_collection_initial.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_params = {}  # type: Dict[str, Any]

    request = self._client.delete(request_url, query_params, header_params)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (202, 204):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
_delete_mongo_db_collection_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'}  # type: ignore
def begin_delete_mongo_db_collection(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller[None]
    """Deletes an existing Azure Cosmos DB MongoDB Collection.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param collection_name: Cosmos DB collection name.
    :type collection_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial DELETE.  cls returns the raw
        # pipeline response so the poller can track the operation headers.
        raw_result = self._delete_mongo_db_collection_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            collection_name=collection_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Remove kwargs already consumed by the initial request so they are not
    # forwarded again to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Delete has no body to deserialize; returns None unless a custom
        # 'cls' callback was supplied.
        if cls:
            return cls(pipeline_response, None, {})

    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    # polling=True -> standard ARM polling; False -> no polling; otherwise a
    # caller-supplied PollingMethod instance is used verbatim.
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller; no initial request was made.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_mongo_db_collection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'}  # type: ignore
def get_mongo_db_collection_throughput(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> "_models.ThroughputSettingsGetResults"
    """Gets the RUs per second of the MongoDB collection under an existing Azure Cosmos DB database
    account with the provided name.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param collection_name: Cosmos DB collection name.
    :type collection_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ThroughputSettingsGetResults, or the result of cls(response)
    :rtype: ~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    # Base status-code -> exception mapping, overridable via 'error_map'.
    error_map = dict(
        {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError},
        **kwargs.pop('error_map', {})
    )
    api_version = "2021-03-15"
    accept = "application/json"

    # Expand the URL template with serialized (and validated) path arguments.
    path_args = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    request_url = self._client.format_url(
        self.get_mongo_db_collection_throughput.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_params = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, header_params)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 is a success for this GET.
    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized
get_mongo_db_collection_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default'}  # type: ignore
def _update_mongo_db_collection_throughput_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    update_throughput_parameters,  # type: "_models.ThroughputSettingsUpdateParameters"
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ThroughputSettingsGetResults"]
    """Issue the initial PUT of the update-collection-throughput long-running operation.

    Returns the deserialized throughput settings on 200, or None on 202
    (operation accepted; the final result is obtained by polling).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ThroughputSettingsGetResults"]]
    # Base status-code -> exception mapping, overridable via 'error_map'.
    error_map = dict(
        {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError},
        **kwargs.pop('error_map', {})
    )
    api_version = "2021-03-15"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with serialized (and validated) path arguments.
    path_args = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    request_url = self._client.format_url(
        self._update_mongo_db_collection_throughput_initial.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_params = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content = self._serialize.body(update_throughput_parameters, 'ThroughputSettingsUpdateParameters')
    request = self._client.put(request_url, query_params, header_params, content=body_content)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (200, 202):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # A 202 carries no body worth deserializing; only 200 does.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)

    return cls(pipeline_response, deserialized, {}) if cls else deserialized
_update_mongo_db_collection_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default'}  # type: ignore
def begin_update_mongo_db_collection_throughput(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    update_throughput_parameters,  # type: "_models.ThroughputSettingsUpdateParameters"
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ThroughputSettingsGetResults"]
    """Update the RUs per second of an Azure Cosmos DB MongoDB collection.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param collection_name: Cosmos DB collection name.
    :type collection_name: str
    :param update_throughput_parameters: The RUs per second of the parameters to provide for the
     current MongoDB collection.
    :type update_throughput_parameters: ~azure.mgmt.cosmosdb.models.ThroughputSettingsUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial PUT.  cls returns the raw pipeline
        # response so deserialization happens after polling completes.
        raw_result = self._update_mongo_db_collection_throughput_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            collection_name=collection_name,
            update_throughput_parameters=update_throughput_parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Remove kwargs already consumed by the initial request so they are not
    # forwarded again to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response of the LRO.
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    # polling=True -> standard ARM polling; False -> no polling; otherwise a
    # caller-supplied PollingMethod instance is used verbatim.
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller; no initial request was made.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_mongo_db_collection_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default'}  # type: ignore
def _migrate_mongo_db_collection_to_autoscale_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ThroughputSettingsGetResults"]
    # Initial POST of the migrate-to-autoscale LRO.  Returns the deserialized
    # throughput settings on 200, or None on 202 (accepted; poll for result).
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ThroughputSettingsGetResults"]]
    # Well-known status codes mapped to typed azure-core exceptions; callers
    # may extend/override the mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"
    accept = "application/json"
    # Construct URL
    url = self._migrate_mongo_db_collection_to_autoscale_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        # NOTE(review): unlike the sibling operations in this chunk, the error
        # path here deserializes a typed error body before raising.  If the
        # body does not match ErrorResponseUpdatedFormat, the deserialization
        # failure could mask the underlying HTTP error — confirm this matches
        # the service's error contract for this endpoint.
        error = self._deserialize(_models.ErrorResponseUpdatedFormat, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
    # A 202 carries no body worth deserializing; only 200 does.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_migrate_mongo_db_collection_to_autoscale_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default/migrateToAutoscale'}  # type: ignore
def begin_migrate_mongo_db_collection_to_autoscale(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ThroughputSettingsGetResults"]
    """Migrate an Azure Cosmos DB MongoDB collection from manual throughput to autoscale.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param collection_name: Cosmos DB collection name.
    :type collection_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial POST.  cls returns the raw pipeline
        # response so deserialization happens after polling completes.
        raw_result = self._migrate_mongo_db_collection_to_autoscale_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            collection_name=collection_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Remove kwargs already consumed by the initial request so they are not
    # forwarded again to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response of the LRO.
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    # polling=True -> standard ARM polling; False -> no polling; otherwise a
    # caller-supplied PollingMethod instance is used verbatim.
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller; no initial request was made.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_mongo_db_collection_to_autoscale.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default/migrateToAutoscale'}  # type: ignore
def _migrate_mongo_db_collection_to_manual_throughput_initial(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ThroughputSettingsGetResults"]
    """Send the initial POST that starts the migrate-to-manual-throughput
    long-running operation.

    Returns the deserialized ThroughputSettingsGetResults on a 200 response,
    or None on a 202 (operation accepted, still running). Raises
    HttpResponseError for any other status code.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ThroughputSettingsGetResults"]]
    # Default mapping of error status codes to azure-core exception types;
    # callers may extend/override it via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-15"
    accept = "application/json"
    # Construct URL
    url = self._migrate_mongo_db_collection_to_manual_throughput_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.ErrorResponseUpdatedFormat, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
    deserialized = None
    # Only a 200 carries a body to deserialize; a 202 leaves `deserialized` None.
    if response.status_code == 200:
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_migrate_mongo_db_collection_to_manual_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default/migrateToManualThroughput'}  # type: ignore
def begin_migrate_mongo_db_collection_to_manual_throughput(
    self,
    resource_group_name,  # type: str
    account_name,  # type: str
    database_name,  # type: str
    collection_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ThroughputSettingsGetResults"]
    """Migrate an Azure Cosmos DB MongoDB collection from autoscale to manual throughput.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: Cosmos DB database account name.
    :type account_name: str
    :param database_name: Cosmos DB database name.
    :type database_name: str
    :param collection_name: Cosmos DB collection name.
    :type collection_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ThroughputSettingsGetResults"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved state: issue the initial request now. The identity `cls`
        # lambda keeps the raw pipeline response so the poller can drive the LRO.
        raw_result = self._migrate_mongo_db_collection_to_manual_throughput_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            database_name=database_name,
            collection_name=collection_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # These kwargs were consumed by the initial call; drop them so they are not
    # passed to the polling method as well.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final LRO response, honoring a caller-supplied `cls`.
        deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'collectionName': self._serialize.url("collection_name", collection_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_mongo_db_collection_to_manual_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default/migrateToManualThroughput'}  # type: ignore
| 54.785955 | 342 | 0.676002 | 10,594 | 97,519 | 5.968001 | 0.02879 | 0.031665 | 0.034955 | 0.019739 | 0.965615 | 0.96435 | 0.958118 | 0.956062 | 0.953689 | 0.947631 | 0 | 0.008692 | 0.220214 | 97,519 | 1,779 | 343 | 54.816751 | 0.822734 | 0.247285 | 0 | 0.849706 | 0 | 0.020991 | 0.182623 | 0.100591 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036104 | false | 0 | 0.009236 | 0 | 0.107473 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86360e1b6b3453b15a67ed1041dc8813fefb890f | 1,734 | py | Python | prt_ascii.py | phatollie/MensaQ | 567619e96ed4f75cddc40bdabb847bd5a9cad0dc | [
"MIT"
] | 1 | 2021-06-11T20:46:04.000Z | 2021-06-11T20:46:04.000Z | prt_ascii.py | phatollie/MensaQ | 567619e96ed4f75cddc40bdabb847bd5a9cad0dc | [
"MIT"
] | null | null | null | prt_ascii.py | phatollie/MensaQ | 567619e96ed4f75cddc40bdabb847bd5a9cad0dc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
###############################################
# Authored by Justin Acevedo in the year 2021 #
###############################################
"""
Description: A simple module that prints an ASCII art banner as the loading screen for the MensaQ game.
Design: Create a function to print ASCII art.
"""
import os
import platform
# Clear the terminal when this module is imported/run: Windows uses `cls`,
# every other platform uses `clear`.
os.system('cls' if platform.uname().system == "Windows" else 'clear')
def prt_ascii():
    """
    Prints the ASCII art for the word MENSA.
    """
    # Banner rows, emitted one per line between the color-change escape codes.
    banner_rows = (
        ' ___ ___ ___ ___ ___ ',
        ' /\\__\\ /\\ \\ /\\__\\ /\\ \\ /\\ \\ ',
        ' /::| | /::\\ \\ /::| | /::\\ \\ /::\\ \\ ',
        ' /:|:| | /:/\\:\\ \\ /:|:| | /:/\\ \\ \\ /:/\\:\\ \\ ',
        ' /:/|:|__|__ /::\\~\\:\\ \\ /:/|:| |__ _\\:\\~\\ \\ \\ /::\\~\\:\\ \\ ',
        ' /:/ |::::\\__\\ /:/\\:\\ \\:\\__\\ /:/ |:| /\\__\\ /\\ \\:\\ \\ \\__\\ /:/\\:\\ \\:\\__\\ ',
        ' \\/__/~~/:/ / \\:\\~\\:\\ \\/__/ \\/__|:|/:/ / \\:\\ \\:\\ \\/__/ \\/__\\:\\/:/ / ',
        ' /:/ / \\:\\ \\:\\__\\ |:/:/ / \\:\\ \\:\\__\\ \\::/ / ',
        ' /:/ / \\:\\ \\/__/ |::/ / \\:\\/:/ / /:/ / ',
        ' /:/ / \\:\\__\\ /:/ / \\::/ / /:/ / ',
        ' \\/__/ \\/__/ \\/__/ \\/__/ \\/__/ ',
    )
    print('')
    print("\033[1;33;40m \n")  # Set text color to Yellow.
    for row in banner_rows:
        print(row)
    print('')
    print("\033[0;37;40m \n", flush=True)  # Set text color back to white.
# Allow running this module directly to preview the banner.
if __name__ == '__main__':
    prt_ascii()
| 39.409091 | 97 | 0.299308 | 106 | 1,734 | 4.198113 | 0.575472 | 0.292135 | 0.370787 | 0.449438 | 0.146067 | 0.146067 | 0.146067 | 0.146067 | 0.146067 | 0.146067 | 0 | 0.020154 | 0.399077 | 1,734 | 43 | 98 | 40.325581 | 0.40691 | 0.178201 | 0 | 0.083333 | 0 | 0.083333 | 0.680952 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | true | 0 | 0.083333 | 0 | 0.125 | 0.625 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
8695483b81c94f0923276fa65f89aa2a965cc2ef | 12,751 | py | Python | tests/syntax/simple_expression/test_operator.py | TangYuan-Liu/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | [
"Apache-2.0"
] | 1 | 2022-03-05T02:59:21.000Z | 2022-03-05T02:59:21.000Z | tests/syntax/simple_expression/test_operator.py | TangYuan-Liu/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | [
"Apache-2.0"
] | null | null | null | tests/syntax/simple_expression/test_operator.py | TangYuan-Liu/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import sys
import pytest
from mindspore import Tensor, context, Parameter
from mindspore.ops import operations as P
from mindspore.ops import functional as F
from mindspore.nn import Cell
import mindspore as ms
def test_inner_scalar_divisor():
    """
    Feature: Check whether the divisor of inner scalar is zero.
    Description: The divisor of inner scalar must not be zero.
    Expectation: The divisor of inner scalar must not be zero.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            self.param_a = Parameter(Tensor(5, ms.int32), name="param_a")
            self.param_b = Parameter(Tensor(5, ms.int32), name="param_b")

        def construct(self, x):
            # The constant expression `5 / 0` must be rejected by the frontend.
            return x + self.param_a + 5 / 0

    context.set_context(device_target="GPU")
    x = Tensor(2, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="The divisor could not be zero."):
        ret = net(x)
        print("ret:", ret)  # not reached: the call above must raise
def test_inner_scalar_mod():
    """
    Feature: Check the input of inner scalar mod.
    Description: The input of inner scalar mod must not be zero.
    Expectation: The input of inner scalar mod must not be zero.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            self.param_a = Parameter(Tensor(5, ms.int32), name="param_a")

        def construct(self, x):
            # Modulo by a constant zero must be rejected.
            return x + self.param_a + 5 % 0

    x = Tensor(2, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="Cannot perform modulo operation on zero."):
        ret = net(x)
        print("ret:", ret)  # not reached
def test_inner_scalar_mod_args_length():
    """
    Feature: Check the length of input of inner scalar mod.
    Description: The length of input of inner scalar mod should not less than 2.
    Expectation: The length of input of inner scalar mod should not less than 2.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            self.param_a = Parameter(Tensor(5, ms.int32), name="param_a")
            self.mod = P.Mod()

        def construct(self, x):
            # Mod is binary; calling it with a single argument must fail.
            return x + self.param_a + self.mod(5)

    x = Tensor(2, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="For 'S-Prim-Mod', the size of input should be 2"):
        ret = net(x)
        print("ret:", ret)  # not reached
def test_make_range_input_is_empty():
    """
    Feature: Check the length of inputs of make_range operator.
    Description: The inputs of make_range operator could not be empty.
    Expectation: The inputs of make_range operator could not be empty.
    """
    class Net(Cell):
        def construct(self, x, y):
            # `range()` without arguments must be rejected at compile time.
            for _ in range():
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="For 'range', the arguments could not be empty."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_step_zero():
    """
    Feature: Check the length of inputs of make_range operator.
    Description: The step value of MakeRange operator could not be 0.
    Expectation: The step value of MakeRange operator could not be 0.
    """
    class Net(Cell):
        def construct(self, x, y):
            # A zero step would never terminate; it must be rejected.
            for _ in range(1, 2, 0):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="For 'range', the argument 'step' could not be 0."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_error_input_1():
    """
    Feature: Check the inputs of make_range operator.
    Description: If start > stop, the step need smaller than zero.
    Expectation: If start > stop, the step need smaller than zero.
    """
    class Net(Cell):
        def construct(self, x, y):
            # start (1) > stop (-1) with positive step must be rejected.
            for _ in range(1, -1, 3):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="For 'range', while the argument 'start'"):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_error_input_2():
    """
    Feature: Check the length of inputs of make_range operator.
    Description: If start < stop, the step need greater than zero.
    Expectation: If start < stop, the step need greater than zero.
    """
    class Net(Cell):
        def construct(self, x, y):
            # start (-1) < stop (1) with negative step must be rejected.
            for _ in range(-1, 1, -3):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="For 'range', while the argument 'start'"):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_input_type():
    """
    Feature: Check the type of inputs of make_range operator.
    Description: The type of inputs of make_range operator must be int64.
    Expectation: The type of inputs of make_range operator must be int64.
    """
    class Net(Cell):
        def construct(self, x, y):
            # A float stop value (0.02) must be rejected.
            for _ in range(0, 0.02):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="The type of inputs in range operator only support int64 number."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_input_type_2():
    """
    Feature: Check the type of inputs of make_range operator.
    Description: The type of inputs of make_range operator must be int64.
    Expectation: The type of inputs of make_range operator must be int64.
    """
    class Net(Cell):
        def construct(self, x, y):
            # A float step value (3.00) must be rejected.
            for _ in range(0, 1, 3.00):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="The type of inputs in range operator only support int64 number."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_input_type_3():
    """
    Feature: Check the type of inputs of make_range operator.
    Description: The type of inputs of make_range operator must be int64.
    Expectation: The type of inputs of make_range operator must be int64.
    """
    class Net(Cell):
        def construct(self, x, y):
            # A float single argument (3.00) must be rejected.
            for _ in range(3.00):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="The type of inputs in range operator only support int64 number."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_input_size():
    """
    Feature: Check the size of inputs of make_range operator.
    Description: The size of inputs of make_range operator could not exceed 3.
    Expectation: The size of inputs of make_range operator could not exceed 3.
    """
    class Net(Cell):
        def construct(self, x, y):
            # Four arguments to range must be rejected.
            for _ in range(1, 2, 3, 4):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="For 'range', the size of arguments could not exceed 3."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_overflow():
    """
    Feature: Check the size of inputs of range operator.
    Description: The size of inputs of make_range operator could not exceed 3.
    Expectation: The size of inputs of make_range operator could not exceed 3.
    """
    class Net(Cell):
        def construct(self, x, y):
            # Stepping past sys.maxsize must trigger an overflow error.
            max_index = sys.maxsize
            for _ in range(max_index - 1, max_index, 3):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="Integer overflow error occurred when traversing the range."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_range_overflow_2():
    """
    Feature: Check the size of inputs of make_range operator.
    Description: The size of inputs of make_range operator could not exceed 3.
    Expectation: The size of inputs of make_range operator could not exceed 3.
    """
    class Net(Cell):
        def construct(self, x, y):
            # Stepping below -sys.maxsize must trigger an overflow error.
            min_index = -sys.maxsize
            for _ in range(min_index, min_index - 1, -3):
                x += y
            return x

    x = Tensor(2, dtype=ms.int32)
    y = Tensor(4, dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="Integer overflow error occurred when traversing the range."):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_typeof():
    """
    Feature: Check the size of inputs of typeof operator.
    Description: The size of inputs of typeof operator must be 1.
    Expectation: The size of inputs of typeof operator must be 1.
    """
    class Net(Cell):
        def construct(self, x):
            # typeof is unary; passing two arguments must fail.
            return F.typeof(x, x)

    x = Tensor([2, 3, 4, 5], dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="The Typeof operator must requires 1 argument, "
                                        "but the size of arguments is 2."):
        ret = net(x)
        print("ret:", ret)  # not reached
def test_tuple_div():
    """
    Feature: Check the size of inputs of tuple_div operator.
    Description: The size of inputs of tuple_div operator must be same.
    Expectation: The size of inputs of tuple_div operator must be same.
    """
    class Net(Cell):
        def construct(self, x, y):
            return F.tuple_div(x, y)

    # Mismatched lengths (3 vs 2) must be rejected.
    x = (8, 14, 20)
    y = (2, 2)
    net = Net()
    with pytest.raises(Exception, match="The size of inputs of 'tuple_div' operator must be the same"):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_tuple_div_type():
    """
    Feature: Check the size of inputs of tuple_div operator.
    Description: The type of inputs of tuple_div operator must be int64 number.
    Expectation: The type of inputs of tuple_div operator must be int64 number.
    """
    class Net(Cell):
        def construct(self, x, y):
            return F.tuple_div(x, y)

    # A float element (2.0) must be rejected.
    x = (8, 14, 20)
    y = (2, 2, 2.0)
    net = Net()
    with pytest.raises(Exception, match="The data type of inputs of 'tuple_div' operator should be an int64 number,"):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_tuple_div_zero():
    """
    Feature: Check the size of inputs of tuple_div operator.
    Description: The divisor value should not be 0.
    Expectation: The divisor value should not be 0.
    """
    class Net(Cell):
        def construct(self, x, y):
            return F.tuple_div(x, y)

    # A zero element in the divisor tuple must be rejected.
    x = (8, 14, 20)
    y = (2, 2, 0)
    net = Net()
    with pytest.raises(Exception, match="The divisor value should not be 0"):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_tuple_div_input_is_not_divisible():
    """
    Feature: Check whether the inputs of tuple_div is divisible.
    Description: The inputs of tuple_div could be divisible.
    Expectation: The inputs of tuple_div could be divisible.
    """
    class Net(Cell):
        def construct(self, x, y):
            return F.tuple_div(x, y)

    # 14 is not divisible by 3, so the operator must reject the inputs.
    x = (8, 14)
    y = (2, 3)
    net = Net()
    with pytest.raises(Exception, match="The inputs of 'tuple_div' operator should be divisible,"):
        ret = net(x, y)
        print("ret:", ret)  # not reached
def test_make_slice_scalar():
    """
    Feature: Check whether the scalar input of make_slice is int or bool.
    Description: The scalar input of make_slice is int or bool.
    Expectation: The scalar input of make_slice is int or bool.
    """
    class Net(Cell):
        def construct(self, data):
            # A float slice start (1.01) must be rejected.
            return data[1.01:None:None]

    x = Tensor((8, 10, 12), dtype=ms.int32)
    net = Net()
    with pytest.raises(Exception, match="Slice indices must be integers or bool."):
        ret = net(x)
        print("ret:", ret)  # not reached
| 31.957393 | 118 | 0.610854 | 1,860 | 12,751 | 4.096774 | 0.102688 | 0.011024 | 0.043307 | 0.051312 | 0.826903 | 0.816142 | 0.795538 | 0.753412 | 0.728346 | 0.693045 | 0 | 0.023607 | 0.279115 | 12,751 | 398 | 119 | 32.037688 | 0.805374 | 0.333621 | 0 | 0.704846 | 0 | 0 | 0.13572 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.180617 | false | 0 | 0.030837 | 0.039648 | 0.378855 | 0.0837 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86e8fdfaf1e67602f13c9cb9287703aca7ae2df8 | 24,033 | py | Python | nlp_tasks/absa/aspect_category_detection_and_sentiment_classification/acd_and_sc_data_reader.py | l294265421/SCAN | 796708bff1526eb3a6bd9946cc5fa6732fb9010d | [
"MIT"
] | 15 | 2020-10-19T08:19:21.000Z | 2021-12-05T12:33:30.000Z | nlp_tasks/absa/aspect_category_detection_and_sentiment_classification/acd_and_sc_data_reader.py | l294265421/SCAN | 796708bff1526eb3a6bd9946cc5fa6732fb9010d | [
"MIT"
] | null | null | null | nlp_tasks/absa/aspect_category_detection_and_sentiment_classification/acd_and_sc_data_reader.py | l294265421/SCAN | 796708bff1526eb3a6bd9946cc5fa6732fb9010d | [
"MIT"
] | 7 | 2020-09-18T03:22:49.000Z | 2022-01-21T14:18:12.000Z | # -*- coding: utf-8 -*-
import copy
import re
from typing import *
from overrides import overrides
import pickle
import copy
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from allennlp.modules.seq2vec_encoders import Seq2VecEncoder, PytorchSeq2VecWrapper
from allennlp.nn.util import get_text_field_mask
from allennlp.models import Model
from allennlp.modules.text_field_embedders import TextFieldEmbedder
from allennlp.data.fields import TextField, MetadataField, ArrayField, ListField, LabelField, MultiLabelField
from allennlp.data.token_indexers import SingleIdTokenIndexer
from allennlp.data.token_indexers import TokenIndexer
from allennlp.data import Instance
from allennlp.data.token_indexers import TokenIndexer
from allennlp.data.tokenizers import Token
from allennlp.nn import util as nn_util
from allennlp.data.vocabulary import Vocabulary
from allennlp.data.dataset_readers import DatasetReader
import torch.nn.functional as F
from allennlp.training import metrics
from allennlp.models import BasicClassifier
from allennlp.data.iterators import DataIterator
from tqdm import tqdm
import spacy
from nltk.corpus import stopwords
from benepar.spacy_plugin import BeneparComponent
# NLTK English stop words extended with common punctuation; referenced by the
# (currently commented-out) stop-word weighting in the readers below.
english_stop_words = stopwords.words('english')
english_stop_words.extend([',', '.', '?', ';', '-', ':', '\'', '"', '(', ')', '!'])
from nlp_tasks.utils import corenlp_factory
from nlp_tasks.utils import create_graph
from nlp_tasks.utils import my_corenlp
from nlp_tasks.absa.sentence_analysis.constituency_parser import ConstituencyTreeNode
class TextInAllAspectSentimentOut(DatasetReader):
    """Reads ``[text, labels]`` samples into AllenNLP instances for joint
    aspect-category detection (ACD) and sentiment classification (SC).

    ``labels`` is a list of ``(category_index, polarity_index)`` pairs. Each
    instance carries the tokenized sentence, token positions, all aspect
    category names, a concatenated label vector
    ``category_labels + polarity_labels + total_labels`` and a polarity mask.
    ``text_to_instance`` also appends the token list and a dependency graph to
    the sample, which is exposed as metadata.
    """

    def __init__(self, categories: List[str], polarities: List[str],
                 tokenizer: Callable[[str], List[str]] = lambda x: x.split(),
                 token_indexers: Dict[str, TokenIndexer] = None,
                 position_indexers: Dict[str, TokenIndexer] = None,
                 aspect_indexers: Dict[str, TokenIndexer] = None,
                 core_nlp: my_corenlp.StanfordCoreNLP = None,
                 configuration=None) -> None:
        super().__init__(lazy=False)
        self.tokenizer = tokenizer
        self.token_indexers = token_indexers or {"tokens": SingleIdTokenIndexer(namespace="tokens")}
        self.position_indexers = position_indexers or {"position": SingleIdTokenIndexer(namespace='position')}
        self.aspect_indexers = aspect_indexers or {"aspect": SingleIdTokenIndexer(namespace='aspect')}
        self.categories = categories
        self.polarities = polarities
        self.spacy_nlp = spacy.load("en_core_web_sm")
        self.core_nlp = core_nlp
        self.configuration = configuration

    def _build_graph(self, text):
        """Build a DGL dependency graph over the sentence tokens."""
        graph = create_graph.create_dependency_graph_for_dgl(text, self.spacy_nlp, None)
        return graph

    @overrides
    def text_to_instance(self, sample: list) -> Instance:
        """Convert one ``[text, labels?]`` sample into an ``Instance``.

        Mutates ``sample`` by appending the token list and the dependency
        graph; the mutated sample is attached as a ``MetadataField``.
        """
        fields = {}
        text: str = sample[0].strip()
        # FIX: decide whether gold labels are present *before* the appends
        # below mutate `sample`; the old check `len(sample) > 1` ran after
        # appending words and graph and was therefore always true.
        has_labels = len(sample) > 1
        words = self.tokenizer(text)
        # `configuration` is assumed to be a dict-like object when provided
        # by callers — TODO confirm a None configuration is never passed here.
        if 'max_word_len' in self.configuration:
            words = words[: self.configuration['max_word_len']]
        sample.append(words)
        graph = self._build_graph(text)
        sample.append(graph)
        tokens = [Token(word) for word in words]
        sentence_field = TextField(tokens, self.token_indexers)
        fields['tokens'] = sentence_field
        position = [Token(str(i)) for i in range(len(tokens))]
        position_field = TextField(position, self.position_indexers)
        fields['position'] = position_field
        aspects = [Token(category) for category in self.categories]
        aspect_field = TextField(aspects, self.aspect_indexers)
        fields['aspects'] = aspect_field
        category_labels = [0] * len(self.categories)
        polarity_labels = [-100] * len(self.categories)  # -100 marks "category absent"
        total_labels = []
        if has_labels:
            labels: list = sample[1]
            for label in labels:
                category_labels[label[0]] = 1
                polarity_labels[label[0]] = label[1]
            for i in range(len(self.categories)):
                if polarity_labels[i] == -100:
                    total_labels.append(0)
                else:
                    # Shift polarity by the category flag (1) so 0 can mean
                    # "category not mentioned" in the combined label.
                    total_labels.append(polarity_labels[i] + category_labels[i])
            label_field = ArrayField(np.array(category_labels + polarity_labels + total_labels))
            fields["label"] = label_field
        polarity_mask = [1 if polarity_labels[i] != -100 else 0 for i in range(len(self.categories))]
        polarity_mask_field = ArrayField(np.array(polarity_mask))
        fields['polarity_mask'] = polarity_mask_field
        sample_field = MetadataField(sample)
        fields["sample"] = sample_field
        return Instance(fields)

    @overrides
    def _read(self, samples: list) -> Iterator[Instance]:
        acd_sc_mode = self.configuration['acd_sc_mode']
        if acd_sc_mode == 'multi-multi':
            # One instance per sentence carrying all aspect labels.
            for sample in samples:
                yield self.text_to_instance(sample)
        elif acd_sc_mode == 'multi-single':
            # One instance per labeled aspect: keep aspect i's polarity and
            # mask every other aspect's polarity with -100.
            for sample in samples:
                text = sample[0]
                labels = sample[1]
                for i in range(len(labels)):
                    labels_copy = [list(e) for e in copy.deepcopy(labels)]
                    for j, label in enumerate(labels_copy):
                        if j != i:
                            labels_copy[j][1] = -100
                    yield self.text_to_instance([text, labels_copy])
        elif acd_sc_mode == 'single-single':
            raise NotImplementedError('single-single')
class TextInAllAspectSentimentOutSentenceConstituency(DatasetReader):
    """Dataset reader for joint ACD+SC that augments each sentence with its
    constituency parse.

    ``text_to_instance`` takes a dict with at least ``'text'`` and optionally
    ``'labels'`` (a list of ``(category_index, polarity_index)`` pairs). In
    addition to the token/position/aspect/label fields, every instance carries
    the constituency tree, its sorted inner nodes, and two DGL graphs over
    tokens + inner-node labels (one built with, one without "dotted line"
    edges).
    """

    def __init__(self, categories: List[str], polarities: List[str],
                 tokenizer: Callable[[str], List[str]] = lambda x: x.split(),
                 token_indexers: Dict[str, TokenIndexer] = None,
                 position_indexers: Dict[str, TokenIndexer] = None,
                 aspect_indexers: Dict[str, TokenIndexer] = None,
                 sentence_constituency_indexer: Dict[str, TokenIndexer] = None,
                 core_nlp: my_corenlp.StanfordCoreNLP = None,
                 configuration=None) -> None:
        super().__init__(lazy=False)
        self.tokenizer = tokenizer
        self.token_indexers = token_indexers or {"tokens": SingleIdTokenIndexer(namespace="tokens")}
        self.position_indexers = position_indexers or {"position": SingleIdTokenIndexer(namespace='position')}
        self.aspect_indexers = aspect_indexers or {"aspect": SingleIdTokenIndexer(namespace='aspect')}
        self.sentence_constituency_indexer = sentence_constituency_indexer
        self.categories = categories
        self.polarities = polarities
        self.spacy_nlp = spacy.load("en_core_web_sm")
        # benepar supplies the constituency parse consumed by text_to_instance.
        self.spacy_nlp.add_pipe(BeneparComponent('benepar_en'))
        self.core_nlp = core_nlp
        self.configuration = configuration

    def _build_graph(self, tree: ConstituencyTreeNode):
        """DGL graph over the constituency tree (solid edges only)."""
        graph = create_graph.create_sentence_constituency_graph_for_dgl(tree)
        return graph

    def _build_graph_with_dotted_line(self, tree: ConstituencyTreeNode):
        """DGL graph over the constituency tree, including dotted-line edges."""
        graph = create_graph.create_sentence_constituency_graph_for_dgl_with_dotted_line(tree)
        return graph

    @overrides
    def text_to_instance(self, sample: dict) -> Instance:
        """Convert one sample dict into an ``Instance``.

        Mutates ``sample`` by caching the cleaned text, the parse tree, the
        inner nodes and the graphs under additional keys; the mutated dict is
        attached as a ``MetadataField``.
        """
        fields = {}
        text: str = sample['text'].strip()
        text = re.sub('\\s+', ' ', text)  # collapse runs of whitespace
        sample['text_clean'] = text
        tree = ConstituencyTreeNode.parse_using_spacy(self.spacy_nlp, text)
        inner_nodes = ConstituencyTreeNode.get_all_inner_nodes(tree)
        inner_nodes.sort(key=lambda x: x.node_id)
        nodes_labels = [e.labels[0] for e in inner_nodes]
        sample['tree'] = tree
        sample['inner_nodes'] = inner_nodes
        sample['nodes_labels'] = nodes_labels
        graph = self._build_graph(tree)
        sample['graph'] = graph
        graph_with_dotted_line = self._build_graph_with_dotted_line(tree)
        sample['graph_with_dotted_line'] = graph_with_dotted_line
        words = self.tokenizer(text)
        sample['words'] = words
        tokens = [Token(word) for word in words]
        sentence_field = TextField(tokens, self.token_indexers)
        fields['tokens'] = sentence_field
        # Graph nodes are the sentence tokens followed by the labels of the
        # inner constituency nodes.
        words_for_graph = words + nodes_labels
        sample['words_for_graph'] = ['%d-%s' % (e[0], e[1]) for e in enumerate(words_for_graph)]
        tokens_for_graph = [Token(word) for word in words_for_graph]
        sentence_field_for_graph = TextField(tokens_for_graph, self.token_indexers)
        fields['tokens_for_graph'] = sentence_field_for_graph
        position = [Token(str(i)) for i in range(len(tokens))]
        position_field = TextField(position, self.position_indexers)
        fields['position'] = position_field
        aspects = [Token(category) for category in self.categories]
        aspect_field = TextField(aspects, self.aspect_indexers)
        fields['aspects'] = aspect_field
        category_labels = [0] * len(self.categories)
        polarity_labels = [-100] * len(self.categories)  # -100 marks "category absent"
        total_labels = []
        # FIX: the old guard `len(sample) > 1` counted dict keys, which is
        # always > 1 once the parse results above are cached; test for the
        # 'labels' key instead.
        if 'labels' in sample:
            labels: list = sample['labels']
            for label in labels:
                category_labels[label[0]] = 1
                polarity_labels[label[0]] = label[1]
            for i in range(len(self.categories)):
                if polarity_labels[i] == -100:
                    total_labels.append(0)
                else:
                    # Shift polarity by the category flag (1) so 0 can mean
                    # "category not mentioned" in the combined label.
                    total_labels.append(polarity_labels[i] + category_labels[i])
            label_field = ArrayField(np.array(category_labels + polarity_labels + total_labels))
            fields["label"] = label_field
        polarity_mask = [1 if polarity_labels[i] != -100 else 0 for i in range(len(self.categories))]
        polarity_mask_field = ArrayField(np.array(polarity_mask))
        fields['polarity_mask'] = polarity_mask_field
        sample_field = MetadataField(sample)
        fields["sample"] = sample_field
        return Instance(fields)

    @overrides
    def _read(self, samples: list) -> Iterator[Instance]:
        acd_sc_mode = self.configuration['acd_sc_mode']
        if acd_sc_mode == 'multi-multi':
            # One instance per sentence carrying all aspect labels.
            for sample in samples:
                yield self.text_to_instance({'text': sample[0], 'labels': sample[1]})
        elif acd_sc_mode == 'multi-single':
            # One instance per labeled aspect: keep aspect i's polarity and
            # mask every other aspect's polarity with -100.
            for sample in samples:
                text = sample[0]
                labels = sample[1]
                for i in range(len(labels)):
                    labels_copy = [list(e) for e in copy.deepcopy(labels)]
                    for j, label in enumerate(labels_copy):
                        if j != i:
                            labels_copy[j][1] = -100
                    yield self.text_to_instance({'text': text, 'labels': labels_copy})
        elif acd_sc_mode == 'single-single':
            raise NotImplementedError('single-single')
class AcdAndScDatasetReaderConstituencyBert(DatasetReader):
    """DatasetReader for joint aspect-category detection (ACD) and sentiment
    classification (SC) using constituency-tree graphs plus BERT inputs.

    For each sentence it builds a constituency parse (spaCy + benepar), two
    DGL graphs over the parse (with and without extra "dotted line" edges),
    plain word tokens, graph tokens (sentence words followed by inner-node
    labels), and one BERT word-piece sequence per aspect category.
    """

    def __init__(self, categories: List[str], polarities: List[str],
                 tokenizer: Callable[[str], List[str]] = lambda x: x.split(),
                 token_indexers: Dict[str, TokenIndexer] = None,
                 position_indexers: Dict[str, TokenIndexer] = None,
                 core_nlp: my_corenlp.StanfordCoreNLP=None,
                 configuration=None,
                 bert_tokenizer=None,
                 bert_token_indexers=None,
                 sentence_constituency_indexer: Dict[str, TokenIndexer] = None) -> None:
        super().__init__(lazy=False)
        self.tokenizer = tokenizer
        self.token_indexers = token_indexers or {"tokens": SingleIdTokenIndexer(namespace="tokens")}
        self.bert_tokenizer = bert_tokenizer
        self.bert_token_indexers = bert_token_indexers or {"bert": SingleIdTokenIndexer(namespace="bert")}
        self.position_indexers = position_indexers or {"position": SingleIdTokenIndexer(namespace='position')}
        self.sentence_constituency_indexer = sentence_constituency_indexer
        self.categories = categories
        self.polarities = polarities
        # benepar supplies the constituency parse on top of the spaCy pipeline
        self.spacy_nlp = spacy.load("en_core_web_sm")
        self.spacy_nlp.add_pipe(BeneparComponent('benepar_en'))
        self.core_nlp = core_nlp
        self.configuration = configuration

    def _build_graph(self, tree: ConstituencyTreeNode):
        """Build the DGL graph over words + constituency inner nodes."""
        graph = create_graph.create_sentence_constituency_graph_for_dgl(tree)
        return graph

    def _build_graph_with_dotted_line(self, tree: ConstituencyTreeNode):
        """Same graph as _build_graph but with the extra "dotted line" edges."""
        graph = create_graph.create_sentence_constituency_graph_for_dgl_with_dotted_line(tree)
        return graph

    @overrides
    def text_to_instance(self, sample: dict) -> Instance:
        """Convert one sample dict ({'text': ..., 'labels': [...]}) into an
        AllenNLP Instance.

        Intermediate artifacts (parse tree, graphs, word lists, BERT pieces)
        are written back into ``sample`` and attached as a MetadataField.
        """
        fields = {}
        text: str = sample['text'].strip()
        text = re.sub('\\s+', ' ', text)  # collapse runs of whitespace
        sample['text_clean'] = text
        # constituency parse; inner (non-leaf) nodes ordered by node id
        tree = ConstituencyTreeNode.parse_using_spacy(self.spacy_nlp, text)
        inner_nodes = ConstituencyTreeNode.get_all_inner_nodes(tree)
        inner_nodes.sort(key=lambda x: x.node_id)
        nodes_labels = [e.labels[0] for e in inner_nodes]
        sample['tree'] = tree
        sample['inner_nodes'] = inner_nodes
        sample['nodes_labels'] = nodes_labels
        graph = self._build_graph(tree)
        sample['graph'] = graph
        graph_with_dotted_line = self._build_graph_with_dotted_line(tree)
        sample['graph_with_dotted_line'] = graph_with_dotted_line
        words = self.tokenizer(text)
        sample['words'] = words
        tokens = [Token(word) for word in words]
        sentence_field = TextField(tokens, self.token_indexers)
        fields['tokens'] = sentence_field
        # graph token sequence = sentence words followed by inner-node labels,
        # matching the node order used by the DGL graphs
        words_for_graph = words + nodes_labels
        sample['words_for_graph'] = ['%d-%s' % (e[0], e[1]) for e in enumerate(words_for_graph)]
        tokens_for_graph = [Token(word) for word in words_for_graph]
        sentence_field_for_graph = TextField(tokens_for_graph, self.token_indexers)
        fields['tokens_for_graph'] = sentence_field_for_graph
        # BERT pieces: '[CLS]' + pieces of every word + '[SEP]', remembering
        # which word-piece positions belong to each original word
        bert_words = ['[CLS]']
        word_index_and_bert_indices = {}
        for i, word in enumerate(words):
            bert_ws = self.bert_tokenizer.tokenize(word)
            word_index_and_bert_indices[i] = []
            for j in range(len(bert_ws)):
                word_index_and_bert_indices[i].append(len(bert_words) + j)
            bert_words.extend(bert_ws)
        bert_words.append('[SEP]')
        # for i in range(len(words)):
        #     print('%s-%s' % (words[i], str([bert_words[j] for j in word_index_and_bert_indices[i]])))
        bert_text_fileds = []
        bert_words_of_all_aspect = []
        for aspect in self.categories:
            if self.configuration['pair']:
                # sentence-pair mode: append the aspect's word pieces plus a
                # second '[SEP]' to the shared sentence pieces
                aspect_words = self.bert_tokenizer.tokenize(aspect)
                bert_words_of_aspect = bert_words + aspect_words + ['[SEP]']
            else:
                bert_words_of_aspect = bert_words
            bert_words_of_all_aspect.append(bert_words_of_aspect)
            bert_tokens_of_aspect = [Token(word) for word in bert_words_of_aspect]
            bert_text_field = TextField(bert_tokens_of_aspect, self.bert_token_indexers)
            bert_text_fileds.append(bert_text_field)
        bert_field = ListField(bert_text_fileds)
        fields['bert'] = bert_field
        sample['bert_words'] = bert_words
        sample['bert_words_of_all_aspect'] = bert_words_of_all_aspect
        sample['word_index_and_bert_indices'] = word_index_and_bert_indices
        position = [Token(str(i)) for i in range(len(tokens))]
        position_field = TextField(position, self.position_indexers)
        fields['position'] = position_field
        # per category: detection flag (0/1), polarity index (-100 = not
        # annotated), and their sum as a combined label
        category_labels = [0] * len(self.categories)
        polarity_labels = [-100] * len(self.categories)
        total_labels = []
        # NOTE(review): many keys have been added to `sample` above, so this
        # length check is effectively always true for dict input; the intent
        # looks like "labels were supplied" — sample['labels'] would raise
        # KeyError for an unlabeled sample.  Verify with prediction-time use.
        if len(sample) > 1:
            labels: list = sample['labels']
            for label in labels:
                category_labels[label[0]] = 1
                polarity_labels[label[0]] = label[1]
            for i in range(len(self.categories)):
                if polarity_labels[i] == -100:
                    total_labels.append(0)
                else:
                    total_labels.append(polarity_labels[i] + category_labels[i])
            label_field = ArrayField(np.array(category_labels + polarity_labels + total_labels))
            fields["label"] = label_field
            # 1 where the category is annotated, 0 otherwise
            polarity_mask = [1 if polarity_labels[i] != -100 else 0 for i in range(len(self.categories))]
            polarity_mask_field = ArrayField(np.array(polarity_mask))
            fields['polarity_mask'] = polarity_mask_field
        sample_field = MetadataField(sample)
        fields["sample"] = sample_field
        return Instance(fields)

    @overrides
    def _read(self, samples: list) -> Iterator[Instance]:
        """Yield Instances per configuration['acd_sc_mode'].

        'multi-multi': one Instance per sample.  'multi-single': one Instance
        per label, with all other labels' polarities masked to -100.
        """
        acd_sc_mode = self.configuration['acd_sc_mode']
        if acd_sc_mode == 'multi-multi':
            for sample in samples:
                yield self.text_to_instance({'text': sample[0], 'labels': sample[1]})
        elif acd_sc_mode == 'multi-single':
            for sample in samples:
                text = sample[0]
                labels = sample[1]
                for i in range(len(labels)):
                    labels_copy = [list(e) for e in copy.deepcopy(labels)]
                    for j, label in enumerate(labels_copy):
                        if j != i:
                            labels_copy[j][1] = -100
                    yield self.text_to_instance({'text': text, 'labels': labels_copy})
        elif acd_sc_mode == 'single-single':
            raise NotImplementedError('single-single')
class AcdAndScDatasetReaderConstituencyBertSingle(DatasetReader):
    """Variant of AcdAndScDatasetReaderConstituencyBert that builds a single
    shared BERT input per sentence instead of one per aspect category.

    NOTE(review): unlike the reader above, this one does not add the benepar
    component to the spaCy pipeline, does not normalize whitespace in the
    text, and only builds the plain constituency graph (no dotted-line
    variant) — confirm these differences are intentional.
    """

    def __init__(self, categories: List[str], polarities: List[str],
                 tokenizer: Callable[[str], List[str]] = lambda x: x.split(),
                 token_indexers: Dict[str, TokenIndexer] = None,
                 position_indexers: Dict[str, TokenIndexer] = None,
                 sentence_constituency_indexer: Dict[str, TokenIndexer] = None,
                 core_nlp: my_corenlp.StanfordCoreNLP=None,
                 configuration=None,
                 bert_tokenizer=None,
                 bert_token_indexers=None) -> None:
        super().__init__(lazy=False)
        self.tokenizer = tokenizer
        self.token_indexers = token_indexers or {"tokens": SingleIdTokenIndexer(namespace="tokens")}
        self.bert_tokenizer = bert_tokenizer
        self.bert_token_indexers = bert_token_indexers or {"bert": SingleIdTokenIndexer(namespace="bert")}
        self.position_indexers = position_indexers or {"position": SingleIdTokenIndexer(namespace='position')}
        self.sentence_constituency_indexer = sentence_constituency_indexer
        self.categories = categories
        self.polarities = polarities
        self.spacy_nlp = spacy.load("en_core_web_sm")
        self.core_nlp = core_nlp
        self.configuration = configuration

    def _build_graph(self, tree: ConstituencyTreeNode):
        """Build the DGL graph over words + constituency inner nodes."""
        graph = create_graph.create_sentence_constituency_graph_for_dgl(tree)
        return graph

    @overrides
    def text_to_instance(self, sample: dict) -> Instance:
        """Convert one sample dict ({'text': ..., 'labels': [...]}) into an
        AllenNLP Instance with a single shared BERT input sequence.
        """
        fields = {}
        text: str = sample['text'].strip()
        # constituency parse; inner (non-leaf) nodes ordered by node id
        tree = ConstituencyTreeNode.parse_using_spacy(self.spacy_nlp, text)
        inner_nodes = ConstituencyTreeNode.get_all_inner_nodes(tree)
        inner_nodes.sort(key=lambda x: x.node_id)
        nodes_labels = [e.labels[0] for e in inner_nodes]
        sample['tree'] = tree
        sample['inner_nodes'] = inner_nodes
        sample['nodes_labels'] = nodes_labels
        graph = self._build_graph(tree)
        sample['graph'] = graph
        words = self.tokenizer(text)
        sample['words'] = words
        tokens = [Token(word) for word in words]
        sentence_field = TextField(tokens, self.token_indexers)
        fields['tokens'] = sentence_field
        # graph token sequence = sentence words followed by inner-node labels
        words_for_graph = words + nodes_labels
        sample['words_for_graph'] = ['%d-%s' % (e[0], e[1]) for e in enumerate(words_for_graph)]
        tokens_for_graph = [Token(word) for word in words_for_graph]
        sentence_field_for_graph = TextField(tokens_for_graph, self.token_indexers)
        fields['tokens_for_graph'] = sentence_field_for_graph
        # BERT pieces: '[CLS]' + pieces of every word + '[SEP]', remembering
        # which word-piece positions belong to each original word
        bert_words = ['[CLS]']
        word_index_and_bert_indices = {}
        for i, word in enumerate(words):
            bert_ws = self.bert_tokenizer.tokenize(word)
            word_index_and_bert_indices[i] = []
            for j in range(len(bert_ws)):
                word_index_and_bert_indices[i].append(len(bert_words) + j)
            bert_words.extend(bert_ws)
        bert_words.append('[SEP]')
        # for i in range(len(words)):
        #     print('%s-%s' % (words[i], str([bert_words[j] for j in word_index_and_bert_indices[i]])))
        # single shared BERT sequence (no per-aspect pairing), kept in a
        # one-element ListField so downstream code sees the same field shape
        bert_text_fileds = []
        bert_words_of_all_aspect = []
        bert_words_of_all_aspect.append(bert_words)
        bert_tokens = [Token(word) for word in bert_words]
        bert_text_field = TextField(bert_tokens, self.bert_token_indexers)
        bert_text_fileds.append(bert_text_field)
        bert_field = ListField(bert_text_fileds)
        fields['bert'] = bert_field
        sample['bert_words'] = bert_words
        sample['bert_words_of_all_aspect'] = bert_words_of_all_aspect
        sample['word_index_and_bert_indices'] = word_index_and_bert_indices
        position = [Token(str(i)) for i in range(len(tokens))]
        position_field = TextField(position, self.position_indexers)
        fields['position'] = position_field
        # per category: detection flag (0/1), polarity index (-100 = not
        # annotated), and their sum as a combined label
        category_labels = [0] * len(self.categories)
        polarity_labels = [-100] * len(self.categories)
        total_labels = []
        # NOTE(review): `sample` has many keys by this point, so the check is
        # effectively always true — see the same note in the reader above.
        if len(sample) > 1:
            labels: list = sample['labels']
            for label in labels:
                category_labels[label[0]] = 1
                polarity_labels[label[0]] = label[1]
            for i in range(len(self.categories)):
                if polarity_labels[i] == -100:
                    total_labels.append(0)
                else:
                    total_labels.append(polarity_labels[i] + category_labels[i])
            label_field = ArrayField(np.array(category_labels + polarity_labels + total_labels))
            fields["label"] = label_field
            polarity_mask = [1 if polarity_labels[i] != -100 else 0 for i in range(len(self.categories))]
            polarity_mask_field = ArrayField(np.array(polarity_mask))
            fields['polarity_mask'] = polarity_mask_field
        sample_field = MetadataField(sample)
        fields["sample"] = sample_field
        return Instance(fields)

    @overrides
    def _read(self, samples: list) -> Iterator[Instance]:
        """Yield Instances per configuration['acd_sc_mode'] (see the reader
        above for the mode semantics)."""
        acd_sc_mode = self.configuration['acd_sc_mode']
        if acd_sc_mode == 'multi-multi':
            for sample in samples:
                yield self.text_to_instance({'text': sample[0], 'labels': sample[1]})
        elif acd_sc_mode == 'multi-single':
            for sample in samples:
                text = sample[0]
                labels = sample[1]
                for i in range(len(labels)):
                    labels_copy = [list(e) for e in copy.deepcopy(labels)]
                    for j, label in enumerate(labels_copy):
                        if j != i:
                            labels_copy[j][1] = -100
                    yield self.text_to_instance({'text': text, 'labels': labels_copy})
        elif acd_sc_mode == 'single-single':
            raise NotImplementedError('single-single')
| 45.005618 | 110 | 0.644489 | 2,853 | 24,033 | 5.156677 | 0.067648 | 0.026509 | 0.013594 | 0.013458 | 0.89342 | 0.887371 | 0.876699 | 0.873029 | 0.866504 | 0.866504 | 0 | 0.006762 | 0.255482 | 24,033 | 533 | 111 | 45.090056 | 0.815459 | 0.029418 | 0 | 0.861915 | 0 | 0.01559 | 0.052422 | 0.006263 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040089 | false | 0 | 0.077951 | 0 | 0.14922 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
811977e002c131f978e545823e309d7f7ba140b6 | 3,213 | py | Python | tests/io/test_data_formatter.py | JBGreisman/careless | 8f6c0859973757d11b26b65d9dc51d443030aa70 | [
"MIT"
] | 5 | 2021-02-08T16:34:38.000Z | 2022-03-25T19:16:09.000Z | tests/io/test_data_formatter.py | JBGreisman/careless | 8f6c0859973757d11b26b65d9dc51d443030aa70 | [
"MIT"
] | 28 | 2021-01-15T21:31:40.000Z | 2022-03-30T21:06:54.000Z | tests/io/test_data_formatter.py | JBGreisman/careless | 8f6c0859973757d11b26b65d9dc51d443030aa70 | [
"MIT"
] | 5 | 2021-02-12T18:43:58.000Z | 2022-02-02T21:38:56.000Z | import pytest
import reciprocalspaceship as rs
from careless.io.formatter import MonoFormatter,LaueFormatter
from careless.models.base import BaseModel
# If you change this, you need to leave 'dHKL' at the beginning
# (shared metadata column list passed positionally to both formatter tests below)
metadata_keys = ['dHKL', 'Hobs', 'image_id']
@pytest.mark.parametrize('intensity_key', ['I', None])
@pytest.mark.parametrize('sigma_key', ['SigI', None])
@pytest.mark.parametrize('image_id_key', ['BATCH', None])
@pytest.mark.parametrize('separate_outputs', [True, False])
@pytest.mark.parametrize('anomalous', [True, False])
@pytest.mark.parametrize('dmin', [0., 7.])
@pytest.mark.parametrize('isigi_cutoff', [None, 3.])
@pytest.mark.parametrize('positional_encoding_keys', [None, ['X', 'Y']])
@pytest.mark.parametrize('encoding_bit_depth', [3])
def test_mono_formatter(
        intensity_key,
        sigma_key,
        image_id_key,
        separate_outputs,
        anomalous,
        dmin,
        isigi_cutoff,
        positional_encoding_keys,
        encoding_bit_depth,
        mono_data_set,
        ):
    """MonoFormatter output arrays are 2-D, typed, and row-aligned under
    every supported option combination."""
    dataset = mono_data_set.copy()
    formatter = MonoFormatter(
        intensity_key,
        sigma_key,
        image_id_key,
        metadata_keys,
        separate_outputs,
        anomalous,
        dmin,
        isigi_cutoff,
        positional_encoding_keys,
        encoding_bit_depth,
    )
    inputs, rac = formatter([dataset])
    expected_rows = None
    for arr in inputs:
        assert arr.ndim == 2
        assert arr.dtype in ('float32', 'int64')
        if expected_rows is None:
            expected_rows = arr.shape[0]
        assert arr.shape[0] == expected_rows
    # smoke-check: metadata can be extracted back out of the inputs
    metadata = BaseModel.get_metadata(inputs)
@pytest.mark.parametrize('lam_min', [None, 0.8])
@pytest.mark.parametrize('lam_max', [None, 1.5])
@pytest.mark.parametrize('intensity_key', ['I', None])
@pytest.mark.parametrize('sigma_key', ['SigI', None])
@pytest.mark.parametrize('image_id_key', ['BATCH', None])
@pytest.mark.parametrize('separate_outputs', [True, False])
@pytest.mark.parametrize('anomalous', [True, False])
@pytest.mark.parametrize('dmin', [None, 7.])
@pytest.mark.parametrize('isigi_cutoff', [None, 3.])
@pytest.mark.parametrize('positional_encoding_keys', [None, ['X', 'Y']])
@pytest.mark.parametrize('encoding_bit_depth', [3])
def test_laue_formatter(
        lam_min,
        lam_max,
        intensity_key,
        sigma_key,
        image_id_key,
        separate_outputs,
        anomalous,
        dmin,
        isigi_cutoff,
        positional_encoding_keys,
        encoding_bit_depth,
        laue_data_set,
        ):
    """LaueFormatter output arrays are 2-D, typed, and row-aligned under
    every supported option combination (including wavelength limits)."""
    dataset = laue_data_set.copy()
    formatter = LaueFormatter(
        'Wavelength',
        intensity_key,
        sigma_key,
        image_id_key,
        metadata_keys,
        separate_outputs,
        anomalous,
        lam_min,
        lam_max,
        dmin,
        isigi_cutoff,
        positional_encoding_keys,
        encoding_bit_depth,
    )
    inputs, rac = formatter([dataset])
    expected_rows = None
    for arr in inputs:
        assert arr.ndim == 2
        assert arr.dtype in ('float32', 'int64')
        if expected_rows is None:
            expected_rows = arr.shape[0]
        assert arr.shape[0] == expected_rows
    # smoke-check: metadata can be extracted back out of the inputs
    metadata = BaseModel.get_metadata(inputs)
| 29.75 | 72 | 0.614379 | 374 | 3,213 | 5.053476 | 0.23262 | 0.10582 | 0.222222 | 0.079365 | 0.77672 | 0.77672 | 0.77672 | 0.77672 | 0.77672 | 0.77672 | 0 | 0.010513 | 0.259882 | 3,213 | 107 | 73 | 30.028037 | 0.784273 | 0.018985 | 0 | 0.795918 | 0 | 0 | 0.102255 | 0.015243 | 0 | 0 | 0 | 0 | 0.061224 | 1 | 0.020408 | false | 0 | 0.040816 | 0 | 0.061224 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4a24d95b0c6a4cf675fb2a7821f82b45a068ece | 28,356 | py | Python | tests/broker/test_justification.py | ned21/aquilon | 6562ea0f224cda33b72a6f7664f48d65f96bd41a | [
"Apache-2.0"
] | 7 | 2015-07-31T05:57:30.000Z | 2021-09-07T15:18:56.000Z | tests/broker/test_justification.py | ned21/aquilon | 6562ea0f224cda33b72a6f7664f48d65f96bd41a | [
"Apache-2.0"
] | 115 | 2015-03-03T13:11:46.000Z | 2021-09-20T12:42:24.000Z | tests/broker/test_justification.py | ned21/aquilon | 6562ea0f224cda33b72a6f7664f48d65f96bd41a | [
"Apache-2.0"
] | 13 | 2015-03-03T11:17:59.000Z | 2021-09-09T09:16:41.000Z | #!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2014,2015,2016,2017,2018 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the make command."""
import unittest
if __name__ == "__main__":
    # make the in-tree dependencies importable when run as a script
    import utils
    utils.import_depends()

from broker.brokertest import TestBrokerCommand
from broker.personalitytest import PersonalityTestMixin

# Personalities used throughout: commands against the production personality
# (PPROD) must require justification; the QA personality (QPROD) must not.
GRN = "grn:/ms/ei/aquilon/aqd"
PPROD = "justify-prod"
QPROD = "justify-qa"
class TestJustification(PersonalityTestMixin, TestBrokerCommand):
    """Verify which broker commands demand --justification and --reason.

    Commands touching the production personality (PPROD) must be rejected
    without a justification and accepted with a valid TCM; the same commands
    against the QA personality (QPROD) must succeed with neither.  Emergency
    justifications additionally require a reason.
    """

    def test_100_setup(self):
        """Create the host features used by the bind/unbind feature tests."""
        command = ["add", "feature", "--feature", "testfeature",
                   "--type", "host", "--grn", GRN, "--visibility", "public",
                   "--activation", "reboot", "--deactivation", "reboot"]
        self.noouttest(command)
        command = ["add", "feature", "--feature", "testclusterfeature",
                   "--type", "host", "--grn", GRN, "--visibility", "public",
                   "--activation", "reboot", "--deactivation", "reboot"]
        self.noouttest(command)

    def test_110_host_setup(self):
        """Move two ready hosts onto the production personality."""
        host_list = ["aquilon91.aqd-unittest.ms.com", "unittest26.aqd-unittest.ms.com"]
        for host in host_list:
            command = ["reconfigure", "--hostname", host,
                       "--archetype", "aquilon", "--buildstatus", "ready",
                       "--personality", PPROD, "--personality_stage", "next"] + self.valid_just_tcm
            self.statustest(command)
    def test_200_update_personality(self):
        """update_personality on PPROD needs justification; valid TCM passes."""
        command = ["update_personality",
                   "--archetype", "aquilon",
                   "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_210_add_parameter(self):
        """add_parameter on PPROD needs justification; valid TCM passes."""
        command = ["add_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/users",
                   "--value", "test"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_215_update_parameter(self):
        """update_parameter on PPROD needs justification; valid TCM passes."""
        command = ["update_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/users",
                   "--value", "test"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_220_del_parameter(self):
        """del_parameter on PPROD needs justification; valid TCM passes."""
        command = ["del_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/users"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_230_map_grn(self):
        """map_grn on PPROD needs justification; valid TCM passes."""
        command = ["map_grn",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--grn", GRN,
                   "--target", "esp"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_240_unmap_grn(self):
        """unmap_grn on PPROD needs justification; valid TCM passes."""
        command = ["unmap_grn",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--grn", GRN,
                   "--target", "esp"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)
    def test_270_add_required_svc(self):
        """add_required_service on PPROD needs justification; TCM passes."""
        command = ["add_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_280_del_required_svc(self):
        """del_required_service on PPROD needs justification; TCM passes."""
        command = ["del_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_290_add_static_route(self):
        """add static route on PPROD needs justification; TCM passes."""
        gw = self.net["routing1"].usable[-1]
        command = ["add", "static", "route", "--gateway", gw,
                   "--ip", "192.168.248.0", "--prefixlen", "24",
                   "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.statustest(command)

    def test_300_del_static_route(self):
        """del static route on PPROD needs justification; TCM passes."""
        gw = self.net["routing1"].usable[-1]
        command = ["del", "static", "route", "--gateway", gw,
                   "--ip", "192.168.248.0", "--prefixlen", "24",
                   "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.statustest(command)

    def test_305_update_host_back(self):
        """Return unittest26 to the inventory personality."""
        host = "unittest26.aqd-unittest.ms.com"
        command = ["reconfigure", "--hostname", host,
                   "--archetype", "aquilon", "--buildstatus", "ready",
                   "--personality", "inventory"] + self.valid_just_tcm
        self.statustest(command)
    def test_310_map_service(self):
        """map service for PPROD needs justification; valid TCM passes."""
        command = ["map", "service", "--organization", "ms",
                   "--service", "utsvc", "--instance", "utsi2",
                   "--archetype", "aquilon", "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_320_unmap_service(self):
        """unmap service for PPROD needs justification; valid TCM passes."""
        command = ["unmap", "service", "--organization", "ms",
                   "--service", "utsvc", "--instance", "utsi2",
                   "--archetype", "aquilon", "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_330_bind_feature(self):
        """bind feature on PPROD needs justification; valid TCM passes."""
        command = ["bind", "feature", "--feature", "testfeature",
                   "--archetype", "aquilon", "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.statustest(command)

    def test_340_unbind_feature(self):
        """unbind feature on PPROD needs justification; valid TCM passes."""
        command = ["unbind", "feature", "--feature", "testfeature",
                   "--archetype", "aquilon", "--personality", PPROD]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.statustest(command)

    def test_350_map_service(self):
        """Global (no-personality) map of utsvc also needs justification."""
        command = ["map", "service", "--organization", "ms",
                   "--service", "utsvc", "--instance", "utsi2"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_360_unmap_service(self):
        """Global unmap of utsvc also needs justification."""
        command = ["unmap", "service", "--organization", "ms",
                   "--service", "utsvc", "--instance", "utsi2"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_370_map_service(self):
        """Global map of vmseasoning/pepper also needs justification."""
        command = ["map", "service", "--organization", "ms",
                   "--service", "vmseasoning", "--instance", "pepper"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)

    def test_380_unmap_service(self):
        """Global unmap of vmseasoning/pepper also needs justification."""
        command = ["unmap", "service", "--organization", "ms",
                   "--service", "vmseasoning", "--instance", "pepper"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command += self.valid_just_tcm
        self.noouttest(command)
    def test_400_host_setup(self):
        """Move aquilon91 to the QA personality (justification still needed
        because it currently has the production personality)."""
        h = "aquilon91.aqd-unittest.ms.com"
        command = ["reconfigure", "--hostname", h,
                   "--archetype", "aquilon",
                   "--personality", QPROD, "--personality_stage", "next"] \
            + self.valid_just_tcm
        self.statustest(command)

    def test_405_update_personality(self):
        """update_personality on QPROD needs no justification."""
        command = ["update_personality",
                   "--archetype", "aquilon",
                   "--personality", QPROD]
        self.noouttest(command)

    def test_410_add_parameter(self):
        """add_parameter on QPROD needs no justification."""
        command = ["add_parameter",
                   "--archetype", "aquilon",
                   "--personality", QPROD,
                   "--path", "access/users",
                   "--value", "test"]
        self.noouttest(command)

    def test_415_update_parameter(self):
        """update_parameter on QPROD needs no justification."""
        command = ["update_parameter",
                   "--archetype", "aquilon",
                   "--personality", QPROD,
                   "--path", "access/users",
                   "--value", "test"]
        self.noouttest(command)

    def test_420_del_parameter(self):
        """del_parameter on QPROD needs no justification."""
        command = ["del_parameter",
                   "--archetype", "aquilon",
                   "--personality", QPROD,
                   "--path", "access/users"]
        self.noouttest(command)

    def test_430_map_grn(self):
        """map_grn on QPROD needs no justification."""
        command = ["map_grn",
                   "--archetype", "aquilon",
                   "--personality", QPROD,
                   "--grn", GRN,
                   "--target", "esp"]
        self.noouttest(command)

    def test_440_unmap_grn(self):
        """unmap_grn on QPROD needs no justification."""
        command = ["unmap_grn",
                   "--archetype", "aquilon",
                   "--personality", QPROD,
                   "--grn", GRN,
                   "--target", "esp"]
        self.noouttest(command)

    def test_470_add_required_svc(self):
        """add_required_service on QPROD needs no justification."""
        command = ["add_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", QPROD]
        self.noouttest(command)

    def test_480_del_required_svc(self):
        """del_required_service on QPROD needs no justification."""
        command = ["del_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", QPROD]
        self.noouttest(command)

    def test_490_add_static_route(self):
        """add static route on QPROD needs no justification."""
        gw = self.net["routing1"].usable[-1]
        command = ["add", "static", "route", "--gateway", gw,
                   "--ip", "192.168.248.0", "--prefixlen", "24",
                   "--personality", QPROD]
        self.noouttest(command)

    def test_500_del_static_route(self):
        """del static route on QPROD needs no justification."""
        gw = self.net["routing1"].usable[-1]
        command = ["del", "static", "route", "--gateway", gw,
                   "--ip", "192.168.248.0", "--prefixlen", "24",
                   "--personality", QPROD]
        self.noouttest(command)

    def test_510_map_service(self):
        """map service for QPROD needs no justification."""
        command = ["map", "service", "--organization", "ms",
                   "--service", "utsvc", "--instance", "utsi2",
                   "--archetype", "aquilon", "--personality", QPROD]
        self.noouttest(command)

    def test_520_unmap_service(self):
        """unmap service for QPROD needs no justification."""
        command = ["unmap", "service", "--organization", "ms",
                   "--service", "utsvc", "--instance", "utsi2",
                   "--archetype", "aquilon", "--personality", QPROD]
        self.noouttest(command)

    def test_530_bind_feature(self):
        """bind feature on QPROD needs no justification."""
        command = ["bind", "feature", "--feature", "testfeature",
                   "--archetype", "aquilon", "--personality", QPROD]
        self.statustest(command)

    def test_540_unbind_feature(self):
        """unbind feature on QPROD needs no justification."""
        command = ["unbind", "feature", "--feature", "testfeature",
                   "--archetype", "aquilon", "--personality", QPROD]
        self.statustest(command)

    def test_600_host_setup(self):
        """Move aquilon91 back to the production personality.

        NOTE(review): no justification is supplied here even though the
        target is PPROD — confirm moving *onto* a prod personality from a
        non-prod one is exempt.
        """
        h = "aquilon91.aqd-unittest.ms.com"
        command = ["reconfigure", "--hostname", h,
                   "--archetype", "aquilon",
                   "--personality", PPROD, "--personality_stage", "next"]
        self.statustest(command)
    def test_601_justification_no_reason(self):
        """An emergency justification without a reason is rejected."""
        command = ["update_personality",
                   "--archetype", "aquilon",
                   "--personality", PPROD] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)

    def test_605_update_personality_reason(self):
        """Emergency justification with a reason but no ticket is rejected."""
        command = ["update_personality",
                   "--archetype", "aquilon",
                   "--personality", PPROD] + self.emergency_just_with_reason
        self.emergencynojustification(command)

    def test_610_add_parameter_reason(self):
        """add_parameter: emergency needs a reason, and a reason alone
        (without a ticket) is still rejected."""
        command = ["add_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/netgroup",
                   "--value", "test"] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["add_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/netgroup",
                   "--value", "test"] + self.emergency_just_with_reason
        self.emergencynojustification(command)

    def test_620_update_parameter_reason(self):
        """update_parameter: emergency needs a reason; TCM + reason passes."""
        command = ["update_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/netgroup",
                   "--value", "test"] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["update_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/netgroup",
                   "--value", "test"] + self.emergency_tcm_just_with_reason
        self.noouttest(command)

    def test_630_del_parameter_reason(self):
        """del_parameter: emergency needs a reason; reason alone rejected."""
        command = ["del_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/netgroup"] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["del_parameter",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--path", "access/netgroup"] + self.emergency_just_with_reason
        self.emergencynojustification(command)

    def test_640_map_grn_reason(self):
        """map_grn: emergency needs a reason; TCM + reason passes."""
        command = ["map_grn",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--grn", GRN,
                   "--target", "esp"] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["map_grn",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--grn", GRN,
                   "--target", "esp"] + self.emergency_tcm_just_with_reason
        self.noouttest(command)

    def test_650_map_grn_reason(self):
        """unmap_grn: emergency needs a reason; TCM + reason passes.

        NOTE(review): method name says map_grn but the command is unmap_grn —
        consider renaming to test_650_unmap_grn_reason.
        """
        command = ["unmap_grn",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--grn", GRN,
                   "--target", "esp"] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["unmap_grn",
                   "--archetype", "aquilon",
                   "--personality", PPROD,
                   "--grn", GRN,
                   "--target", "esp"] + self.emergency_tcm_just_with_reason
        self.noouttest(command)
    def test_660_add_required_svc_reason(self):
        """add_required_service: emergency needs a reason; reason alone
        (no ticket) is still rejected."""
        command = ["add_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", PPROD] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["add_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", PPROD] + self.emergency_just_with_reason
        self.emergencynojustification(command)

    def test_670_del_required_svc_reason(self):
        """del_required_service: emergency needs a reason; reason alone
        rejected."""
        command = ["del_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", PPROD] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["del_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--personality", PPROD] + self.emergency_just_with_reason
        self.emergencynojustification(command)

    def test_675_add_required_svc_reason_os(self):
        """OS-level add_required_service also needs justification; an
        emergency reason without a ticket is rejected."""
        command = ["add_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--osname", "linux", "--osversion",
                   "5.1-x86_64"]
        self.justificationmissingtest(command, auth=True, msgcheck=False)
        command = ["add_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--osname", "linux", "--osversion",
                   "5.1-x86_64"] + self.emergency_just_with_reason
        self.emergencynojustification(command)

    def test_676_del_required_svc_reason_os(self):
        """OS-level del_required_service: emergency needs a reason; TCM +
        reason passes."""
        command = ["del_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--osname", "linux", "--osversion",
                   "5.1-x86_64"] + self.emergency_just_without_reason
        self.reasonmissingtest(command, auth=True, msgcheck=False)
        command = ["del_required_service", "--service=chooser1",
                   "--archetype=aquilon", "--osname", "linux", "--osversion",
                   "5.1-x86_64"] + self.emergency_tcm_just_with_reason
        self.noouttest(command)

    def test_680_add_static_route_reason(self):
        """add static route passes with TCM + reason."""
        gw = self.net["routing1"].usable[-1]
        command = ["add", "static", "route", "--gateway", gw,
                   "--ip", "192.168.248.0", "--prefixlen", "24",
                   "--personality", PPROD] + self.emergency_tcm_just_with_reason
        self.statustest(command)

    def test_690_del_static_route_reason(self):
        """del static route passes with TCM + reason."""
        gw = self.net["routing1"].usable[-1]
        command = ["del", "static", "route", "--gateway", gw,
                   "--ip", "192.168.248.0", "--prefixlen", "24",
                   "--personality", PPROD] + self.emergency_tcm_just_with_reason
        self.statustest(command)
def test_700_add_service_reason(self):
command = ["map", "service", "--organization", "ms",
"--service", "utsvc", "--instance", "utsi2",
"--archetype", "aquilon", "--personality", PPROD] + self.emergency_just_without_reason
self.reasonmissingtest(command, auth=True, msgcheck=False)
command = ["map", "service", "--organization", "ms",
"--service", "utsvc", "--instance", "utsi2",
"--archetype", "aquilon", "--personality", PPROD] + self.emergency_just_with_reason
self.emergencynojustification(command)
def test_710_del_service_reason(self):
command = ["unmap", "service", "--organization", "ms",
"--service", "utsvc", "--instance", "utsi2",
"--archetype", "aquilon", "--personality", PPROD] + self.emergency_just_without_reason
self.reasonmissingtest(command, auth=True, msgcheck=False)
command = ["unmap", "service", "--organization", "ms",
"--service", "utsvc", "--instance", "utsi2",
"--archetype", "aquilon", "--personality", PPROD] + self.emergency_just_with_reason
self.emergencynojustification(command)
def test_720_add_feature_reason(self):
command = ["bind", "feature", "--feature", "testfeature",
"--archetype", "aquilon", "--personality", PPROD] + self.emergency_just_without_reason
self.reasonmissingtest(command, auth=True, msgcheck=False)
command = ["bind", "feature", "--feature", "testfeature",
"--archetype", "aquilon", "--personality", PPROD] + self.emergency_just_with_reason
self.emergencynojustification(command)
def test_730_del_feature_reason(self):
    """Unbind a feature: same emergency-justification checks as binding."""
    base_cmd = ["unbind", "feature", "--feature", "testfeature",
                "--archetype", "aquilon", "--personality", PPROD]
    self.reasonmissingtest(base_cmd + self.emergency_just_without_reason,
                           auth=True, msgcheck=False)
    self.emergencynojustification(base_cmd + self.emergency_just_with_reason)
def test_800_bind_feature_restricted(self):
    """Create the restricted (non-public) host feature used by the 8xx tests."""
    self.noouttest(["add", "feature",
                    "--feature", "nonpublicfeature",
                    "--type", "host",
                    "--grn", "grn:/ms/ei/aquilon/unittest",
                    "--activation", "reboot",
                    "--deactivation", "reboot"])
def test_810_bind_feature_restricted_qa(self):
    """Bind/unbind of the restricted feature on the QA personality needs no
    justification."""
    tail = ["--feature", "nonpublicfeature",
            "--archetype", "aquilon", "--personality", QPROD]
    self.statustest(["bind", "feature"] + tail)
    self.statustest(["unbind", "feature"] + tail)
def test_820_bind_feature_restricted_prod(self):
    """On the prod personality, binding the restricted feature requires a
    justification; with a valid TCM both bind and unbind succeed."""
    tail = ["--feature", "nonpublicfeature",
            "--archetype", "aquilon", "--personality", PPROD]
    self.justificationmissingtest(["bind", "feature"] + tail,
                                  auth=True, msgcheck=False)
    self.statustest(["bind", "feature"] + tail + self.valid_just_tcm)
    self.statustest(["unbind", "feature"] + tail + self.valid_just_tcm)
def test_850_bind_feature_restricted(self):
    """Clean up: delete the restricted feature created in test_800."""
    self.noouttest(["del", "feature",
                    "--feature", "nonpublicfeature",
                    "--type", "host"])
def test_860_accepted_tcm(self):
    """update_personality succeeds with an accepted TCM justification."""
    # TODO: create tests for rejected tickets
    cmd = ["update_personality",
           "--archetype", "aquilon",
           "--personality", PPROD]
    self.noouttest(cmd + self.valid_just_tcm)
def test_870_accepted_sn(self):
    """update_personality succeeds with an accepted SN justification."""
    # TODO: create tests for rejected tickets
    cmd = ["update_personality",
           "--archetype", "aquilon",
           "--personality", PPROD]
    self.noouttest(cmd + self.valid_just_sn)
def test_880_bind_feature_prod_cluster(self):
    """bind_feature on a prod cluster personality: missing justification,
    then missing reason, then emergency-without-proper-justification."""
    cmd = ["bind_feature", "--feature", "testclusterfeature",
           "--personality", "hapersonality"]
    self.justificationmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.emergency_just_without_reason
    self.reasonmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.just_reason
    self.emergencynojustification(cmd)
def test_890_add_parameter_definition_prod_cluster(self):
    """add_parameter_definition on the cluster feature: same escalating
    change-management checks as test_880."""
    cmd = ["add_parameter_definition", "--feature", "testclusterfeature",
           "--type", "host", "--path=teststringcluster", "--value_type=string",
           "--default", "default"]
    self.justificationmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.emergency_just_without_reason
    self.reasonmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.just_reason
    self.emergencynojustification(cmd)
def test_895_del_parameter_definition_prod_cluster(self):
    """del_parameter_definition on the cluster feature: same escalating
    change-management checks as test_890."""
    cmd = ["del_parameter_definition", "--feature", "testclusterfeature",
           "--type", "host", "--path=teststringcluster"]
    self.justificationmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.emergency_just_without_reason
    self.reasonmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.just_reason
    self.emergencynojustification(cmd)
def test_896_unbind_feature_prod_cluster(self):
    """unbind_feature on the prod cluster personality: same escalating
    change-management checks as test_880."""
    cmd = ["unbind_feature", "--feature", "testclusterfeature",
           "--personality", "hapersonality"]
    self.justificationmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.emergency_just_without_reason
    self.reasonmissingtest(cmd, auth=True, msgcheck=False)
    cmd = cmd + self.just_reason
    self.emergencynojustification(cmd)
def test_900_justification_required(self):
    """Adding aliases pointing at a prod/ready host only warns when the
    justification is missing; the aliases are still created."""
    show_cmd = "show host --host aquilon91.aqd-unittest.ms.com"
    out = self.commandtest(show_cmd.split(" "))
    self.matchoutput(out, 'Environment: prod', show_cmd)
    self.matchoutput(out, 'Build Status: ready', show_cmd)
    # Alias chain: aliasjustreq2 -> aliasjustreq -> aquilon91
    for fqdn, target in (("aliasjustreq.aqd-unittest.ms.com",
                          "aquilon91.aqd-unittest.ms.com"),
                         ("aliasjustreq2.aqd-unittest.ms.com",
                          "aliasjustreq.aqd-unittest.ms.com")):
        self.justificationmissingtest_warn(["add", "alias",
                                            "--fqdn", fqdn,
                                            "--target", target])
    addr_cmd = "show address --fqdn aquilon91.aqd-unittest.ms.com"
    out = self.commandtest(addr_cmd.split(" "))
    self.matchoutput(out, "Aliases: aliasjustreq.aqd-unittest.ms.com, "
                     "aliasjustreq2.aqd-unittest.ms.com", addr_cmd)
def test_905_justification_required(self):
    """Deleting the aliases warns without justification and succeeds with a
    valid TCM; afterwards the host no longer lists them."""
    self.justificationmissingtest_warn(
        ["del", "alias", "--fqdn", "aliasjustreq2.aqd-unittest.ms.com"])
    self.successtest(["del", "alias",
                      "--fqdn", "aliasjustreq.aqd-unittest.ms.com"] +
                     self.valid_just_tcm)
    addr_cmd = "show address --fqdn aquilon91.aqd-unittest.ms.com"
    out = self.commandtest(addr_cmd.split(" "))
    self.matchclean(out, "Aliases: aliasjustreq.aqd-unittest.ms.com, "
                    "aliasjustreq2.aqd-unittest.ms.com", addr_cmd)
def test_910_aqd_checkedm_unknownerror(self):
    """A non-JSON response from the change-management check surfaces as an
    internal error; the domain is restored afterwards."""
    self.commandtest(["update_domain", "--domain", "deployable",
                      "--archived"])
    del_cmd = (["del_domain", "--domain", "deployable"] +
               self.exception_trigger_just_tcm)
    err = self.internalerrortest(del_cmd)
    self.matchoutput(err, "Invalid response received for the change "
                     "management check. No JSON object could be decoded",
                     del_cmd)
    self.commandtest(["update_domain", "--domain", "deployable",
                      "--noarchived"])
def test_915_test_subprocess_timeout(self):
    """A change-management check subprocess timeout is reported as an
    internal error; the domain is restored afterwards.

    Note: renamed from test_915_test_subporcess_timeout (typo). Test
    methods are discovered by unittest, not called by name, so the
    rename is safe.
    """
    self.commandtest(["update_domain", "--domain", "deployable",
                      "--archived"])
    del_cmd = (["del_domain", "--domain", "deployable"] +
               self.timeout_trigger_just_tcm)
    err = self.internalerrortest(del_cmd)
    self.matchoutput(err, "when reaching timeout of 15 sec", del_cmd)
    # Undo the --archived flip so later tests see the domain unchanged.
    self.commandtest(["update_domain", "--domain", "deployable",
                      "--noarchived"])
if __name__ == '__main__':
    # Run the justification test case directly with verbose output.
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(loader.loadTestsFromTestCase(TestJustification))
| 42.640602 | 105 | 0.584356 | 2,646 | 28,356 | 6.068027 | 0.126984 | 0.029646 | 0.055805 | 0.083707 | 0.878924 | 0.84442 | 0.833832 | 0.824489 | 0.806677 | 0.752429 | 0 | 0.018664 | 0.270631 | 28,356 | 664 | 106 | 42.704819 | 0.757664 | 0.028565 | 0 | 0.745665 | 0 | 0 | 0.251399 | 0.025507 | 0 | 0 | 0 | 0 | 0 | 1 | 0.131021 | false | 0 | 0.009634 | 0 | 0.142582 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4c23d80a8fd29dfdfedb125ece4e14c630e2ac5 | 68,608 | py | Python | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/EightThreads_libquantum/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/EightThreads_libquantum/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/EightThreads_libquantum/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.163182,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.282572,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.162063,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.607816,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.161299,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.17722,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00591546,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0427765,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0437485,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0427765,
'Execution Unit/Register Files/Runtime Dynamic': 0.0496639,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.103366,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.290607,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 1.55615,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00139697,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00139697,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00122242,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000476312,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00062845,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0046448,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0131918,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0420565,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.67516,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.1886,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.142843,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.02549,
'Instruction Fetch Unit/Runtime Dynamic': 0.391336,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0645272,
'L2/Runtime Dynamic': 0.0170111,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.92672,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.358215,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0223103,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0223102,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.03251,
'Load Store Unit/Runtime Dynamic': 0.490551,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0550134,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.110026,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0195244,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0204932,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.166331,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0309189,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.358584,
'Memory Management Unit/Runtime Dynamic': 0.0514121,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 17.22,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.00834421,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0859063,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.0942505,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 2.60071,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0523537,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0844445,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0426247,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.179423,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.059878,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.93987,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00219595,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0158796,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0162404,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0158796,
'Execution Unit/Register Files/Runtime Dynamic': 0.0184363,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0334539,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.0936856,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.899611,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000545206,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000545206,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000481139,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000189683,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000233294,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00180484,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00500353,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0156123,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.993079,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0712866,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0530264,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.25979,
'Instruction Fetch Unit/Runtime Dynamic': 0.146734,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0268919,
'L2/Runtime Dynamic': 0.00915849,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.51137,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.146314,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.00887263,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.00887267,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.55327,
'Load Store Unit/Runtime Dynamic': 0.198944,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0218784,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.043757,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.00776471,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.00816853,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.061746,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0116867,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.231193,
'Memory Management Unit/Runtime Dynamic': 0.0198552,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.6005,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00236205,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0269393,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0293014,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.3036,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0502308,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0810205,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0408964,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.172148,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0574506,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.93511,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00210691,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0152359,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0155819,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0152359,
'Execution Unit/Register Files/Runtime Dynamic': 0.0176888,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0320977,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.0904574,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.88836,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000515972,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000515972,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000454699,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000178913,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000223835,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00171048,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00475818,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0149792,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.952807,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.068342,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0508763,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.21756,
'Instruction Fetch Unit/Runtime Dynamic': 0.140666,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0255279,
'L2/Runtime Dynamic': 0.00850176,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.49977,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.139694,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.00849739,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.00849749,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.5399,
'Load Store Unit/Runtime Dynamic': 0.190098,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0209531,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0419067,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.00743633,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.00781969,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.059242,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.011204,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.228125,
'Memory Management Unit/Runtime Dynamic': 0.0190236,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.5357,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00226628,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0258581,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0281244,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.27477,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0442868,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.071433,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.036057,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.151777,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0506524,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.9218,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00185759,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.013433,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.013738,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.013433,
'Execution Unit/Register Files/Runtime Dynamic': 0.0155956,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0282996,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.0800243,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.855462,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000455626,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000455626,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000401942,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000158383,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000197347,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00151054,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00418657,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0132067,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.840058,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0604113,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0448559,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.09934,
'Instruction Fetch Unit/Runtime Dynamic': 0.124171,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0229629,
'L2/Runtime Dynamic': 0.00795275,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.47206,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.125513,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.00760078,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0076008,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.50795,
'Load Store Unit/Runtime Dynamic': 0.170599,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0187422,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0374845,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.00665168,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.00699651,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0522317,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00990379,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.219767,
'Memory Management Unit/Runtime Dynamic': 0.0169003,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.3613,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0019981,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0228004,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0247985,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.19988,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 6.314308789166848,
'Runtime Dynamic': 6.314308789166848,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.299406,
'Runtime Dynamic': 0.0970383,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 55.0169,
'Peak Power': 88.1291,
'Runtime Dynamic': 6.47601,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 54.7175,
'Total Cores/Runtime Dynamic': 6.37897,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.299406,
'Total L3s/Runtime Dynamic': 0.0970383,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.063457 | 124 | 0.682078 | 8,082 | 68,608 | 5.784212 | 0.064959 | 0.123556 | 0.112946 | 0.093437 | 0.941024 | 0.93373 | 0.920745 | 0.895824 | 0.867417 | 0.84855 | 0 | 0.131932 | 0.224332 | 68,608 | 914 | 125 | 75.063457 | 0.74651 | 0 | 0 | 0.664114 | 0 | 0 | 0.657421 | 0.048099 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4dc9fa99aa5f1de0db7a2717e77dd7d34c0fd37 | 4,791 | py | Python | predict_and_recompute/numerical_experiments/cg_variants/pr_cg.py | tchen01/new_cg_variants | 2f1ec5993f69ea771a94798b9a27bea30f52e804 | [
"MIT"
] | null | null | null | predict_and_recompute/numerical_experiments/cg_variants/pr_cg.py | tchen01/new_cg_variants | 2f1ec5993f69ea771a94798b9a27bea30f52e804 | [
"MIT"
] | null | null | null | predict_and_recompute/numerical_experiments/cg_variants/pr_cg.py | tchen01/new_cg_variants | 2f1ec5993f69ea771a94798b9a27bea30f52e804 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import scipy as sp
from scipy import sparse
from scipy.sparse import linalg
def pr_master_cg(A,b,x0,max_iter,variant='',callbacks=[],**kwargs):
    '''
    master template for predict-and-recompute conjugate gradients

    Parameters
    ----------
    A : symmetric positive definite matrix (anything supporting `A @ v`)
    b : ndarray, right-hand side
    x0 : ndarray, initial guess
    max_iter : int, iteration loop runs for k = 1 .. max_iter-1
    variant : str, 'm' selects Meurant's nu recurrence; anything else uses
        the full predict-and-recompute recurrence
    callbacks : list of callables, each invoked as callback(**locals())
        once per iteration (including the setup iteration k = 0)

    Returns
    -------
    output : dict with keys 'name' and 'max_iter'.  Iterates (x_k, r_k, ...)
        are exposed to callers only through the callbacks.
    '''
    # get size of problem (unused here, but visible to callbacks via locals())
    n = len(b)

    # initialize
    output = {}
    output['name'] = f'{variant}_cg'
    output['max_iter'] = max_iter

    x_k = np.copy(x0)
    r_k = np.copy(b - A @ x_k)
    # fix: w_k must exist before the loop copies it on the first iteration
    # (it was previously first assigned only inside the loop -> NameError)
    w_k = A @ r_k
    # fix: was `rt_k @ r_k` -- rt_k is defined only in the preconditioned
    # version (pr_master_pcg); here the preconditioner is the identity.
    nu_k = r_k @ r_k
    p_k = np.copy(r_k)
    s_k = A @ p_k
    mu_k = p_k @ s_k
    a_k = nu_k / mu_k
    del_k = r_k @ s_k
    gam_k = s_k @ s_k
    a_k1 = 0
    a_k2 = 0
    b_k = 0
    b_k1 = 0
    k = 0
    for callback in callbacks:
        callback(**locals())

    # run main optimization
    for k in range(1, max_iter):
        # update indexing: *_k1 names hold the previous iteration's values
        a_k2 = a_k1
        a_k1 = a_k
        b_k1 = b_k
        nu_k1 = nu_k
        del_k1 = del_k
        gam_k1 = gam_k
        x_k1 = np.copy(x_k)
        r_k1 = np.copy(r_k)
        w_k1 = np.copy(w_k)
        p_k1 = np.copy(p_k)
        s_k1 = np.copy(s_k)

        # main loop
        x_k = x_k1 + a_k1 * p_k1
        r_k = r_k1 - a_k1 * s_k1
        w_k = A @ r_k
        # "predict" step: recurrence estimate of nu_k = r_k @ r_k
        nu_k = - nu_k1 + a_k1**2 * gam_k1 if variant == 'm' else \
               nu_k1 - 2 * a_k1 * del_k1 + a_k1**2 * gam_k1
        b_k = nu_k / nu_k1
        p_k = r_k + b_k * p_k1
        s_k = A @ p_k
        mu_k = p_k @ s_k
        # fix: was `r_k @ st_k` -- st_k exists only in pr_master_pcg
        del_k = r_k @ s_k
        gam_k = s_k @ s_k
        nu_k = r_k @ r_k  # "recompute" step: replace prediction exactly
        a_k = nu_k / mu_k

        # call callback functions
        for callback in callbacks:
            callback(**locals())

    return output
def pr_cg(A,b,x0,max_iter,callbacks=[],**kwargs):
    '''
    predict-and-recompute conjugate gradient
    (implementation from Chen 2019)

    Thin wrapper: delegates to pr_master_cg with variant='pr'.
    '''
    return pr_master_cg(A, b, x0, max_iter,
                        variant='pr', callbacks=callbacks, **kwargs)
def m_cg(A,b,x0,max_iter,callbacks=[],**kwargs):
    '''
    predict-and-recompute Meurant conjugate gradient

    Thin wrapper: delegates to pr_master_cg with variant='m'.
    '''
    return pr_master_cg(A, b, x0, max_iter,
                        variant='m', callbacks=callbacks, **kwargs)
def pr_master_pcg(A,b,x0,max_iter,preconditioner=lambda x:x,variant='',callbacks=[],**kwargs):
    '''
    master template for predict-and-recompute conjugate gradients (preconditioned)

    Runs max_iter-1 iterations after setup, invoking every callback as
    callback(**locals()) once per iteration (including iteration k = 0).
    `variant == 'm'` selects Meurant's nu recurrence; anything else uses the
    full predict-and-recompute recurrence.  Returns a dict holding only the
    variant name and max_iter -- iterates are exposed through the callbacks.
    '''
    # problem size (unused in the recurrences, but visible to callbacks)
    n = len(b)

    output = {'name': f"{variant}_pcg", 'max_iter': max_iter}

    # --- setup (iteration 0) ---
    x_k = np.copy(x0)
    r_k = np.copy(b - A @ x_k)
    rt_k = preconditioner(r_k)
    nu_k = rt_k @ r_k
    p_k = np.copy(rt_k)
    s_k = A @ p_k
    st_k = preconditioner(s_k)
    mu_k = p_k @ s_k
    a_k = nu_k / mu_k
    del_k = r_k @ st_k
    gam_k = st_k @ s_k
    a_k1, a_k2, b_k, b_k1 = 0, 0, 0, 0
    k = 0
    for callback in callbacks:
        callback(**locals())

    # --- main predict-and-recompute iteration ---
    for k in range(1, max_iter):
        # shift iterates: the *_k1 names hold the previous step's values
        a_k2, a_k1, b_k1 = a_k1, a_k, b_k
        nu_k1, del_k1, gam_k1 = nu_k, del_k, gam_k
        x_k1 = np.copy(x_k)
        r_k1 = np.copy(r_k)
        rt_k1 = np.copy(rt_k)
        p_k1 = np.copy(p_k)
        s_k1 = np.copy(s_k)
        st_k1 = np.copy(st_k)

        # solution / residual / preconditioned-residual updates
        x_k = x_k1 + a_k1 * p_k1
        r_k = r_k1 - a_k1 * s_k1
        rt_k = rt_k1 - a_k1 * st_k1

        # "predict" step: recurrence estimate of nu_k = rt_k @ r_k
        if variant == 'm':
            nu_k = - nu_k1 + a_k1**2 * gam_k1
        else:
            nu_k = nu_k1 - 2 * a_k1 * del_k1 + a_k1**2 * gam_k1
        b_k = nu_k / nu_k1
        p_k = rt_k + b_k * p_k1
        s_k = A @ p_k
        st_k = preconditioner(s_k)
        mu_k = p_k @ s_k
        del_k = r_k @ st_k
        gam_k = st_k @ s_k
        nu_k = rt_k @ r_k  # "recompute" step: replace prediction exactly
        a_k = nu_k / mu_k

        for callback in callbacks:
            callback(**locals())

    return output
def pr_pcg(A,b,x0,max_iter,preconditioner=lambda x:x,callbacks=[],**kwargs):
    '''
    predict-and-recompute conjugate gradient (preconditioned)

    Thin wrapper: delegates to pr_master_pcg with variant='pr'.
    '''
    return pr_master_pcg(A, b, x0, max_iter,
                         variant='pr',
                         preconditioner=preconditioner,
                         callbacks=callbacks,
                         **kwargs)
def m_pcg(A,b,x0,max_iter,preconditioner=lambda x:x,callbacks=[],**kwargs):
    '''
    predict-and-recompute Meurant conjugate gradient (preconditioned)

    Thin wrapper: delegates to pr_master_pcg with variant='m'.
    '''
    return pr_master_pcg(A, b, x0, max_iter,
                         variant='m',
                         preconditioner=preconditioner,
                         callbacks=callbacks,
                         **kwargs)
| 27.067797 | 113 | 0.518263 | 763 | 4,791 | 2.95806 | 0.115334 | 0.016837 | 0.01728 | 0.031015 | 0.896766 | 0.88879 | 0.887904 | 0.863093 | 0.840053 | 0.836509 | 0 | 0.033887 | 0.371739 | 4,791 | 176 | 114 | 27.221591 | 0.715947 | 0.132749 | 0 | 0.725664 | 0 | 0 | 0.014085 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053097 | false | 0 | 0.035398 | 0 | 0.141593 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4e82fb4ca981b69836b82cb2c7dd6e822e3c54a | 1,635 | py | Python | sdk/python/pulumi_google_native/healthcare/v1beta1/__init__.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | [
"Apache-2.0"
] | 44 | 2021-04-18T23:00:48.000Z | 2022-02-14T17:43:15.000Z | sdk/python/pulumi_google_native/healthcare/v1beta1/__init__.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | [
"Apache-2.0"
] | 354 | 2021-04-16T16:48:39.000Z | 2022-03-31T17:16:39.000Z | sdk/python/pulumi_google_native/healthcare/v1beta1/__init__.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | [
"Apache-2.0"
] | 8 | 2021-04-24T17:46:51.000Z | 2022-01-05T10:40:21.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from ... import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .annotation import *
from .annotation_store import *
from .attribute_definition import *
from .consent import *
from .consent_artifact import *
from .consent_store import *
from .dataset import *
from .dataset_annotation_store_iam_policy import *
from .dataset_consent_store_iam_policy import *
from .dataset_dicom_store_iam_policy import *
from .dataset_fhir_store_iam_policy import *
from .dataset_hl7_v2_store_iam_policy import *
from .dataset_iam_policy import *
from .dicom_store import *
from .fhir_store import *
from .get_annotation import *
from .get_annotation_store import *
from .get_attribute_definition import *
from .get_consent import *
from .get_consent_artifact import *
from .get_consent_store import *
from .get_dataset import *
from .get_dataset_annotation_store_iam_policy import *
from .get_dataset_consent_store_iam_policy import *
from .get_dataset_dicom_store_iam_policy import *
from .get_dataset_fhir_store_iam_policy import *
from .get_dataset_hl7_v2_store_iam_policy import *
from .get_dataset_iam_policy import *
from .get_dicom_store import *
from .get_fhir import *
from .get_fhir_store import *
from .get_hl7_v2_store import *
from .get_message import *
from .get_user_data_mapping import *
from .hl7_v2_store import *
from .message import *
from .user_data_mapping import *
from ._inputs import *
from . import outputs
| 34.0625 | 80 | 0.809174 | 245 | 1,635 | 5.040816 | 0.228571 | 0.315789 | 0.2 | 0.184615 | 0.466397 | 0.324696 | 0.319028 | 0.0583 | 0 | 0 | 0 | 0.006298 | 0.125994 | 1,635 | 47 | 81 | 34.787234 | 0.857943 | 0.124159 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
be0a3181fd0a07afa04c33944fd630d521c4855e | 125 | py | Python | countersign/__init__.py | JSextonn/countersign | 6db0f312c119b6bf3be4dba669bdd6513751a0b3 | [
"MIT"
] | null | null | null | countersign/__init__.py | JSextonn/countersign | 6db0f312c119b6bf3be4dba669bdd6513751a0b3 | [
"MIT"
] | null | null | null | countersign/__init__.py | JSextonn/countersign | 6db0f312c119b6bf3be4dba669bdd6513751a0b3 | [
"MIT"
] | null | null | null | __version__ = '0.1.1'
from countersign.core import *
from countersign.password import *
from countersign.passphrase import * | 25 | 36 | 0.792 | 16 | 125 | 5.9375 | 0.5625 | 0.473684 | 0.442105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027273 | 0.12 | 125 | 5 | 36 | 25 | 0.836364 | 0 | 0 | 0 | 0 | 0 | 0.039683 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.5 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
be177a6d2343fbe5fc4a673c226c93107cefe396 | 141 | py | Python | luvina/backend/__init__.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | luvina/backend/__init__.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | luvina/backend/__init__.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | # from .common import *
# from .enchant_backend import *
# from .nltk_backend import *
# from .spacy_backend import *
from .backend import *
| 23.5 | 32 | 0.730496 | 18 | 141 | 5.555556 | 0.388889 | 0.4 | 0.51 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.170213 | 141 | 5 | 33 | 28.2 | 0.854701 | 0.77305 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
be22aa40c377ac1aeb87cbba5f438844887eecdd | 321,662 | py | Python | Uncertainty/data/case-ln/case_ln_159.py | thanever/SOC | 9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4 | [
"MIT"
] | null | null | null | Uncertainty/data/case-ln/case_ln_159.py | thanever/SOC | 9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4 | [
"MIT"
] | null | null | null | Uncertainty/data/case-ln/case_ln_159.py | thanever/SOC | 9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4 | [
"MIT"
] | null | null | null | from numpy import array
def case_ln_159():
ppc = {"version": '2'}
ppc["baseMVA"] = 100.0
ppc["bus"] = array([
[1.0, 1.0, 69.2657, 18.4709, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[3.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[4.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[5.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[6.0, 1.0, 13.8531, 5.0795, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[7.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[8.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[9.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 11.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[10.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 11.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[11.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[12.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[13.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[14.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[15.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[16.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[17.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[18.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[19.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[20.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[21.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[22.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[23.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[24.0, 2.0, 4.6177, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 6.3, 1.0, 1.1, 0.95, 0.6, 10 ],
[25.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[26.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[27.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[28.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[29.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[30.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[31.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[32.0, 2.0, 3.2324, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[33.0, 2.0, 2.7706, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[34.0, 2.0, 2.7706, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[35.0, 2.0, 6.4648, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[36.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[37.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[38.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[39.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[40.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[41.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[42.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[43.0, 1.0, 78.3164, 2.9369, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[44.0, 1.0, 92.3543, 5.7814, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[45.0, 1.0, 92.3543, 5.7814, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[46.0, 1.0, 92.3543, 5.7814, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[47.0, 1.0, 92.3543, 5.7814, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[48.0, 1.0, 46.1771, 6.0123, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[49.0, 1.0, 96.0485, 6.0123, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[50.0, 1.0, 19.2097, 6.0123, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[51.0, 1.0, 55.4126, 18.4709, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[52.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[53.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[54.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[55.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[56.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[57.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[58.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[59.0, 1.0, 92.3543, 5.7814, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[107.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[108.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[109.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[110.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[111.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[112.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[113.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[114.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[115.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[116.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[117.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[118.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[119.0, 1.0, 110.8252, 55.4126, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[120.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[121.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[122.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[123.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[307.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[310.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[315.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[316.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[482.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[483.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[484.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[499.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[500.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[508.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[539.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[540.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[541.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[542.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[552.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[553.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[556.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[557.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1418.0, 1.0, 129.296, 36.9417, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1454.0, 1.0, 63.7245, 17.5473, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1473.0, 1.0, 150.5375, 27.7063, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1545.0, 1.0, 60.0303, 13.8531, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1555.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1556.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1557.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1558.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1559.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1560.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1561.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1562.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1563.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1564.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1565.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1566.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1567.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1568.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1569.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1570.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1571.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1572.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1573.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1574.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1575.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1576.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1577.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1578.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1579.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1580.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1581.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1582.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1583.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1584.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1585.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1586.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1587.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1588.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1589.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1590.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1591.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1592.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1593.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1594.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1595.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1596.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1597.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1598.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1599.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1600.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1601.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1602.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1603.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1604.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1605.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1606.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1607.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1608.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1609.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1610.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1611.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1612.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1613.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1614.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1615.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1616.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1617.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1618.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1619.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1620.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1621.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1622.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1623.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1624.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1625.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1626.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1627.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1628.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1629.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1630.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1631.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1632.0, 2.0, 6.4648, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1633.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1634.0, 2.0, 6.4648, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1635.0, 1.0, 277.0629, 33.0721, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1641.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1642.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1643.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1644.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1645.0, 2.0, 4.6177, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 6.3, 1.0, 1.1, 0.95, 0.6, 10 ],
[1646.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1647.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1648.0, 2.0, 6.4648, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1649.0, 2.0, 3.2324, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1650.0, 2.0, 6.4648, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1651.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1652.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1653.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1654.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1655.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1656.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1657.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1658.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1659.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1660.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1661.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1662.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1663.0, 3.0, 55.4126, 10.159, 0.0, 0.0, 1.0, 1.0, 0.0, 27.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1664.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1665.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1666.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1667.0, 2.0, 40.7282, 11.6366, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1668.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1669.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1670.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1671.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1672.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1673.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1674.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1675.0, 2.0, 14.5458, 4.8486, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1676.0, 2.0, 14.5458, 4.8486, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1677.0, 2.0, 14.5458, 5.2919, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1678.0, 2.0, 14.5458, 5.2919, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1679.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1680.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1681.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1682.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1683.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1684.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1685.0, 2.0, 8.7275, 3.8789, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1686.0, 2.0, 14.5458, 5.2919, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1687.0, 2.0, 14.5458, 5.2919, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1688.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1689.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1690.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1691.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1692.0, 2.0, 55.4126, 10.159, 0.0, 0.0, 1.0, 1.0, 0.0, 27.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1693.0, 2.0, 16.6238, 5.5967, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1694.0, 2.0, 16.6238, 5.5967, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1695.0, 2.0, 16.6238, 5.5967, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1696.0, 2.0, 16.6238, 5.5967, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1697.0, 2.0, 27.7063, 8.3119, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1698.0, 2.0, 27.7063, 8.3119, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1699.0, 2.0, 27.7063, 8.3119, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1700.0, 2.0, 9.2354, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1701.0, 2.0, 9.2354, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1702.0, 2.0, 16.6238, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1703.0, 2.0, 16.6238, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1704.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1705.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1706.0, 2.0, 16.6238, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 16.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1707.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1708.0, 2.0, 7.3883, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1709.0, 2.0, 7.3883, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1710.0, 2.0, 9.2354, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1711.0, 2.0, 16.6238, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1712.0, 2.0, 16.6238, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1713.0, 2.0, 13.8531, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1714.0, 2.0, 13.8531, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1715.0, 2.0, 13.8531, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1716.0, 2.0, 13.8531, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1717.0, 2.0, 44.3301, 8.3304, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1718.0, 2.0, 44.3301, 8.3304, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1719.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1720.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1721.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1722.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1723.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1724.0, 2.0, 9.2354, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1725.0, 2.0, 9.2354, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1726.0, 2.0, 9.2354, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1727.0, 2.0, 9.2354, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1728.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1729.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1730.0, 2.0, 11.0825, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1731.0, 2.0, 11.0825, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1732.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1733.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1734.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1735.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1736.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1737.0, 2.0, 11.0825, 3.3617, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1738.0, 2.0, 11.0825, 3.3247, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1739.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1740.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1741.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1742.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1743.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1744.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 22.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1745.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 22.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1746.0, 2.0, 101.5897, 32.324, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1747.0, 2.0, 9.2354, 2.7706, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1748.0, 2.0, 38.7888, 10.0759, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1749.0, 2.0, 38.7888, 10.0759, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1750.0, 2.0, 38.7888, 10.0759, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1751.0, 2.0, 38.7888, 10.0759, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1752.0, 2.0, 19.3944, 5.0333, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1754.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1755.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1756.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1757.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1758.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1759.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1760.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1761.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1762.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1763.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1764.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1765.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1766.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1767.0, 1.0, 92.3543, 6.0307, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1768.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1769.0, 1.0, 92.3543, 5.7814, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1770.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1771.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1772.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1773.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1774.0, 1.0, 50.7949, 6.631, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1775.0, 1.0, 92.3543, 5.7814, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1776.0, 1.0, 46.1771, 6.0307, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1777.0, 1.0, 78.5012, 18.4709, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1778.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1779.0, 1.0, 46.1771, 6.0307, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1780.0, 1.0, 138.5314, 20.5581, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1781.0, 1.0, 50.7949, 6.631, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1782.0, 1.0, 48.0242, 6.2709, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1783.0, 1.0, 48.0242, 6.2709, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1784.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1785.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1786.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1787.0, 1.0, 50.7949, 20.318, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1788.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1789.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1790.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1791.0, 1.0, 308.2232, 93.5641, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1792.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1793.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1794.0, 1.0, 36.9417, 9.2354, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1795.0, 1.0, 36.3968, 5.1534, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1796.0, 1.0, 92.3543, 31.4189, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1797.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1798.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1799.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1800.0, 1.0, 96.0485, 32.675, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1801.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1802.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1803.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1804.0, 1.0, 67.5479, 40.5343, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1805.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1806.0, 1.0, 24.9634, -9.4017, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1807.0, 1.0, 92.3543, 18.4709, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1808.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1809.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1810.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1811.0, 1.0, 0.0, 0.0, 0.0, -2.40000384, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1812.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1813.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1814.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1815.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1816.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1817.0, 1.0, 9.1431, 1.57, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1818.0, 1.0, 76.063, 11.5535, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1819.0, 1.0, 4.5438, 1.1359, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1820.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1821.0, 1.0, 53.1314, 11.9137, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1822.0, 1.0, 92.3543, 5.7814, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1823.0, 1.0, 46.1771, 31.4189, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1824.0, 1.0, 50.1484, 8.6813, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1825.0, 1.0, 8.7737, 1.57, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1826.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1827.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1828.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1829.0, 1.0, 221.7242, 46.2233, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1830.0, 1.0, 25.8592, 1.8471, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1831.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1832.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1833.0, 1.0, 101.5897, 33.2475, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1834.0, 1.0, 0.0, 0.0, 0.0, -1.4999925, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1835.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1836.0, 1.0, 43.9884, 12.5694, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1837.0, 1.0, 64.7127, -1.9579, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1838.0, 1.0, 6.9543, 1.6624, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1839.0, 1.0, 21.0568, 7.8501, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1840.0, 1.0, 57.2412, 11.7198, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1841.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1842.0, 1.0, 71.1128, 12.3939, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1843.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1844.0, 1.0, 27.7063, 31.4189, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1845.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1846.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1847.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1848.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1849.0, 1.0, 0.0, 0.0, 0.0, 5.74999045, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1850.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1851.0, 1.0, 0.0, 0.0, 0.0, -1.20000048, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1852.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1853.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1854.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1855.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1856.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1857.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1858.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1859.0, 1.0, 52.6419, 17.5473, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1860.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1861.0, 1.0, 92.0126, 18.8218, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1862.0, 1.0, 0.0, 0.0, 0.0, 0.64800415, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1863.0, 1.0, 0.0, 0.0, 0.0, -3.8340098, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1864.0, 1.0, 0.0, 0.0, 0.0, -1.97550375, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1865.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1866.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1867.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1868.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1869.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1870.0, 1.0, 7.9425, 1.136, 0.0, 0.0, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1871.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1872.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1873.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1874.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1875.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1876.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1877.0, 1.0, 0.0, 0.0, 0.0, -1.7999964, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1878.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1879.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1880.0, 1.0, 0.0, 0.0, 0.0, 0.599988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1881.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1882.0, 1.0, 0.0, 0.0, 0.0, -1.20000048, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1883.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1884.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1885.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1886.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1887.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1888.0, 1.0, 10.9532, 1.6347, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1889.0, 1.0, 0.0, 0.0, 0.0, -0.6000024, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1890.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1891.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1892.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1893.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1894.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1895.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1896.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1897.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1898.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1899.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1900.0, 1.0, 79.5078, 5.4489, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1901.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1902.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1903.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1904.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1905.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1906.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1907.0, 1.0, 80.3482, 20.0409, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1908.0, 1.0, 34.1711, 7.5731, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1909.0, 1.0, 52.3649, 20.946, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1910.0, 1.0, 64.648, 22.6268, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1911.0, 1.0, 104.7298, 20.946, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1912.0, 1.0, 49.3172, 12.8372, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1913.0, 1.0, 115.5629, -3.334, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1914.0, 1.0, 23.8551, 7.7208, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1915.0, 1.0, 31.2896, 10.0574, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1916.0, 1.0, 50.7949, 23.0424, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1917.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1918.0, 1.0, 192.0969, 47.0637, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1919.0, 1.0, 60.8615, -38.8812, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1920.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1921.0, 1.0, 69.6074, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1922.0, 1.0, 64.0569, 24.1137, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1923.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1924.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1925.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1926.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1927.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1928.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1929.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1930.0, 1.0, 0.0, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1931.0, 1.0, 101.5897, 6.354, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1932.0, 1.0, 53.9441, 19.5052, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1933.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1934.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1935.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1936.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1937.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1938.0, 1.0, 30.6616, 8.9584, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1939.0, 1.0, 153.8715, 23.9198, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1940.0, 1.0, 82.4724, 8.6813, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1941.0, 1.0, 96.6211, 23.4395, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1942.0, 1.0, 223.4974, 70.0969, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1943.0, 1.0, 55.4218, 9.5125, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1944.0, 1.0, 137.8295, 10.7593, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1945.0, 1.0, 52.3649, 20.946, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1946.0, 1.0, 144.7192, 22.5345, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1947.0, 1.0, 137.0538, 22.4421, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1948.0, 1.0, 176.212, 58.7373, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1949.0, 1.0, 68.7116, -0.8312, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1950.0, 1.0, 149.891, 40.8206, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1951.0, 1.0, 123.3299, 30.0613, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1952.0, 1.0, 6.3355, 1.1544, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1953.0, 1.0, 35.6672, 11.0179, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1954.0, 1.0, 122.8312, 17.5473, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1955.0, 1.0, 92.3543, 6.0307, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1956.0, 1.0, 20.6874, 6.7419, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1957.0, 1.0, 0.0, 0.0, 0.0, -2.3999952, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1958.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1959.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1960.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1961.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1962.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1963.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1964.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1965.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1966.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1967.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1968.0, 1.0, 160.5487, 9.7896, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1969.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1970.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1971.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1972.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1973.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1974.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1975.0, 1.0, 0.0, 0.0, 0.0, -1.08843537, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1976.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1977.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1978.0, 1.0, 202.2744, 23.5319, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1979.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1980.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1981.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1982.0, 1.0, 17.2703, 6.1878, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1983.0, 1.0, 44.2377, 19.4868, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1984.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1985.0, 1.0, 269.3051, 108.7195, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1986.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1987.0, 1.0, 0.0, 0.0, 0.0, -1.23967967, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1988.0, 1.0, 179.9985, 34.0787, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1989.0, 1.0, 68.3422, 24.0121, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1990.0, 1.0, 110.8344, 40.9407, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1991.0, 1.0, 145.3749, 56.9918, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1992.0, 1.0, 115.4429, 14.0379, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1993.0, 1.0, 50.8872, 23.8274, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1994.0, 1.0, 107.9622, 18.3785, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1995.0, 1.0, 99.1885, 31.6775, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1996.0, 1.0, 0.0, 0.0, 0.0, -2.999994, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1997.0, 1.0, 0.0, 0.0, 0.0, -1.7999964, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1998.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1999.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2000.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2001.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2002.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2003.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2004.0, 1.0, 99.6503, 23.6427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2005.0, 1.0, 34.9099, 5.9107, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2006.0, 1.0, 159.7729, 47.193, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2007.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2008.0, 1.0, 115.1658, 14.121, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2009.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2010.0, 1.0, 0.0, 0.0, 0.0, 13.8608871, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2011.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2012.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2013.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2014.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2015.0, 1.0, 127.3566, 4.3222, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2016.0, 1.0, 74.2436, 13.2528, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2017.0, 1.0, 0.0, 0.0, 0.0, 0.599988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2018.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2019.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2020.0, 1.0, 42.9725, 13.3083, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2021.0, 1.0, 101.1834, 15.7464, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2022.0, 1.0, 0.0, 0.0, 0.0, 1.29600829, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2023.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2024.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2025.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2026.0, 1.0, 88.8448, 9.0507, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2027.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2028.0, 1.0, 165.5912, 27.7986, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2029.0, 1.0, 73.8834, 23.6427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2030.0, 1.0, 103.4368, 2.7706, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2031.0, 1.0, 0.0, 0.0, 0.0, -0.9000009, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2032.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2033.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2034.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2035.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2036.0, 1.0, 108.1469, 21.4262, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2037.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2038.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2039.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2040.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2041.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2042.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2043.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2044.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2045.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2046.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2047.0, 1.0, 119.9682, -16.6145, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2048.0, 1.0, 13.7331, 3.1678, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2049.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2050.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2051.0, 1.0, 120.0606, 18.4709, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2052.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2053.0, 1.0, 292.3013, 57.2597, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2054.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2055.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2056.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2057.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2058.0, 1.0, 90.9875, 11.2488, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2059.0, 1.0, 76.1092, 14.7675, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2060.0, 1.0, 225.1505, 78.2703, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2061.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2062.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2063.0, 1.0, 102.3286, 19.8562, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2064.0, 1.0, 51.506, 10.4637, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2065.0, 1.0, 98.542, 27.4292, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2066.0, 1.0, 153.3081, 24.0121, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2067.0, 1.0, 143.9803, 27.891, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2068.0, 1.0, 100.6662, 11.4519, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2069.0, 1.0, 183.471, 34.2357, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2070.0, 1.0, 247.6019, 56.8903, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2071.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2072.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2073.0, 1.0, 125.5372, 54.84, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2074.0, 1.0, 88.3831, 28.7222, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2075.0, 1.0, 172.7025, 43.8683, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2076.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2077.0, 1.0, 0.0, 0.0, 0.0, 0.900009, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2078.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2079.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2080.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2081.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2082.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2083.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2084.0, 1.0, 96.2332, 24.6586, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2085.0, 1.0, 51.349, 19.3021, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2086.0, 1.0, 78.1317, 16.3467, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2087.0, 1.0, 131.5125, 40.4512, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2088.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2089.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2090.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2091.0, 1.0, 119.8759, -13.8162, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2092.0, 1.0, 128.5572, 42.3906, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2093.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2094.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2095.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2096.0, 1.0, 10.473, 3.805, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2097.0, 1.0, 94.9402, 37.5882, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2098.0, 1.0, 90.4149, 31.4928, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2099.0, 1.0, 94.3769, 20.5211, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2100.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2101.0, 1.0, 174.9375, 49.9821, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2102.0, 1.0, 209.3579, 73.3847, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2103.0, 1.0, 151.8766, 15.34, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2104.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2105.0, 1.0, 304.5568, 97.8956, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2106.0, 1.0, 71.2698, 2.7429, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2107.0, 1.0, 73.9481, 25.6745, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2108.0, 1.0, 352.9781, 63.078, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2109.0, 1.0, 278.91, 38.7888, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2110.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2111.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2112.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2113.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2114.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2115.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2116.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2117.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2118.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2119.0, 1.0, 30.5693, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2120.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2121.0, 1.0, 355.564, 81.2718, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2122.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2123.0, 1.0, 114.1868, 34.8453, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2124.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2125.0, 1.0, 226.4435, 71.2144, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2126.0, 1.0, 281.1265, 43.8683, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2127.0, 1.0, 147.5822, 40.0818, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2128.0, 1.0, 165.4897, 16.5222, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2129.0, 1.0, 15.0076, 5.9569, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2130.0, 1.0, 126.5254, 30.4769, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2131.0, 1.0, 0.7111, 2.2626, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2132.0, 1.0, 111.2315, 32.0562, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2133.0, 1.0, 199.1159, 6.0954, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2134.0, 1.0, 83.1189, 21.2415, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2135.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2136.0, 1.0, 0.0, 0.0, 0.0, -1.23967967, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2137.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2138.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2139.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2140.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2141.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2142.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2143.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2144.0, 1.0, 0.0, 0.0, 0.0, -1.500015, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2145.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2146.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2147.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2148.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2149.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2150.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2151.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2152.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2153.0, 1.0, 126.9964, 41.0977, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2154.0, 1.0, 99.0962, 11.3596, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2155.0, 1.0, 191.2657, 38.6041, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2156.0, 1.0, 63.9092, 14.7767, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2157.0, 1.0, 36.3876, 20.8721, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2158.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2159.0, 1.0, 48.1166, 10.7131, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2160.0, 1.0, 72.1287, 20.7797, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2161.0, 1.0, 243.4459, 38.1423, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2162.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2163.0, 1.0, 171.3542, 20.8444, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2164.0, 1.0, 110.6404, 9.5125, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2165.0, 1.0, 40.6359, 3.7865, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2166.0, 1.0, 163.3747, 40.0818, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2167.0, 1.0, 80.1635, 16.6238, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2168.0, 1.0, 97.9694, 25.5452, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2169.0, 1.0, 187.6824, 14.4073, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2170.0, 1.0, 190.2498, 31.4005, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2171.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2172.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2173.0, 1.0, 147.342, 35.4456, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2174.0, 1.0, 297.8426, 76.1923, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2175.0, 1.0, 201.7941, 75.2687, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2176.0, 1.0, 227.1916, 4.7101, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2177.0, 1.0, 191.8199, 31.1234, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2178.0, 1.0, 234.9493, 68.5269, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2179.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2180.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2181.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2182.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2183.0, 1.0, 71.1682, 15.7556, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2184.0, 1.0, 118.232, 18.3416, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2185.0, 1.0, 118.7676, 45.7154, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2186.0, 1.0, 166.2377, 30.6616, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2187.0, 1.0, 203.1794, 45.7154, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2188.0, 1.0, 201.8865, 48.9478, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2189.0, 1.0, 90.1378, 10.0666, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2190.0, 1.0, 131.7896, 9.3278, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2191.0, 1.0, 137.0538, 39.1582, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2192.0, 1.0, 165.776, -3.0754, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2193.0, 1.0, 224.4209, 26.7827, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2194.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2195.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2196.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2197.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2198.0, 1.0, 257.8901, 62.7178, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2199.0, 1.0, 55.2279, 15.0815, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2200.0, 1.0, 116.8928, 27.6601, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2201.0, 1.0, 241.4695, 38.1331, 1e-07, -9.9e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2202.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2203.0, 1.0, 53.5655, 14.7767, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2204.0, 1.0, 244.7389, 85.8895, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2205.0, 1.0, 35.0946, 7.3883, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2206.0, 1.0, 145.3657, 55.2279, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2207.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2208.0, 1.0, 52.919, 16.9008, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2209.0, 1.0, 154.6934, 61.1385, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2210.0, 1.0, 58.922, 27.7986, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2211.0, 1.0, 66.218, 10.9902, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2212.0, 1.0, 71.2975, 20.6874, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2213.0, 1.0, 29.6457, 8.0348, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2214.0, 1.0, 197.4535, 65.2945, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2215.0, 1.0, 105.838, 29.7381, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2216.0, 1.0, 75.084, 21.6109, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2217.0, 1.0, 297.3808, 97.4338, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2218.0, 1.0, 76.6541, 32.324, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2219.0, 1.0, 46.1771, 6.0307, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2220.0, 1.0, 124.5859, 26.2286, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2221.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2222.0, 1.0, 136.3888, 20.0778, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2223.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2224.0, 1.0, 128.8342, 19.1173, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2225.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2226.0, 1.0, 168.0848, 76.6541, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2227.0, 1.0, 193.944, 77.5776, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2228.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2229.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2230.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2231.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2232.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2233.0, 1.0, 95.5867, 8.9584, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2234.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ]
])
ppc["gen"] = array([
[1634.0, 40.0, 44.7, 68.2, 0.0, 1.07, 100.0, 1.0, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 129.41, 22.0, 33.0, 33.0, 44.0 ],
[1632.0, 60.0, 43.6, 68.2, 0.0, 1.07, 100.0, 0.0, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 129.41, 22.0, 33.0, 33.0, 44.0 ],
[1629.0, 90.0, 40.8, 77.46, 0.0, 1.07, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 25.0, 37.5, 37.5, 50.0 ],
[1685.0, 154.8, 75.3, 80.0, 0.0, 1.07, 100.0, 1.0, 157.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],
[1706.0, 282.3, 96.3, 185.9, 0.0, 1.07, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.0, 60.0, 90.0, 90.0, 120.0 ],
[1747.0, 79.0, 23.2, 41.5, 0.0, 1.0, 100.0, 0.0, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 88.8888, 15.0, 22.5, 22.5, 30.0 ],
[1746.0, 77.8, 18.4, 41.5, 0.0, 1.0, 100.0, 0.0, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 88.8888, 15.0, 22.5, 22.5, 30.0 ],
[31.0, 100.0, 12.6, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[30.0, 100.0, 12.6, 62.0, 0.0, 1.0, 100.0, 0.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[23.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[4.0, 7.1, 1.8, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1666.0, 193.0, 107.7, 185.9, 0.0, 1.0, 100.0, 1.0, 367.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.7, 70.0, 105.0, 105.0, 140.0 ],
[1665.0, 264.8, 115.6, 185.9, 0.0, 1.0, 100.0, 1.0, 367.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.7, 70.0, 105.0, 105.0, 140.0 ],
[1745.0, 234.1, 26.6, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1744.0, 231.6, 46.9, 216.9, 0.0, 1.02, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1743.0, 258.5, 46.6, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1742.0, 263.3, 101.2, 216.9, 0.0, 1.02, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1664.0, 350.0, 34.0, 216.9, 0.0, 1.015, 100.0, 0.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[26.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[28.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[19.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1741.0, 283.9, 41.3, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1740.0, 262.8, 32.8, 216.9, 0.0, 1.03, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1670.0, 219.8, 92.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1669.0, 299.8, 103.9, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1687.0, 297.4, 102.2, 185.9, 0.0, 1.01, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1686.0, 297.7, 86.4, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1729.0, 266.4, 133.3, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1728.0, 225.0, 140.2, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1696.0, 209.0, 112.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1695.0, 209.0, 89.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1690.0, 133.1, 0.0, 88.0, 0.0, 1.0, 100.0, 1.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1659.0, 22.2, -0.9, 62.0, 0.0, 1.0, 100.0, 1.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1738.0, 134.2, 51.3, 50.0, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1737.0, 155.4, 40.6, 50.0, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1707.0, 264.3, 28.2, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1752.0, 254.3, 31.4, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[13.0, 90.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1703.0, 93.2, 0.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[1702.0, 144.4, 17.6, 123.9, 0.0, 1.0, 100.0, 0.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[1704.0, 107.3, 0.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[1705.0, 107.7, 9.9, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[34.0, 30.0, 20.0, 35.0, 0.0, 1.003, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 40.0, 6.0, 9.0, 9.0, 12.0 ],
[33.0, 30.0, 20.0, 35.0, 0.0, 1.0, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 40.0, 6.0, 9.0, 9.0, 12.0 ],
[1678.0, 257.9, 99.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1677.0, 128.6, 88.6, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1655.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],
[27.0, 48.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1657.0, 90.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1650.0, 1068.2, 202.5, 600.0, 0.0, 1.0, 100.0, 1.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1278.0, 223.6, 335.4, 335.4, 447.2 ],
[1648.0, 1000.0, 300.0, 600.0, 0.0, 1.0, 100.0, 1.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1277.778, 230.0, 345.0, 345.0, 460.0 ],
[35.0, 1118.0, 300.0, 600.0, 0.0, 1.0, 100.0, 0.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1278.0, 223.6, 335.4, 335.4, 447.2 ],
[1682.0, 246.6, 95.4, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[1681.0, 275.9, 100.9, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[2116.0, 58.3, 2.4, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],
[2114.0, 67.9, 2.3, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],
[2113.0, 67.0, 4.7, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],
[2112.0, 32.2, 5.0, 5.0, 0.0, 1.0, 100.0, 0.0, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 41.14, 7.2, 10.8, 10.8, 14.4 ],
[2110.0, 32.6, 5.4, 5.0, 0.0, 1.0, 100.0, 0.0, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 41.14, 7.2, 10.8, 10.8, 14.4 ],
[1736.0, 30.2, 5.9, 20.0, 0.0, 1.0, 100.0, 0.0, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 49.412, 8.4, 12.6, 12.6, 16.8 ],
[1735.0, 30.8, 6.3, 20.0, 0.0, 1.0, 100.0, 0.0, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 49.412, 8.4, 12.6, 12.6, 16.8 ],
[1734.0, 200.0, 88.0, 123.9, 0.0, 1.0, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1733.0, 200.0, 123.9, 123.9, 0.0, 1.03, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1732.0, 130.3, 19.7, 123.9, 0.0, 1.0, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1694.0, 212.5, 27.6, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1693.0, 215.3, 38.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[25.0, 48.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1701.0, 472.5, 159.0, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1700.0, 563.6, 210.1, 290.6, 0.0, 1.03, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1652.0, 50.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0073, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1645.0, 50.0, 20.0, 60.0, 0.0, 1.03, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],
[24.0, 50.0, 20.0, 60.0, 0.0, 1.03, 100.0, 0.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],
[1656.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 1.0, 0.0282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],
[14.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 0.0, 0.0065, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],
[1679.0, 140.0, 9.6, 62.0, 0.0, 1.0, 100.0, 1.0, 0.0158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[116.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[18.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[17.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[16.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[15.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1612.0, 80.6, 23.4, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[1609.0, 85.9, 28.5, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[1691.0, 100.8, 44.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.471, 30.0, 45.0, 45.0, 60.0 ],
[1662.0, 106.9, 43.8, 123.9, 0.0, 1.0, 100.0, 0.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.471, 30.0, 45.0, 45.0, 60.0 ],
[1731.0, 119.9, 64.6, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1730.0, 121.8, 59.9, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1649.0, 200.0, 180.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[32.0, 200.0, 34.0, 216.9, 0.0, 1.015, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1651.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[1653.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[1654.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[1674.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[20.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.1057, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1668.0, 600.0, 283.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1727.0, 200.0, 54.0, 130.1, 0.0, 0.98, 100.0, 0.0, 210.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 247.06, 42.0, 63.0, 63.0, 84.0 ],
[1726.0, 120.7, 61.9, 123.9, 0.0, 0.98, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1697.0, 450.0, 154.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1643.0, 345.0, 100.0, 62.0, 0.0, 1.0, 100.0, 0.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1725.0, 142.8, 36.0, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1724.0, 138.7, 67.0, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1710.0, 128.8, 69.5, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.294, 40.0, 60.0, 60.0, 80.0 ],
[1672.0, 184.5, 123.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1671.0, 181.3, 127.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1723.0, 34.9, 3.9, 20.0, 0.0, 1.0, 100.0, 0.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],
[1722.0, 90.0, 1.0, 50.0, 0.0, 1.01, 100.0, 1.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1721.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1720.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1719.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1646.0, 125.0, 40.0, 80.0, 0.0, 1.03, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],
[1647.0, 125.0, 40.0, 80.0, 0.0, 1.03, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],
[1676.0, 159.5, 85.5, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1675.0, 159.5, 79.9, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1718.0, 610.2, 90.7, 387.5, 0.0, 1.0, 100.0, 1.0, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 888.89, 160.0, 240.0, 240.0, 320.0 ],
[1717.0, 574.5, 167.0, 387.5, 0.0, 1.0, 100.0, 1.0, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 888.89, 160.0, 240.0, 240.0, 320.0 ],
[1692.0, 1004.3, 224.5, 484.0, 0.0, 1.0, 100.0, 1.0, 1000.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1120.0, 201.6, 302.4, 302.4, 403.2 ],
[1663.0, 814.4, 190.8, 484.0, 0.0, 1.0, 100.0, 1.0, 1000.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1120.0, 201.6, 302.4, 302.4, 403.2 ],
[1709.0, 105.1, 50.2, 77.46, 0.0, 1.03, 100.0, 1.0, 135.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 27.0, 40.5, 40.5, 54.0 ],
[1708.0, 101.3, 47.1, 77.46, 0.0, 1.03, 100.0, 1.0, 135.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 27.0, 40.5, 40.5, 54.0 ],
[5.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 1.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[29.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[2042.0, 39.5, 8.5, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[2040.0, 38.7, 4.5, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[2039.0, 39.0, 4.8, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[2037.0, 40.1, 6.6, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[1599.0, 50.0, 27.0, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 60.0, 10.0, 15.0, 15.0, 20.0 ],
[1597.0, 50.0, 27.0, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 60.0, 10.0, 15.0, 15.0, 20.0 ],
[1661.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1699.0, 597.1, 168.2, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1698.0, 551.0, 167.2, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1714.0, 213.5, 57.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1713.0, 235.0, 71.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1716.0, 222.7, 53.2, 185.9, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1715.0, 202.3, 59.3, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1680.0, 20.6, 6.6, 4.95, -0.0, 1.0, 100.0, 1.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 49.5, 9.9, 14.85, 14.85, 19.8 ],
[1658.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[21.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1667.0, 594.9, 157.8, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1673.0, 600.0, 137.0, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1712.0, 256.7, 92.1, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1711.0, 256.7, 75.7, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1749.0, 564.0, 103.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1748.0, 543.0, 116.0, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1684.0, 235.0, 80.0, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[1683.0, 234.4, 74.8, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[22.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 1.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1660.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1689.0, 114.9, -7.7, 62.0, 0.0, 1.0, 100.0, 1.0, 0.0253, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[117.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[110.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[108.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0154, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1688.0, 91.2, -3.3, 62.0, 0.0, 1.0, 100.0, 1.0, 0.1089, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[118.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[111.0, 50.0, 10.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.2595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[107.0, 50.0, 10.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.6456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1751.0, 497.9, 119.0, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1750.0, 506.0, 142.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ]
])
ppc["branch"] = array([
[1418.0, 2021.0, 0.000709, 0.03936, 0.0061, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[541.0, 2024.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[540.0, 2024.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1545.0, 1418.0, 0.00764, 0.040964, 0.06498, 70.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1545.0, 1418.0, 0.007179, 0.042257, 0.064288, 70.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1545.0, 2021.0, 0.0124, 0.0812, 0.1232, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[542.0, 1960.0, 0.001528, 0.02064, 2.0724, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[539.0, 1960.0, 0.00172, 0.02296, 2.21372, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2234.0, 2233.0, 0.0, 0.187, 0.281, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1870.0, 1871.0, 0.0055, 0.2, 0.3, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1804.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1804.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1913.0, 0.002785, 0.020342, 0.06345, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1913.0, 0.002804, 0.020317, 0.063616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 2193.0, 0.0007, 0.0031, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 2193.0, 0.0007, 0.0031, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1869.0, 2170.0, 0.0, 0.0001, 0.0002, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 2231.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1962.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1988.0, 0.00046, 0.003737, 0.012788, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1988.0, 0.000424, 0.003818, 0.01291, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1993.0, 0.001928, 0.011229, 0.034974, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1993.0, 0.001775, 0.011229, 0.034426, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1824.0, 0.00242, 0.01694, 0.049586, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1824.0, 5e-06, 3.5e-05, 2.4e-05, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1839.0, 0.000545, 0.004212, 0.013316, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1839.0, 0.000541, 0.004268, 0.013416, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1966.0, 1965.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1966.0, 1961.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1966.0, 2034.0, 0.000436, 0.005137, 0.500594, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1763.0, 2099.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2192.0, 1782.0, 0.002004, 0.011367, 0.016964, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2192.0, 1840.0, 0.001859, 0.011245, 0.03521, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2192.0, 1840.0, 0.001995, 0.011437, 0.033768, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1794.0, 2208.0, 0.002049, 0.019073, 0.054854, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1794.0, 2026.0, 0.004879, 0.030837, 0.09544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1796.0, 2220.0, 0.001408, 0.006842, 0.024408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1796.0, 2220.0, 0.001394, 0.006874, 0.024286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1999.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1998.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2153.0, 0.008206, 0.048173, 0.133258, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2153.0, 0.007348, 0.042683, 0.114282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2152.0, 0.007455, 0.049655, 0.13954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1776.0, 0.007141, 0.033921, 0.09508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2065.0, 0.0017, 0.0076, 0.0198, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2065.0, 0.0018, 0.00704, 0.0182, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2004.0, 0.0041, 0.0196, 0.0546, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1989.0, 0.005358, 0.0248, 0.0503, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1989.0, 0.004066, 0.021045, 0.057736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2036.0, 0.0139, 0.0491, 0.1352, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2000.0, 1931.0, 0.001403, 0.007678, 0.020786, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 2002.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 2001.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 115.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 1970.0, 0.000812, 0.015612, 1.68775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 1972.0, 0.000816, 0.015984, 1.68775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 1789.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 483.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[115.0, 109.0, 0.001236, 0.013293, 1.480528, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2191.0, 1837.0, 0.001635, 0.012705, 0.037662, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2191.0, 1818.0, 0.01022, 0.042629, 0.06611, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2226.0, 2210.0, 0.001173, 0.005248, 0.008748, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2226.0, 2190.0, 0.00036, 0.0073, 0.0134, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2189.0, 2188.0, 0.0023, 0.0078, 0.0138, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2189.0, 1907.0, 0.002424, 0.014193, 0.040774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2189.0, 2187.0, 0.007996, 0.039339, 0.110062, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2186.0, 2217.0, 0.0055, 0.0238, 0.0364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2186.0, 1956.0, 0.002, 0.01, 0.016, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2186.0, 2185.0, 0.0028, 0.0141, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2219.0, 2218.0, 0.002676, 0.015582, 0.050366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2219.0, 2218.0, 0.002791, 0.015447, 0.050366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1796.0, 0.001819, 0.009567, 0.03228, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1796.0, 0.00179, 0.009574, 0.03228, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2219.0, 0.001167, 0.006646, 0.023698, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2219.0, 0.001154, 0.006607, 0.023536, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2215.0, 0.0029, 0.0172, 0.0498, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2215.0, 0.003, 0.0174, 0.0496, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1947.0, 0.00434, 0.02042, 0.09428, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2221.0, 2216.0, 0.0005, 0.00293, 0.008814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2216.0, 0.0005, 0.00293, 0.008814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1938.0, 0.001983, 0.0125, 0.038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2217.0, 0.0026, 0.0159, 0.045, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2217.0, 0.0025, 0.0156, 0.04604, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1956.0, 0.001996, 0.015004, 0.049722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1956.0, 0.001942, 0.015223, 0.048658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2214.0, 0.00705, 0.0366, 0.0638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1970.0, 122.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1970.0, 2032.0, 0.001038, 0.010782, 0.99978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 112.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 1970.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 1971.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 2034.0, 0.000863, 0.008857, 0.583716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[122.0, 121.0, 0.000863, 0.008857, 0.583716, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1898.0, 1970.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1898.0, 122.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1898.0, 120.0, 0.001351, 0.015445, 1.51142, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1896.0, 1972.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1896.0, 1897.0, 0.001355, 0.017948, 1.76, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2184.0, 2169.0, 0.002551, 0.012, 0.032826, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2184.0, 2169.0, 0.002288, 0.012288, 0.051244, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2203.0, 2134.0, 0.0149, 0.0858, 0.1412, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2203.0, 1949.0, 0.0105, 0.05925, 0.0525, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2203.0, 2208.0, 0.00447, 0.02537, 0.03784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2183.0, 2222.0, 0.001446, 0.009469, 0.030074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 1473.0, 0.0218, 0.0638, 0.066, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2212.0, 1831.0, 0.004731, 0.023671, 0.047954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 2097.0, 0.003778, 0.017949, 0.05031, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2212.0, 2182.0, 0.0035, 0.0205, 0.0556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 2182.0, 0.007552, 0.0302, 0.046742, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 1909.0, 0.004017, 0.028224, 0.081516, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2181.0, 57.0, 1e-06, 1e-06, 2e-06, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2181.0, 2209.0, 0.0143, 0.075, 0.1148, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2181.0, 2180.0, 0.0006, 0.0032, 0.005, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2181.0, 2179.0, 0.0052, 0.0259, 0.038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1770.0, 1912.0, 0.0004, 0.003044, 0.009322, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1912.0, 0.0004, 0.003044, 0.009322, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2155.0, 0.000856, 0.006515, 0.019094, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2155.0, 0.000856, 0.006515, 0.019094, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2224.0, 0.00164, 0.012482, 0.036582, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2224.0, 0.00164, 0.012482, 0.036582, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2030.0, 0.001344, 0.010229, 0.02998, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2030.0, 0.001344, 0.010229, 0.02998, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1940.0, 0.001313, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1940.0, 0.001313, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1772.0, 1771.0, 0.000697, 0.008904, 0.966246, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1772.0, 1771.0, 0.000697, 0.008904, 0.966246, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1944.0, 42.0, 0.003347, 0.019091, 0.05291, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1944.0, 1888.0, 0.00452, 0.021267, 0.06035, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1944.0, 1888.0, 0.0033, 0.021, 0.061034, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[40.0, 2157.0, 0.002254, 0.015419, 0.044362, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 1985.0, 0.0004, 0.0018, 0.0044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 1985.0, 0.0004, 0.0018, 0.0044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2193.0, 0.0003, 0.0017, 0.004, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2193.0, 0.0003, 0.0025, 0.005, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2090.0, 0.0019, 0.0086, 0.0214, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2087.0, 0.0008, 0.0055, 0.0142, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2226.0, 0.002291, 0.017079, 0.050654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2226.0, 0.00258, 0.018126, 0.05235, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 1856.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2227.0, 0.004044, 0.029321, 0.090328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2227.0, 0.003984, 0.029357, 0.09127, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2074.0, 0.001113, 0.006391, 0.02179, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2074.0, 0.001088, 0.006441, 0.021698, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1813.0, 1928.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1812.0, 1924.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 1970.0, 0.0012, 0.015315, 1.662034, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1928.0, 1972.0, 0.0012, 0.015315, 1.662034, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1928.0, 1855.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 1790.0, 0.0005, 0.009109, 0.977482, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1928.0, 1790.0, 0.000499, 0.009108, 0.977482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 2034.0, 0.000494, 0.009033, 0.96659, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 2024.0, 0.000363, 0.006412, 0.672766, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1912.0, 2155.0, 0.000721, 0.003805, 0.023416, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2175.0, 0.0018, 0.0107, 0.0208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2175.0, 0.0013, 0.0109, 0.0364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2174.0, 0.003659, 0.01587, 0.045896, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2177.0, 2176.0, 0.001, 0.004, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2176.0, 0.0009, 0.0039, 0.00888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2173.0, 2171.0, 0.0049, 0.0203, 0.0352, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2173.0, 2172.0, 0.0014, 0.0089, 0.0272, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1810.0, 1939.0, 0.000764, 0.005558, 0.06534, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1810.0, 2202.0, 0.001198, 0.009194, 0.095348, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 2168.0, 0.002645, 0.016233, 0.122918, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 1829.0, 0.000831, 0.007075, 0.049208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 2169.0, 0.0006, 0.0048, 0.0144, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2171.0, 2169.0, 0.0007, 0.005, 0.0146, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 1941.0, 0.0005, 0.003, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1809.0, 2218.0, 0.000453, 0.005, 0.0074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1809.0, 2218.0, 0.000453, 0.005, 0.0074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[53.0, 1909.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[55.0, 1909.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[36.0, 1831.0, 0.001722, 0.010968, 0.017098, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2167.0, 1982.0, 0.0036, 0.0317, 0.0886, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2167.0, 1983.0, 0.00206, 0.01115, 0.01946, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 1908.0, 0.000426, 0.002537, 0.00866, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 1908.0, 0.00045, 0.002581, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 2161.0, 0.001, 0.006138, 0.017238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 2161.0, 0.001, 0.00539, 0.01767, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1794.0, 0.004382, 0.027697, 0.085722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1794.0, 0.003049, 0.028391, 0.081652, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1887.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2166.0, 0.003412, 0.01859, 0.035532, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2209.0, 0.005598, 0.030473, 0.051208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2209.0, 0.005475, 0.032322, 0.077422, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1908.0, 0.005469, 0.034514, 0.10096, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1908.0, 0.005539, 0.034934, 0.100658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2164.0, 0.00228, 0.015838, 0.046554, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2208.0, 0.005808, 0.044554, 0.131736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2026.0, 0.014736, 0.08342, 0.159408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1928.0, 0.001024, 0.01164, 1.045364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1928.0, 0.00083, 0.011237, 1.038556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1886.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1814.0, 0.00049, 0.005109, 0.49856, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 2164.0, 0.0019, 0.0094, 0.0118, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 2165.0, 0.0011, 0.006921, 0.0214, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 2165.0, 0.001254, 0.006957, 0.020732, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 1783.0, 0.018061, 0.104849, 0.16225, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2166.0, 2163.0, 0.02, 0.128, 0.184, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1841.0, 1925.0, 0.002005, 0.015458, 0.048382, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1841.0, 1925.0, 0.001952, 0.015406, 0.048262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2160.0, 1842.0, 0.009545, 0.050416, 0.0775, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2160.0, 1910.0, 0.001505, 0.00955, 0.029252, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2159.0, 2156.0, 0.0024, 0.0141, 0.0394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2156.0, 0.002467, 0.012564, 0.036174, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2158.0, 0.0036, 0.0224, 0.0614, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2157.0, 0.0066, 0.0357, 0.056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2157.0, 0.0066, 0.0357, 0.066724, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1906.0, 2156.0, 0.001131, 0.010327, 0.03263, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1906.0, 2156.0, 0.00134, 0.010137, 0.032934, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 2232.0, 0.002, 0.011176, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 2232.0, 0.002, 0.011176, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 2154.0, 0.000957, 0.004942, 0.015, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 1940.0, 0.0013, 0.0068, 0.06552, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 1995.0, 0.007107, 0.034738, 0.060772, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 1995.0, 0.004876, 0.023832, 0.041692, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 2185.0, 0.002149, 0.010502, 0.018372, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 2185.0, 0.00157, 0.007675, 0.013426, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2188.0, 2228.0, 0.0032, 0.0124, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2188.0, 2228.0, 0.003, 0.0143, 0.0408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2153.0, 2152.0, 0.0053, 0.0319, 0.0654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1987.0, 2003.0, 0.00057, 0.005567, 0.51967, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2151.0, 2150.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2151.0, 2149.0, 0.0003, 0.0024, 0.0064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2151.0, 2149.0, 0.0003, 0.0024, 0.0064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2148.0, 2147.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2148.0, 2146.0, 0.0003, 0.0024, 0.0062, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2143.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2142.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2141.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2144.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2142.0, 1987.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2142.0, 2139.0, 0.0016, 0.0178, 1.672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2142.0, 2140.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2141.0, 2138.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2142.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2141.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2135.0, 0.0015, 0.0181, 1.6626, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2136.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1807.0, 2106.0, 0.001225, 0.00965, 0.029664, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2156.0, 51.0, 0.00113, 0.008562, 0.02454, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2156.0, 51.0, 0.001024, 0.007755, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2156.0, 2130.0, 0.008293, 0.046318, 0.129332, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2175.0, 2207.0, 0.001095, 0.007076, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2175.0, 2207.0, 0.001116, 0.007079, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2175.0, 1784.0, 0.000787, 0.004344, 0.014244, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2175.0, 1784.0, 0.000787, 0.004344, 0.014244, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1947.0, 2220.0, 0.000603, 0.003376, 0.009118, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1947.0, 2220.0, 0.000475, 0.00314, 0.009422, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2209.0, 2134.0, 0.0137, 0.0773, 0.1374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2209.0, 2208.0, 0.00517, 0.0294, 0.04392, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1791.0, 0.000869, 0.007208, 0.024548, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1791.0, 0.000738, 0.007235, 0.024668, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1990.0, 0.001151, 0.007729, 0.026286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1990.0, 0.000871, 0.007813, 0.026216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 48.0, 0.005823, 0.027349, 0.07467, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 48.0, 0.005823, 0.027349, 0.07467, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1842.0, 0.001531, 0.010085, 0.030386, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1842.0, 0.001531, 0.010085, 0.030386, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 2228.0, 0.007567, 0.040931, 0.114362, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2094.0, 2228.0, 0.006829, 0.035599, 0.10737, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2094.0, 2228.0, 0.010092, 0.044787, 0.083766, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1.0, 0.006166, 0.027296, 0.045504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1937.0, 1792.0, 0.0, 1e-06, 0.0, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 2133.0, 0.00124, 0.008152, 0.014254, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 1774.0, 0.005207, 0.03944, 0.113034, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1792.0, 2123.0, 0.00124, 0.01052, 0.018254, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1792.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1792.0, 1774.0, 0.005207, 0.03944, 0.113034, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1901.0, 1913.0, 0.0037, 0.0294, 0.085666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1802.0, 1913.0, 0.002304, 0.015628, 0.04459, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 2132.0, 0.002, 0.0066, 0.0096, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 2131.0, 0.002, 0.0084, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 2131.0, 0.0027, 0.009, 0.0144, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1820.0, 1821.0, 0.003241, 0.020126, 0.057066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[59.0, 1804.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[58.0, 1804.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 2226.0, 0.0006, 0.00225, 0.007, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 2226.0, 0.0006, 0.00225, 0.007, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 1955.0, 0.000528, 0.005104, 0.00836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 1955.0, 0.000528, 0.005104, 0.00836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2216.0, 2214.0, 0.0072, 0.0325, 0.047, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1854.0, 2128.0, 0.00069, 0.004434, 0.014444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1854.0, 2198.0, 0.002688, 0.016159, 0.048504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1854.0, 2172.0, 0.000758, 0.004368, 0.015356, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1854.0, 2172.0, 0.000706, 0.004367, 0.015052, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2200.0, 1943.0, 0.0003, 0.0029, 0.00475, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 557.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 556.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 553.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 552.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 2009.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2130.0, 51.0, 0.006325, 0.047909, 0.137306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2130.0, 2156.0, 0.006231, 0.047431, 0.139012, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2130.0, 2129.0, 0.008403, 0.052574, 0.08514, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2130.0, 2129.0, 0.008106, 0.03814, 0.0886, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2128.0, 1840.0, 0.001822, 0.010859, 0.032462, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2211.0, 2210.0, 0.0043, 0.0204, 0.0302, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[46.0, 1925.0, 0.007438, 0.056343, 0.161476, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[46.0, 2166.0, 0.005702, 0.043196, 0.123798, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[46.0, 1783.0, 0.005678, 0.043008, 0.12326, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2210.0, 1910.0, 0.004774, 0.033037, 0.094882, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2127.0, 2225.0, 0.0016, 0.0087, 0.0092, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2127.0, 1824.0, 0.002094, 0.01628, 0.048262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1837.0, 43.0, 0.002851, 0.021598, 0.0619, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1837.0, 43.0, 0.002851, 0.021598, 0.0619, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1837.0, 3.0, 0.007298, 0.023277, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1826.0, 1827.0, 0.002963, 0.017781, 0.051432, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2168.0, 2172.0, 0.001353, 0.007979, 0.09775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2126.0, 2177.0, 0.001083, 0.006426, 0.017174, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2125.0, 2133.0, 0.001, 0.0066, 0.01932, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2125.0, 2133.0, 0.0011, 0.0066, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2125.0, 2124.0, 0.001048, 0.007655, 0.021428, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2125.0, 2124.0, 0.001064, 0.007566, 0.02158, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1806.0, 1968.0, 0.004027, 0.025987, 0.06444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1806.0, 1968.0, 0.006024, 0.031897, 0.07314, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1777.0, 0.002361, 0.01109, 0.030276, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1777.0, 0.002361, 0.01109, 0.030276, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 2036.0, 0.001453, 0.011009, 0.031552, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 2036.0, 0.001453, 0.011009, 0.031552, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1817.0, 0.002715, 0.020567, 0.058944, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1817.0, 0.002715, 0.020567, 0.058944, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[54.0, 2064.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1800.0, 1944.0, 0.00362, 0.02356, 0.070238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1800.0, 1944.0, 0.00362, 0.02356, 0.070238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1917.0, 1978.0, 0.001756, 0.012722, 0.039038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1917.0, 1978.0, 0.001756, 0.012768, 0.039174, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2193.0, 2232.0, 0.00036, 0.00247, 0.008304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2193.0, 2232.0, 0.00036, 0.002473, 0.008404, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1793.0, 1831.0, 0.004018, 0.02119, 0.031322, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1952.0, 1951.0, 0.00445, 0.02678, 0.0424, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1973.0, 0.001166, 0.01489, 1.616022, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1897.0, 0.000188, 0.003424, 0.356704, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1897.0, 0.000184, 0.003403, 0.358824, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1897.0, 0.000222, 0.003421, 0.351524, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1858.0, 1859.0, 0.0011, 0.0097, 0.030288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1858.0, 1859.0, 0.0011, 0.0097, 0.030288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2126.0, 0.0016, 0.0111, 0.0326, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2126.0, 0.002435, 0.013008, 0.039056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2121.0, 0.0012, 0.0051, 0.017, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2182.0, 0.01269, 0.070386, 0.213056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2120.0, 0.0205, 0.0676, 0.291, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 44.0, 0.005062, 0.023775, 0.064912, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2015.0, 2196.0, 0.0006, 0.0031, 0.0436, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1861.0, 2196.0, 0.0006, 0.0031, 0.0436, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2118.0, 1780.0, 0.014222, 0.06951, 0.121602, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2118.0, 1780.0, 0.014222, 0.06951, 0.121602, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2116.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2114.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2113.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1924.0, 0.024837, 0.137353, 0.21539, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 2118.0, 0.0018, 0.0039, 0.0067, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1780.0, 0.013636, 0.077335, 0.11541, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1780.0, 0.013636, 0.077335, 0.11541, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 2117.0, 0.00714, 0.021, 0.0326, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1992.0, 0.015847, 0.094112, 0.149088, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1992.0, 0.0163, 0.097, 0.1432, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1977.0, 1927.0, 0.000918, 0.012759, 1.2575, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1927.0, 0.000926, 0.012736, 1.256638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1883.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1976.0, 0.001129, 0.015209, 1.424948, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1902.0, 0.000146, 0.001874, 0.18991, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1903.0, 0.000172, 0.001884, 0.195408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1780.0, 1992.0, 0.004254, 0.024125, 0.036002, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1780.0, 1992.0, 0.004254, 0.024125, 0.036002, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1975.0, 1977.0, 0.001129, 0.015209, 0.142494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1975.0, 1974.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2112.0, 2111.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2110.0, 2111.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 1844.0, 0.002676, 0.015397, 0.031688, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2109.0, 2207.0, 0.0017, 0.0107, 0.0284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 2207.0, 0.0006, 0.0105, 0.0286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 1769.0, 0.003999, 0.030444, 0.089226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 1769.0, 0.003999, 0.030444, 0.089226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 2005.0, 0.0016, 0.0048, 0.1224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 2204.0, 0.001983, 0.011962, 0.03345, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2109.0, 2108.0, 0.0017, 0.0091, 0.0272, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2109.0, 2108.0, 0.002178, 0.011857, 0.128572, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2107.0, 1948.0, 0.01167, 0.052547, 0.12149, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2107.0, 1953.0, 0.0086, 0.0528, 0.15631, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 1948.0, 0.004412, 0.025837, 0.072956, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 1921.0, 0.0041, 0.0339, 0.104598, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 2105.0, 0.005559, 0.034409, 0.034118, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 2105.0, 0.006452, 0.030781, 0.04556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1939.0, 0.001728, 0.014502, 0.11525, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1939.0, 0.001774, 0.014573, 0.113328, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2202.0, 2200.0, 0.000613, 0.004558, 0.02771, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2200.0, 0.000609, 0.004555, 0.027656, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1943.0, 0.000486, 0.004698, 0.007696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1943.0, 0.000486, 0.004698, 0.007696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1874.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2223.0, 0.00323, 0.013, 0.04, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2223.0, 0.00323, 0.013, 0.04, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2199.0, 0.00423, 0.0233, 0.06904, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2199.0, 0.002383, 0.018144, 0.053178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2201.0, 0.000809, 0.006324, 0.084454, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2201.0, 0.0008, 0.0063, 0.01612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1875.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1974.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1897.0, 0.001027, 0.013427, 1.31672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1897.0, 0.001027, 0.013427, 1.31672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1926.0, 0.00054, 0.007314, 0.736074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1974.0, 1973.0, 0.001798, 0.017107, 0.320912, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1984.0, 2153.0, 0.0013, 0.0098, 0.0296, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1984.0, 2153.0, 0.0013, 0.0098, 0.0298, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2104.0, 2119.0, 0.0099, 0.035083, 0.048204, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2012.0, 2011.0, 0.043836, 0.178923, 0.032564, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 1930.0, 0.00553, 0.029104, 0.081816, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 1930.0, 0.003466, 0.018151, 0.05141, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 2101.0, 0.0019, 0.012, 0.0332, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 2100.0, 0.0098, 0.0256, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2149.0, 0.0, 1e-06, 2e-06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2075.0, 0.004, 0.0362, 0.0958, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2098.0, 0.0042, 0.0213, 0.0612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2098.0, 0.00376, 0.021467, 0.060712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 1931.0, 0.005604, 0.031448, 0.087188, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 2099.0, 0.0023, 0.0112, 0.03, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 2099.0, 0.0026, 0.013, 0.03, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 1915.0, 0.001405, 0.006673, 0.0208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 1915.0, 0.001368, 0.00666, 0.020638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1806.0, 0.009481, 0.05461, 0.09703, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1942.0, 0.00216, 0.01062, 0.0171, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1942.0, 0.00216, 0.01062, 0.0171, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1915.0, 0.002927, 0.011569, 0.03306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1915.0, 0.002199, 0.011585, 0.0324, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1936.0, 2069.0, 0.001533, 0.01167, 0.03418, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1936.0, 2069.0, 0.001405, 0.01136, 0.03412, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1938.0, 2217.0, 0.000413, 0.002459, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[52.0, 2098.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 1838.0, 0.004812, 0.029932, 0.088632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 1838.0, 0.004831, 0.030014, 0.0893, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 2105.0, 0.004686, 0.03165, 0.96246, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 2105.0, 0.004761, 0.03174, 0.945046, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2097.0, 2182.0, 0.0012, 0.0056, 0.0108, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1959.0, 1876.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2164.0, 2179.0, 0.0053, 0.0326, 0.0446, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2134.0, 2096.0, 0.0064, 0.061, 0.0914, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1949.0, 1795.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1949.0, 1795.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1949.0, 2211.0, 0.00437, 0.0184, 0.0161, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1788.0, 2098.0, 0.008655, 0.03852, 0.0579, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1991.0, 0.00095, 0.00498, 0.008738, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1842.0, 0.001028, 0.005377, 0.008848, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1842.0, 0.001367, 0.007231, 0.011618, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1774.0, 0.000967, 0.008013, 0.027288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1774.0, 0.000967, 0.008013, 0.027288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 1948.0, 0.001734, 0.013202, 0.038696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 1948.0, 0.001734, 0.013202, 0.038696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 2105.0, 0.00244, 0.018575, 0.05444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 2105.0, 0.00244, 0.018575, 0.05444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2093.0, 2092.0, 0.0021, 0.009, 0.0162, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2093.0, 2092.0, 0.0021, 0.0092, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2122.0, 2091.0, 0.0018, 0.0107, 0.0316, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2122.0, 1.0, 0.0025, 0.01318, 0.01978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2089.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2088.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2090.0, 1993.0, 0.001073, 0.006678, 0.020362, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 1993.0, 0.001068, 0.006721, 0.020362, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2087.0, 0.0007, 0.004, 0.0106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2087.0, 0.0007, 0.004, 0.0106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2086.0, 0.0014, 0.0061, 0.0178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2086.0, 0.0015, 0.0062, 0.0178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2092.0, 0.000577, 0.004153, 0.012844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2092.0, 0.000577, 0.004153, 0.013046, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2084.0, 0.0085, 0.0302, 0.0566, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2088.0, 2084.0, 0.0085, 0.0393, 0.0566, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2088.0, 2085.0, 0.0019, 0.0104, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2085.0, 0.0016, 0.008, 0.022, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1779.0, 0.001312, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1779.0, 0.001312, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1859.0, 0.002117, 0.014224, 0.044428, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1859.0, 0.014442, 0.014442, 0.04484, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 2082.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 2135.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 2139.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 1771.0, 0.000327, 0.00455, 0.448486, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2135.0, 1966.0, 0.000205, 0.002384, 0.23393, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2135.0, 1966.0, 0.000168, 0.00234, 0.237148, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2135.0, 2081.0, 0.0006, 0.0071, 0.697466, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 2135.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 2139.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 2079.0, 0.0007, 0.0071, 0.6752, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1767.0, 1795.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1767.0, 1795.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[114.0, 109.0, 0.001236, 0.013293, 1.480528, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[114.0, 1786.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[113.0, 112.0, 0.001641, 0.01764, 1.964682, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[113.0, 1786.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2205.0, 0.001323, 0.013531, 0.041808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2205.0, 0.001323, 0.013531, 0.041808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2084.0, 9.8e-05, 0.001366, 0.134654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2084.0, 9.8e-05, 0.001366, 0.134654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 119.0, 0.003842, 0.035772, 0.102888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 119.0, 0.003842, 0.035772, 0.102888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1929.0, 1932.0, 0.00352, 0.01739, 0.027392, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2099.0, 2075.0, 0.0075, 0.0333, 0.0862, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2099.0, 1932.0, 0.000571, 0.003917, 0.011298, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2099.0, 1932.0, 0.000625, 0.004002, 0.011024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2192.0, 0.005799, 0.044143, 0.129376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2192.0, 0.005799, 0.044143, 0.129376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2197.0, 0.000333, 0.001914, 0.010434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2197.0, 0.000335, 0.001915, 0.010716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2195.0, 0.000709, 0.004256, 0.014632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2196.0, 0.001161, 0.006866, 0.02572, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1934.0, 1933.0, 0.006777, 0.036325, 0.099522, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1766.0, 2098.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1968.0, 1948.0, 0.007335, 0.040468, 0.132678, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1968.0, 1948.0, 0.007335, 0.040468, 0.132678, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 1986.0, 0.0014, 0.008, 0.012, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2133.0, 0.0024, 0.0152, 0.0254, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2133.0, 0.0028, 0.0165, 0.0256, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2122.0, 0.0014, 0.008, 0.0134, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2122.0, 0.0007, 0.0052, 0.0224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2021.0, 0.012484, 0.069281, 0.11486, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2132.0, 2131.0, 0.0015, 0.0066, 0.012, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2178.0, 2191.0, 0.006813, 0.043, 0.06108, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2178.0, 1818.0, 0.001267, 0.006536, 0.0117, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2178.0, 1818.0, 0.001185, 0.006504, 0.010946, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[12.0, 1679.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[12.0, 116.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 18.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 17.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 16.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 15.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1857.0, 51.0, 0.002531, 0.019174, 0.05495, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1857.0, 2156.0, 0.003173, 0.027163, 0.078504, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1982.0, 1911.0, 0.004746, 0.035379, 0.105292, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 1917.0, 0.00248, 0.01851, 0.055088, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 1917.0, 0.002438, 0.01845, 0.055446, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 2202.0, 0.001864, 0.014205, 0.044768, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 2202.0, 0.001869, 0.014081, 0.044908, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1914.0, 2107.0, 0.0036, 0.019, 0.051544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1914.0, 2058.0, 0.0061, 0.0313, 0.0847, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1914.0, 1953.0, 0.0113, 0.0675, 0.199492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[49.0, 2171.0, 0.001603, 0.012145, 0.034808, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[49.0, 2169.0, 0.001099, 0.008326, 0.023862, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2218.0, 2185.0, 0.001653, 0.010407, 0.0294, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1966.0, 0.000152, 0.001935, 0.20991, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1966.0, 0.000124, 0.001938, 0.209752, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1848.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1847.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1846.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1845.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2074.0, 2233.0, 0.0045, 0.0226, 0.0614, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2198.0, 0.003409, 0.020465, 0.11888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1829.0, 0.000246, 0.001611, 0.03219, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1829.0, 0.000222, 0.001538, 0.032516, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1867.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1865.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1840.0, 0.002366, 0.01494, 0.043588, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2073.0, 0.001, 0.0068, 0.0192, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2073.0, 0.001, 0.0072, 0.0196, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2169.0, 0.0016, 0.008, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2169.0, 0.002, 0.0121, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1973.0, 1868.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1973.0, 1866.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1973.0, 1897.0, 0.0014, 0.0163, 1.604962, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1973.0, 1926.0, 0.000371, 0.004039, 0.2452, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2221.0, 0.002538, 0.018658, 0.057658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1947.0, 0.000244, 0.001883, 0.006854, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1947.0, 0.000319, 0.001779, 0.007006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1947.0, 0.000316, 0.001744, 0.006838, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2216.0, 0.0032, 0.01325, 0.0247, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2220.0, 0.000283, 0.001786, 0.007918, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2220.0, 0.000276, 0.001786, 0.00784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1823.0, 0.006105, 0.032408, 0.092494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1823.0, 0.006105, 0.032408, 0.092494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2214.0, 0.00572, 0.02325, 0.0247, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1970.0, 0.000271, 0.002947, 0.303246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1798.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1799.0, 1897.0, 0.000631, 0.009242, 0.194064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1969.0, 9.4e-05, 0.000882, 0.09577, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1972.0, 0.00026, 0.00296, 0.303556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1897.0, 0.000581, 0.009148, 0.197, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1969.0, 9.5e-05, 0.000894, 0.096712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1776.0, 2066.0, 0.000748, 0.003551, 0.009954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1776.0, 2066.0, 0.000748, 0.003551, 0.009954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1806.0, 0.004027, 0.025987, 0.06444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1819.0, 0.000878, 0.008242, 0.022352, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1819.0, 0.001401, 0.008357, 0.023872, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1930.0, 0.003186, 0.016051, 0.046862, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1930.0, 0.003638, 0.018825, 0.052778, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1942.0, 0.001495, 0.008215, 0.023988, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1932.0, 0.003694, 0.020963, 0.05775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2095.0, 1991.0, 0.0038, 0.0265, 0.0452, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2095.0, 1774.0, 0.002207, 0.016799, 0.049234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2095.0, 1774.0, 0.002207, 0.016799, 0.049234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2206.0, 1954.0, 0.000436, 0.003126, 0.010554, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2206.0, 1954.0, 0.00048, 0.003156, 0.010722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2206.0, 2205.0, 0.0035, 0.0208, 0.0568, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2154.0, 2232.0, 0.001636, 0.007686, 0.020984, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2154.0, 2232.0, 0.001636, 0.007686, 0.020984, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2154.0, 1824.0, 0.001747, 0.011028, 0.02, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2068.0, 2174.0, 0.0053, 0.0356, 0.1608, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1995.0, 2127.0, 0.002277, 0.013038, 0.02106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1995.0, 2185.0, 0.009767, 0.035062, 0.048936, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1995.0, 2185.0, 0.005959, 0.032066, 0.049696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1819.0, 2062.0, 0.003176, 0.015785, 0.043182, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1819.0, 1953.0, 0.004039, 0.022981, 0.066948, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1775.0, 1817.0, 0.00056, 0.004262, 0.012492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1775.0, 1817.0, 0.00056, 0.004262, 0.012492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2067.0, 2004.0, 0.0011, 0.0053, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2067.0, 2066.0, 0.0035, 0.01357, 0.0193, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2205.0, 2130.0, 0.005, 0.0289, 0.081, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2205.0, 2130.0, 0.003152, 0.02578, 0.0731, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2177.0, 0.002603, 0.021498, 0.07278, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2177.0, 0.002582, 0.021425, 0.0731, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1919.0, 0.001405, 0.011326, 0.219716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1919.0, 0.00139, 0.011124, 0.22341, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2156.0, 0.005768, 0.043001, 0.127542, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2156.0, 0.005768, 0.043001, 0.127542, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2175.0, 0.002549, 0.017938, 0.059848, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2175.0, 0.002488, 0.01794, 0.059848, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2126.0, 0.002403, 0.02124, 0.071276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2126.0, 0.002353, 0.021196, 0.072128, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1832.0, 0.000607, 0.004514, 0.015152, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2.0, 0.000607, 0.004504, 0.015044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1790.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1790.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1786.0, 0.000733, 0.009358, 1.015624, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1786.0, 0.000733, 0.009358, 1.015624, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 123.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 2079.0, 0.000508, 0.0044, 0.4396, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 2081.0, 0.000464, 0.00536, 0.5338, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[123.0, 1959.0, 0.000968, 0.01148, 1.1461, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1978.0, 2183.0, 0.0019, 0.0102, 0.0276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1978.0, 1888.0, 0.0035, 0.0221, 0.064074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1978.0, 1888.0, 0.0036, 0.0222, 0.064304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2121.0, 2071.0, 0.0028, 0.0171, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[37.0, 2149.0, 0.001399, 0.00713, 0.021124, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1791.0, 2187.0, 0.000547, 0.004293, 0.012496, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1791.0, 2187.0, 0.000564, 0.003571, 0.010164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2087.0, 2203.0, 0.01588, 0.0793, 0.1166, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1840.0, 1782.0, 0.002004, 0.011367, 0.016964, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1888.0, 42.0, 0.001897, 0.010818, 0.029982, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2065.0, 2064.0, 0.0047, 0.0232, 0.0596, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2065.0, 1825.0, 0.010653, 0.057707, 0.104974, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2182.0, 1831.0, 0.006864, 0.041913, 0.08442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2182.0, 2097.0, 0.001925, 0.009143, 0.02563, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2182.0, 2120.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2182.0, 44.0, 0.007721, 0.036266, 0.099012, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2120.0, 1454.0, 0.0152, 0.069, 0.1232, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2120.0, 2068.0, 0.0076, 0.0355, 0.1318, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2120.0, 2124.0, 0.0107, 0.0548, 0.1562, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2120.0, 2063.0, 0.0078, 0.0253, 0.08, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1958.0, 2230.0, 0.000968, 0.01148, 1.2124, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1765.0, 2212.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1765.0, 1909.0, 0.009008, 0.044028, 0.077024, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2102.0, 0.0019, 0.0088, 0.0194, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2102.0, 0.0016, 0.0072, 0.021, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2102.0, 0.001246, 0.007242, 0.0218, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1942.0, 0.0066, 0.03245, 0.0523, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2061.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2058.0, 0.0101, 0.0509, 0.141, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2060.0, 0.0013, 0.0092, 0.025, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2060.0, 0.00201, 0.01179, 0.0338, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2059.0, 0.0034, 0.01617, 0.044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1953.0, 0.0025, 0.014, 0.036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1953.0, 0.0025, 0.014, 0.036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2003.0, 0.001561, 0.014418, 1.393376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2141.0, 0.000512, 0.008616, 0.84623, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2010.0, 0.000932, 0.01154, 1.07545, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2009.0, 0.001, 0.0116, 1.0912, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2140.0, 0.0007, 0.008796, 0.873706, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2056.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2206.0, 0.00062, 0.00339, 0.00774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2206.0, 0.00054, 0.00357, 0.00774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2205.0, 0.003, 0.0161, 0.0416, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2207.0, 2054.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2052.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2018.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 1784.0, 0.00052, 0.00287, 0.00941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 1784.0, 0.00052, 0.00287, 0.00941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2053.0, 0.0015, 0.0078, 0.022, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2052.0, 2051.0, 0.0013, 0.0078, 0.0226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 315.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2050.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2019.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2081.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2230.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2081.0, 307.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2081.0, 2230.0, 0.00054, 0.00738, 0.766086, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2187.0, 0.00126, 0.007397, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 1916.0, 0.000818, 0.0061, 0.001808, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2124.0, 1916.0, 0.000818, 0.0061, 0.001808, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2124.0, 6.0, 0.000717, 0.002597, 0.003648, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2121.0, 0.002019, 0.0095, 0.046, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2014.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2006.0, 0.0087, 0.0339, 0.2008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 1774.0, 0.001156, 0.006379, 0.020912, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2124.0, 1774.0, 0.001156, 0.006379, 0.020912, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2014.0, 2174.0, 0.0026, 0.0129, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 2174.0, 0.0023, 0.0129, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 2121.0, 0.002312, 0.016324, 0.04676, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 2063.0, 0.0081, 0.0314, 0.0662, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1773.0, 0.000279, 0.003874, 0.381812, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1773.0, 0.000279, 0.003874, 0.381812, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2229.0, 0.000612, 0.007548, 0.76969, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2229.0, 0.000684, 0.007548, 0.761836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2024.0, 0.000436, 0.006384, 0.62015, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2024.0, 0.00044, 0.00638, 0.6202, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2230.0, 2024.0, 0.00044, 0.00638, 0.6202, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2071.0, 2070.0, 0.0004, 0.0025, 0.0666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2071.0, 2070.0, 0.0003, 0.0013, 0.0666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2071.0, 2108.0, 0.0025, 0.0133, 0.0396, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1844.0, 0.003178, 0.024071, 0.068986, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1844.0, 0.003178, 0.024071, 0.068986, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 2024.0, 0.000296, 0.004117, 0.40581, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 2024.0, 0.000296, 0.004117, 0.40581, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1843.0, 1954.0, 0.000196, 0.001444, 0.005702, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1843.0, 1954.0, 0.00017, 0.001475, 0.00593, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1781.0, 0.002351, 0.017893, 0.052442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1781.0, 0.002515, 0.019148, 0.05612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1791.0, 0.001184, 0.005796, 0.016876, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1791.0, 0.000773, 0.005178, 0.014792, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 2091.0, 0.002873, 0.014873, 0.026988, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1990.0, 2091.0, 0.001843, 0.012695, 0.028906, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2092.0, 1949.0, 0.000576, 0.005568, 0.00912, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2075.0, 1776.0, 0.003123, 0.014847, 0.041616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2075.0, 1776.0, 0.003123, 0.014847, 0.041616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2075.0, 2066.0, 0.003, 0.0162, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2075.0, 2066.0, 0.003, 0.0162, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1909.0, 1831.0, 0.000425, 0.002347, 0.007694, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1909.0, 1831.0, 0.000425, 0.002347, 0.007694, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2004.0, 2000.0, 0.0043, 0.0189, 0.0516, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[50.0, 1894.0, 0.007438, 0.037376, 0.062508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[50.0, 1894.0, 0.007438, 0.037376, 0.062508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2180.0, 2166.0, 0.011111, 0.065754, 0.098978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2180.0, 2134.0, 0.0056, 0.0304, 0.0504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2000.0, 0.0109, 0.0472, 0.1306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2064.0, 0.00604, 0.037441, 0.111652, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2064.0, 0.006511, 0.037267, 0.111562, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2065.0, 0.015, 0.0413, 0.0936, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2048.0, 2047.0, 0.0049, 0.021, 0.034, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2048.0, 2214.0, 0.0132, 0.0474, 0.074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1913.0, 2153.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1913.0, 2153.0, 0.0017, 0.0123, 0.038104, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1913.0, 2132.0, 0.0015, 0.0104, 0.03276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1913.0, 2132.0, 0.0014, 0.0105, 0.03257, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1850.0, 2204.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1850.0, 2204.0, 0.00068, 0.003595, 0.011282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1935.0, 1934.0, 0.00093, 0.005165, 0.014484, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2046.0, 2010.0, 0.00011, 0.0016, 0.157, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2046.0, 2010.0, 0.000112, 0.001608, 0.1727, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2046.0, 2045.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2045.0, 2010.0, 0.00011, 0.0016, 0.157, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2044.0, 2045.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2058.0, 1933.0, 0.001967, 0.011025, 0.032296, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2058.0, 1934.0, 0.00524, 0.028022, 0.078426, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2084.0, 1779.0, 0.003284, 0.025003, 0.07328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2084.0, 1779.0, 0.003284, 0.025003, 0.07328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2195.0, 2196.0, 0.0006, 0.0034, 0.016282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1764.0, 1831.0, 4.9e-05, 0.000287, 0.001824, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[56.0, 2153.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2042.0, 2041.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2040.0, 2041.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2039.0, 2038.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2037.0, 2038.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2006.0, 1769.0, 0.005199, 0.039577, 0.115992, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2028.0, 1907.0, 0.001632, 0.014674, 0.046224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2028.0, 1955.0, 1e-06, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2028.0, 2228.0, 0.0022, 0.016793, 0.049218, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1805.0, 2064.0, 0.004105, 0.025004, 0.073654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1989.0, 2075.0, 0.002775, 0.01195, 0.031086, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1989.0, 2075.0, 0.002042, 0.009724, 0.0056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2036.0, 1777.0, 0.001686, 0.01625, 0.028548, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2036.0, 1776.0, 0.002319, 0.017657, 0.05175, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2036.0, 1776.0, 0.002319, 0.017657, 0.05175, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2158.0, 2159.0, 0.003785, 0.035893, 0.102126, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2158.0, 1832.0, 0.003733, 0.026363, 0.08693, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2158.0, 2.0, 0.003679, 0.026454, 0.08693, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2063.0, 2068.0, 0.0013, 0.0076, 0.1, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2085.0, 1949.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2060.0, 2101.0, 0.001194, 0.006769, 0.02107, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2060.0, 2101.0, 0.00123, 0.00755, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1828.0, 1827.0, 0.002291, 0.013129, 0.037544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1951.0, 0.000967, 0.005386, 0.015858, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1951.0, 0.00083, 0.005543, 0.015894, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1800.0, 0.0032, 0.0256, 0.050238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1800.0, 0.0032, 0.0256, 0.050238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1952.0, 0.0053, 0.0287, 0.043366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1888.0, 0.0046, 0.0265, 0.07574, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1888.0, 0.0049, 0.0281, 0.076512, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1893.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1891.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 2047.0, 0.003, 0.0182, 0.052822, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 2047.0, 0.003, 0.0183, 0.052868, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1827.0, 0.000858, 0.005166, 0.015054, 10.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1827.0, 0.000914, 0.005525, 0.01506, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1897.0, 1895.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1897.0, 1892.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[120.0, 1897.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 1917.0, 0.006735, 0.04502, 0.1218, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 1978.0, 0.005, 0.0273, 0.0742, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 2048.0, 0.011661, 0.047648, 0.068356, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 2163.0, 0.0157, 0.0776, 0.1892, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1762.0, 1921.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1912.0, 0.0035, 0.0199, 0.055758, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2167.0, 0.0014, 0.0093, 0.02272, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2167.0, 0.0026, 0.0129, 0.0206, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2224.0, 0.0008, 0.00608, 0.018, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2224.0, 0.0007, 0.0061, 0.01778, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1982.0, 0.004371, 0.036771, 0.102082, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1911.0, 0.000587, 0.005466, 0.015722, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1911.0, 0.001272, 0.011845, 0.034066, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1995.0, 0.0032, 0.0166, 0.0476, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2035.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1980.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1983.0, 0.005, 0.0147, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1966.0, 0.000356, 0.005065, 0.51967, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 2003.0, 0.00121, 0.01355, 1.2482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1772.0, 0.000317, 0.00405, 0.439468, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2034.0, 1772.0, 0.000309, 0.004298, 0.42362, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2034.0, 2033.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1981.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 2032.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1771.0, 0.000759, 0.010812, 1.0325, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[121.0, 2034.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1801.0, 2131.0, 0.0037, 0.0294, 0.085666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2220.0, 2170.0, 0.000467, 0.004897, 0.015144, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2220.0, 2170.0, 0.000467, 0.0049, 0.015136, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2030.0, 1940.0, 0.000667, 0.003612, 0.055194, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2204.0, 1844.0, 0.001053, 0.007978, 0.022864, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2204.0, 1844.0, 0.001053, 0.007978, 0.022864, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2204.0, 2206.0, 0.0023, 0.0127, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 1992.0, 0.0055, 0.0269, 0.044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 1871.0, 0.0055, 0.0269, 0.044, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2233.0, 2190.0, 0.0017, 0.0128, 0.0398, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 2228.0, 0.001919, 0.010339, 0.029802, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 2228.0, 0.003985, 0.013988, 0.035304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2223.0, 2169.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2223.0, 2222.0, 0.003, 0.0199, 0.0546, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2223.0, 2222.0, 0.002477, 0.015386, 0.086506, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1946.0, 2124.0, 0.002181, 0.012442, 0.034482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1946.0, 1769.0, 0.004399, 0.033488, 0.098148, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2213.0, 2212.0, 0.00872, 0.0415, 0.0603, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1823.0, 1822.0, 0.001557, 0.008831, 0.013178, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1823.0, 1822.0, 0.001557, 0.008831, 0.013178, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1992.0, 47.0, 0.008124, 0.030296, 0.05087, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1992.0, 1871.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[38.0, 1921.0, 0.005421, 0.030248, 0.044896, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1832.0, 2.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2199.0, 2163.0, 0.012972, 0.060245, 0.0882, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 1825.0, 0.002794, 0.015736, 0.030542, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 1825.0, 0.002779, 0.016037, 0.030802, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 2004.0, 0.0061, 0.0282, 0.0736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 119.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2161.0, 2165.0, 0.002758, 0.017246, 0.05042, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2161.0, 2165.0, 0.00281, 0.017192, 0.050784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2190.0, 1955.0, 0.0015, 0.005, 0.008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2059.0, 1933.0, 0.007141, 0.03759, 0.110426, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2059.0, 2060.0, 0.001137, 0.007726, 0.021632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2066.0, 1777.0, 0.008535, 0.047552, 0.135966, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2066.0, 2036.0, 0.0277, 0.0546, 0.1086, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2066.0, 1817.0, 0.001193, 0.008897, 0.028558, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2066.0, 1817.0, 0.001271, 0.008926, 0.028726, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2214.0, 1822.0, 0.001297, 0.008265, 0.028008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2214.0, 2048.0, 0.004664, 0.019059, 0.027342, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2228.0, 2188.0, 0.0032, 0.0124, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 47.0, 0.002432, 0.009068, 0.015226, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 1907.0, 0.000749, 0.006419, 0.019036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 1907.0, 0.000404, 0.006082, 0.019234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 48.0, 0.002281, 0.010715, 0.029254, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 48.0, 0.002281, 0.010715, 0.029254, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 2028.0, 0.003431, 0.018104, 0.05278, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 2028.0, 0.002438, 0.018489, 0.053282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 2025.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1790.0, 0.000393, 0.006763, 0.725106, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 2139.0, 0.0012, 0.0095, 0.8706, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2024.0, 2034.0, 0.0009, 0.0131, 1.2058, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2024.0, 2023.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1771.0, 0.00041, 0.005233, 0.567852, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1771.0, 0.000362, 0.005035, 0.496268, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1816.0, 2003.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1816.0, 1899.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1815.0, 2003.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1815.0, 1899.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1923.0, 1807.0, 0.004043, 0.031502, 0.092992, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1837.0, 0.00419, 0.032116, 0.097538, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1837.0, 0.003923, 0.032344, 0.097258, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 2106.0, 0.005601, 0.039221, 0.120638, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1923.0, 2106.0, 0.00442, 0.04115, 0.118408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1921.0, 0.008033, 0.074789, 0.215092, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1968.0, 8.3e-05, 0.001479, 0.004712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1968.0, 6.2e-05, 0.001495, 0.004682, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 2178.0, 0.001489, 0.009279, 0.019006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 2178.0, 0.0019, 0.008904, 0.019006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1818.0, 0.000639, 0.003844, 0.011098, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1818.0, 0.000629, 0.00385, 0.011346, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 2136.0, 0.000834, 0.010243, 0.944442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 2144.0, 0.000915, 0.009985, 0.950792, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 500.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1899.0, 499.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1836.0, 1968.0, 0.001023, 0.007793, 0.02284, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1836.0, 1968.0, 0.001023, 0.007793, 0.02284, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1835.0, 1899.0, 3.5e-05, 0.000554, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2160.0, 0.000808, 0.00615, 0.018024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2160.0, 0.000808, 0.00615, 0.018024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1795.0, 0.002839, 0.021615, 0.06335, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1795.0, 0.002839, 0.021615, 0.06335, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2210.0, 0.001992, 0.015161, 0.044434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2210.0, 0.002895, 0.022041, 0.0646, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1844.0, 0.002519, 0.019179, 0.056212, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1768.0, 1994.0, 0.002367, 0.013057, 0.042808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1994.0, 0.001992, 0.015161, 0.044434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1910.0, 0.001432, 0.010899, 0.031942, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1910.0, 0.001432, 0.010899, 0.031942, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 2008.0, 0.002104, 0.008588, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 2016.0, 0.002104, 0.008588, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 1852.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1926.0, 1853.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 2159.0, 0.005669, 0.029498, 0.084286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 1831.0, 0.005312, 0.030531, 0.088372, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 1831.0, 0.005391, 0.030252, 0.088402, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 2097.0, 0.003948, 0.020204, 0.05813, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1983.0, 1950.0, 0.0012, 0.0116, 0.019, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2086.0, 2030.0, 0.00086, 0.004229, 0.012674, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2185.0, 2217.0, 0.0024, 0.0101, 0.0152, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2027.0, 1947.0, 0.000579, 0.003409, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2027.0, 1947.0, 0.000579, 0.00341, 0.00806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2027.0, 1822.0, 0.003665, 0.023351, 0.069198, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1860.0, 1956.0, 0.000192, 0.001612, 0.007754, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1860.0, 1956.0, 0.00019, 0.001612, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[39.0, 2146.0, 0.005056, 0.02051, 0.02918, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1994.0, 2160.0, 0.003787, 0.015066, 0.02744, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1994.0, 1844.0, 0.006343, 0.034897, 0.072984, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1994.0, 2088.0, 0.003409, 0.018265, 0.06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1994.0, 2088.0, 0.00339, 0.018097, 0.06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1774.0, 2125.0, 0.000519, 0.002865, 0.009394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1774.0, 2125.0, 0.000519, 0.002865, 0.009394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2053.0, 2051.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1900.0, 2196.0, 0.00048, 0.0046, 0.0076, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2091.0, 1781.0, 0.000508, 0.003865, 0.011328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2091.0, 1787.0, 0.000211, 0.000705, 0.03415, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2091.0, 1.0, 0.0, 1e-06, 2e-06, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1.0, 1781.0, 0.00044, 0.003349, 0.009814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1.0, 1787.0, 0.000216, 0.000738, 0.035304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1803.0, 2153.0, 0.004651, 0.032568, 0.093178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1905.0, 2129.0, 0.004099, 0.034324, 0.09695, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1904.0, 2129.0, 0.004105, 0.025004, 0.073654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 2124.0, 0.004633, 0.02824, 0.08162, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 1769.0, 0.003559, 0.027095, 0.07941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 1769.0, 0.003559, 0.027095, 0.07941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 1945.0, 0.00096, 0.00928, 0.0152, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1941.0, 1829.0, 0.001096, 0.005395, 0.043434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2021.0, 2020.0, 0.00781, 0.0352, 0.0262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2021.0, 2091.0, 0.014, 0.0727, 0.110892, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2163.0, 1783.0, 0.004747, 0.036136, 0.10591, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2163.0, 2026.0, 0.0123, 0.0679, 0.104, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1902.0, 1903.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1859.0, 2204.0, 0.0049, 0.0288, 0.08016, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2222.0, 1917.0, 0.002438, 0.01471, 0.04222, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1950.0, 2215.0, 0.00095, 0.005619, 0.018094, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1950.0, 2215.0, 0.001591, 0.007644, 0.012924, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1950.0, 2218.0, 0.003325, 0.02037, 0.03325, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[316.0, 315.0, 0.001572, 0.02166, 3.44616, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[310.0, 307.0, 0.001592, 0.021628, 3.43046, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1922.0, 1921.0, 0.0055, 0.0332, 0.048824, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[482.0, 1789.0, 0.001904, 0.030428, 2.94106, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[484.0, 483.0, 0.001926, 0.030303, 2.93952, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 1899.0, 0.001544, 0.016148, 1.54645, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 1899.0, 0.00134, 0.014248, 1.32665, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 482.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 484.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[500.0, 508.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[499.0, 508.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1685.0, 1869.0, 0.00131, 0.072778, 0.0027, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1706.0, 1985.0, 0.0003, 0.019557, 0.0, 360.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1642.0, 1763.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1747.0, 2181.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1746.0, 2181.0, 0.0047, 0.156, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[31.0, 57.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[30.0, 57.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[23.0, 40.0, 0.002828, 0.1393, 0.0011, 100.0, 0.0,0.0,0.940909, 0.0,1.0,-30.0, 30.0, 0.1 ],
[4.0, 3.0, 0.002083, 0.116667, 0.00156, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1666.0, 1810.0, 0.000508, 0.037, 0.004284, 420.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1665.0, 1810.0, 0.000507, 0.036952, 0.003864, 420.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1745.0, 2171.0, 0.000585, 0.034067, 0.006103, 436.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1744.0, 2171.0, 0.000585, 0.034067, 0.061027, 436.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1743.0, 2171.0, 0.000526, 0.030275, 0.00981, 418.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1742.0, 2171.0, 0.000526, 0.030275, 0.00981, 418.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1664.0, 1809.0, 0.0012, 0.074111, 0.0018, 180.0, 0.0,0.0,1.097727, 0.0,0.0,-30.0, 30.0, 0.1 ],
[26.0, 53.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[28.0, 55.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[19.0, 36.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1741.0, 2162.0, 0.0006, 0.0345, 0.0, 418.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1740.0, 2162.0, 0.0006, 0.0343, 0.0, 418.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1670.0, 1841.0, 0.000544, 0.037838, 0.0148, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1669.0, 1841.0, 0.000544, 0.037838, 0.0148, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1687.0, 1906.0, 0.000791, 0.048433, 0.0033, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1686.0, 1906.0, 0.000791, 0.048433, 0.0033, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1729.0, 1986.0, 0.000659, 0.043486, 0.00189, 430.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1728.0, 2122.0, 0.000659, 0.043486, 0.00189, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1696.0, 1937.0, 0.000802, 0.048833, 0.0051, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1695.0, 1792.0, 0.000802, 0.048833, 0.0051, 370.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1690.0, 1901.0, 0.002669, 0.136, 0.0009, 100.0, 0.0,0.0,1.00625, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1659.0, 1802.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1739.0, 2152.0, 0.0041, 0.0942, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1738.0, 2152.0, 0.001394, 0.0686, 0.005, 240.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1737.0, 2152.0, 0.002018, 0.0757, 0.00184, 240.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1707.0, 2152.0, 0.000659, 0.066286, 0.00819, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1752.0, 2152.0, 0.000659, 0.041543, 0.00945, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[13.0, 1820.0, 0.003265, 0.139, 0.00076, 120.0, 0.0,0.0,0.940909, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1703.0, 1984.0, 0.001884, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1702.0, 1984.0, 0.001871, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1704.0, 1984.0, 0.001876, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1705.0, 1984.0, 0.001867, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[34.0, 59.0, 0.0064, 0.1807, 0.0, 75.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[33.0, 58.0, 0.0064, 0.1807, 0.0, 75.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1678.0, 1854.0, 0.000769, 0.050067, 0.00276, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1677.0, 1854.0, 0.000762, 0.0499, 0.00276, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1655.0, 1826.0, 0.000959, 0.192917, 0.00084, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[27.0, 54.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1657.0, 1793.0, 0.00298, 0.1364, 0.0013, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1650.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1648.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[35.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1682.0, 1858.0, 0.000527, 0.04415, 0.0034, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1681.0, 1858.0, 0.000527, 0.04415, 0.0034, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2115.0, 2118.0, 0.0029, 0.0762, 0.0, 300.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2111.0, 2117.0, 0.0045, 0.1801, 0.0, 90.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2104.0, 2012.0, 0.005505, 0.199524, 0.001512, 63.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1736.0, 2104.0, 0.006292, 0.268, 0.00075, 50.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1735.0, 2104.0, 0.006204, 0.268, 0.00075, 50.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1734.0, 2149.0, 0.002101, 0.056458, 0.014304, 240.0, 0.0,0.0,1.1, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1733.0, 2149.0, 0.001332, 0.059167, 0.008592, 240.0, 0.0,0.0,1.1, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1732.0, 2149.0, 0.001465, 0.057917, 0.009744, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1694.0, 1936.0, 0.000531, 0.036378, 0.00407, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1693.0, 1936.0, 0.000531, 0.036378, 0.00407, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[25.0, 52.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1701.0, 1959.0, 0.000326, 0.0237, 0.0072, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1700.0, 1959.0, 0.000326, 0.0237, 0.0072, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1652.0, 1788.0, 0.003869, 0.14, 0.002, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1645.0, 1767.0, 0.0115, 0.2541, 0.0, 400.0, 0.0,0.0,1.025, 0.0,1.0,-30.0, 30.0, 0.1 ],
[24.0, 1767.0, 0.0115, 0.2541, 0.0, 400.0, 0.0,0.0,1.025, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1656.0, 1929.0, 0.002209, 0.100333, 2.4e-05, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[14.0, 1929.0, 0.002431, 0.116667, 6e-05, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1644.0, 1766.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[12.0, 1857.0, 0.000929, 0.054167, 0.00648, 240.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 1857.0, 0.000948, 0.054167, 0.00648, 240.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 1857.0, 0.003124, 0.133, 0.0022, 100.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1691.0, 2013.0, 0.004251, 0.1313, 0.0015, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1662.0, 2013.0, 0.001786, 0.099067, 0.003675, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1731.0, 2095.0, 0.001658, 0.068, 0.0046, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1730.0, 2095.0, 0.001598, 0.0681, 0.004, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1649.0, 1775.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[32.0, 1775.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1651.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1653.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1654.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1674.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[20.0, 37.0, 0.002851, 0.13, 0.00066, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1668.0, 2182.0, 0.0029, 0.0694, 0.0107, 720.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1727.0, 2120.0, 0.000367, 0.023333, 0.0321, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1726.0, 2120.0, 0.000367, 0.023333, 0.0321, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1697.0, 1958.0, 0.000117, 0.023367, 0.01176, 720.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1643.0, 1765.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1725.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1724.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1710.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.06818, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1672.0, 1843.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1671.0, 1843.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1723.0, 2011.0, 0.005759, 0.207937, 0.001512, 32.0, 0.0,0.0,1.0375, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1722.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1721.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1720.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1719.0, 2180.0, 0.0054, 0.116, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1676.0, 1850.0, 0.000178, 0.053846, 0.0, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1675.0, 1850.0, 0.000178, 0.053846, 0.0, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1718.0, 2045.0, 0.000218, 0.01863, 0.0, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1717.0, 2046.0, 0.000218, 0.01827, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1692.0, 2045.0, 0.000175, 0.015526, 0.013338, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1663.0, 2045.0, 0.000175, 0.015526, 0.013338, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1709.0, 2195.0, 0.001558, 0.08475, 0.00336, 160.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1708.0, 2195.0, 0.001879, 0.088667, 0.00435, 160.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[5.0, 1764.0, 0.002083, 0.116667, 0.00156, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[29.0, 56.0, 0.002914, 0.127, 0.0012, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2038.0, 2096.0, 0.0022, 0.114, 0.0, 120.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1661.0, 1805.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1699.0, 2229.0, 0.000375, 0.022667, 0.00294, 720.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1698.0, 2229.0, 0.001028, 0.046333, 0.0054, 720.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1714.0, 2158.0, 0.0008, 0.0461, 0.0, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1713.0, 2158.0, 0.0008, 0.0463, 0.0, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1716.0, 2229.0, 0.0008, 0.0451, 0.0, 370.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1715.0, 2229.0, 0.0007, 0.0411, 0.0, 370.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1680.0, 1828.0, 0.002439, 0.111755, 0.000752, 120.0, 0.0,0.0,0.988943, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1641.0, 1762.0, 0.003175, 0.1308, 0.00239, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1658.0, 1801.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[21.0, 38.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1667.0, 1836.0, 0.000318, 0.02355, 0.00108, 720.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1673.0, 1835.0, 0.000328, 0.023833, 0.00168, 720.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1712.0, 2027.0, 0.0006, 0.0348, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1711.0, 2027.0, 0.0006, 0.0348, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1749.0, 1969.0, 0.000223, 0.0195, 0.004392, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1748.0, 1969.0, 0.000228, 0.019319, 0.004248, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1684.0, 1860.0, 0.000526, 0.037775, 0.0028, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1683.0, 1860.0, 0.000528, 0.0378, 0.00236, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[22.0, 39.0, 0.000706, 0.0772, 0.00092, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1660.0, 1803.0, 0.003032, 0.14, 0.0013, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1689.0, 1905.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[117.0, 1905.0, 0.002828, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[110.0, 1905.0, 0.002841, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[108.0, 1905.0, 0.002828, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1688.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[118.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[111.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[107.0, 1904.0, 0.00297, 0.137, 0.0027, 50.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1751.0, 1902.0, 0.000223, 0.0195, 0.004176, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1750.0, 1902.0, 0.000219, 0.019278, 0.00432, 720.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 1633.0, 0.002, 0.0983, 0.0, 150.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1635.0, 1633.0, 0.0014, 0.0563, 0.0, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1634.0, 1633.0, 0.0009, -0.003, 0.0, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 1631.0, 0.002, 0.0997, 0.0, 150.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1635.0, 1631.0, 0.0014, 0.0567, 0.0, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1632.0, 1631.0, 0.0008, -0.0033, 0.0, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 1628.0, 0.001271, 0.096333, 0.00115, 150.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1630.0, 1628.0, 0.001185, 0.057, 0.00115, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1629.0, 1628.0, 0.001033, -0.005, 0.00115, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1965.0, 1587.0, 6.7e-05, 0.018139, 0.00103533, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2231.0, 1587.0, 5.6e-05, -0.00171, 0.00103533, 1002.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1964.0, 1587.0, 0.000397, 0.03773, 0.00103533, 270.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1961.0, 1586.0, 6.4e-05, 0.01821, 0.00103533, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1962.0, 1586.0, 5.9e-05, -0.00176, 0.00103533, 1002.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1963.0, 1586.0, 0.000397, 0.037788, 0.00103533, 270.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2002.0, 1627.0, 8.6e-05, 0.01918, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1999.0, 1627.0, 8.8e-05, -0.00199, 0.0, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1997.0, 1627.0, 0.000652, 0.04874, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2001.0, 1626.0, 8.6e-05, 0.01918, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1998.0, 1626.0, 8.8e-05, -0.00199, 0.0, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1996.0, 1626.0, 0.000652, 0.04874, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1970.0, 1592.0, 6.6e-05, 0.018757, 0.00120233, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1592.0, 5.9e-05, -0.00301, 0.00120233, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1864.0, 1592.0, 0.000397, 0.038328, 0.00120233, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 1591.0, 6.6e-05, 0.018757, 0.00126933, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1591.0, 5.9e-05, -0.00301, 0.00126933, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1863.0, 1591.0, 0.000397, 0.038328, 0.00126933, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1772.0, 1556.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1556.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1759.0, 1556.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1772.0, 1555.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1555.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1758.0, 1555.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1855.0, 1584.0, 8.3e-05, 0.021439, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1856.0, 1584.0, 6.5e-05, -0.00326, 0.0, 400.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1957.0, 1584.0, 0.000454, 0.038229, 0.0, 400.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1813.0, 1570.0, 7.8e-05, 0.018807, 0.001336, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1812.0, 1570.0, 5.7e-05, -0.00212, 0.001336, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1811.0, 1570.0, 0.000428, 0.033328, 0.001336, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1886.0, 1573.0, 6.3e-05, 0.018623, 0.00153633, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1887.0, 1573.0, 6.3e-05, -0.00257, 0.00153633, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1884.0, 1573.0, 0.000381, 0.035269, 0.00153633, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1578.0, 5.8e-05, 0.017275, 0.002004, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1578.0, 6.9e-05, -0.00173, 0.002004, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1885.0, 1578.0, 0.000349, 0.039152, 0.002004, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2143.0, 1624.0, 0.000125, 0.02587, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2150.0, 1624.0, 9.2e-05, -0.00513, 0.0, 750.0, 0.0,0.0,1.07273, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1625.0, 1624.0, 0.000505, 0.04532, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2138.0, 1622.0, 0.000228, 0.02372, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2147.0, 1622.0, 0.000123, -0.00264, 0.0, 750.0, 0.0,0.0,1.06818, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1623.0, 1622.0, 0.000586, 0.02816, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1790.0, 1564.0, 9.6e-05, 0.0209, 0.002, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1564.0, 7.9e-05, -0.00277, 0.002, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1565.0, 1564.0, 0.000524, 0.052407, 0.002, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1790.0, 1563.0, 9.6e-05, 0.0209, 0.002, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1563.0, 7.9e-05, -0.00277, 0.002, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1565.0, 1563.0, 0.000524, 0.052407, 0.002, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 1619.0, 0.00085, 0.01, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1621.0, 1619.0, 0.0048, 0.1195, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1620.0, 1619.0, 0.0027, 0.1195, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1875.0, 1590.0, 8e-05, 0.01881, 0.0, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1874.0, 1590.0, 0.00277, -0.00232, 0.0, 1002.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1873.0, 1590.0, 0.0004, 0.037, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1974.0, 1572.0, 8e-06, 0.018685, 0.00153333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1572.0, -1e-05, -0.0033, 0.00153333, 10000.0, 0.0,0.0,1.01932, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1872.0, 1572.0, 0.000442, 0.039535, 0.00153333, 300.0, 0.0,0.0,0.978409, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2082.0, 1618.0, 0.000117, 0.02364, 0.00205, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2089.0, 1618.0, 4.2e-05, -0.00236, 0.00205, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2078.0, 1618.0, 0.000345, 0.031, 0.00205, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 1617.0, 6.6e-05, 0.022113, 0.001075, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1617.0, 9e-05, -0.00185, 0.001075, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2077.0, 1617.0, 0.000509, 0.047513, 0.001075, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 1616.0, 0.000115, 0.022847, 0.00225, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1616.0, 0.000118, -0.00186, 0.00225, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2076.0, 1616.0, 0.000507, 0.03022, 0.00225, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1786.0, 1562.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 1562.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1755.0, 1562.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1786.0, 1561.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 1561.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1754.0, 1561.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1868.0, 1615.0, 0.000105, 0.01782, 0.003375, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1867.0, 1615.0, 5.8e-05, -0.00247, 0.003375, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2072.0, 1615.0, 0.000494, 0.030927, 0.003375, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1866.0, 1614.0, 7.9e-05, 0.019153, 0.00145, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1865.0, 1614.0, 6.4e-05, -0.00314, 0.00145, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2007.0, 1614.0, 0.000335, 0.030553, 0.00145, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1568.0, 7.8e-05, 0.018079, 0.001336, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1568.0, 4.9e-05, -0.00241, 0.001336, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1569.0, 1568.0, 0.000403, 0.038458, 0.001336, 300.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1566.0, 7.4e-05, 0.018598, 0.001837, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1566.0, 5.3e-05, -0.00316, 0.001837, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1567.0, 1566.0, 0.000378, 0.039316, 0.001837, 300.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1611.0, 0.001709, 0.13125, 0.000972, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1613.0, 1611.0, 0.001024, 0.070417, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1612.0, 1611.0, 0.001075, -0.00625, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1608.0, 0.0021, 0.1588, 0.000972, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1610.0, 1608.0, 0.0012, 0.0852, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1609.0, 1608.0, 0.0013, 0.0063, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1585.0, 7.3e-05, 0.018815, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1585.0, 6e-05, -0.00139, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1881.0, 1585.0, 0.000405, 0.037565, 0.00096667, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[123.0, 1583.0, 7.4e-05, 0.018955, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1583.0, 6.1e-05, -0.00145, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1808.0, 1583.0, 0.000406, 0.037395, 0.00096667, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2056.0, 1607.0, 8.6e-05, 0.012, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2061.0, 1607.0, 8.4e-05, 0.0052, 0.0, 750.0, 0.0,0.0,1.07045, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2055.0, 1607.0, 0.00064, 0.0098, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 1588.0, 8.2e-05, 0.01899, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1588.0, 9.5e-05, 0.00187, 0.0, 750.0, 0.0,0.0,1.07045, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1967.0, 1588.0, 0.000595, 0.04896, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2050.0, 1606.0, 0.000124, 0.026467, 0.003, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2054.0, 1606.0, 8.8e-05, -0.00659, 0.003, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2049.0, 1606.0, 0.000433, 0.03668, 0.003, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2019.0, 1605.0, 6.9e-05, 0.01806, 0.000725, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2018.0, 1605.0, 8.7e-05, -0.00197, 0.000725, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2017.0, 1605.0, 0.000344, 0.03106, 0.000725, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2081.0, 1576.0, 5.9e-05, 0.017137, 0.0009, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2052.0, 1576.0, 7.4e-05, -0.0013, 0.0009, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1880.0, 1576.0, 0.000392, 0.036947, 0.0009, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1604.0, 8.3e-05, 0.019047, 0.001425, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 1604.0, 6.1e-05, -0.00317, 0.001425, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1878.0, 1604.0, 0.000339, 0.031247, 0.001425, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1582.0, 6e-05, 0.017225, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 1582.0, 7.3e-05, -0.00129, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1877.0, 1582.0, 0.000392, 0.036925, 0.00096667, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 1558.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1558.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1761.0, 1558.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 1557.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1557.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1760.0, 1557.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1787.0, 8.0, 0.000881, 0.085611, 0.000444, 180.0, 0.0,0.0,1.0625, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1646.0, 8.0, 0.000767, -0.00617, 0.000444, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[10.0, 8.0, 9.1e-05, 0.051056, 0.000444, 90.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1787.0, 7.0, 0.000881, 0.085611, 0.000444, 180.0, 0.0,0.0,1.0625, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1647.0, 7.0, 0.000767, -0.00617, 0.000444, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[9.0, 7.0, 9.1e-05, 0.051056, 0.000444, 90.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2046.0, 1603.0, 0.0, 0.04475, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1935.0, 1603.0, 0.0, -0.00462, 0.0, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2043.0, 1603.0, 0.0, 0.07026, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2096.0, 1601.0, 0.0018, 0.1243, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1602.0, 1601.0, 0.0015, 0.0698, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2041.0, 1601.0, 0.0014, -0.0077, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2179.0, 1598.0, 0.0063, 0.2671, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1600.0, 1598.0, 0.0058, 0.1401, 0.0, 400.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1599.0, 1598.0, 0.003, -0.0097, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2179.0, 1596.0, 0.0063, 0.2652, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1600.0, 1596.0, 0.0059, 0.1419, 0.0, 400.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1597.0, 1596.0, 0.0028, -0.0079, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1895.0, 1575.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1893.0, 1575.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1890.0, 1575.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1892.0, 1574.0, 9.1e-05, 0.02099, 0.0, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1891.0, 1574.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1889.0, 1574.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2033.0, 1595.0, 8.5e-05, 0.01857, 0.00183333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2035.0, 1595.0, 4.7e-05, -0.00287, 0.00183333, 1000.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2031.0, 1595.0, 0.000426, 0.03594, 0.00183333, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1981.0, 1593.0, 7.3e-05, 0.0163, 0.001, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1980.0, 1593.0, 5.4e-05, -0.001, 0.001, 1000.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1979.0, 1593.0, 0.000377, 0.03705, 0.001, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2023.0, 1594.0, 0.000116, 0.018433, 0.002075, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2025.0, 1594.0, 7.4e-05, -0.00326, 0.002075, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2022.0, 1594.0, 0.000476, 0.032887, 0.002075, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1589.0, 6.4e-05, 0.016337, 0.00120233, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 1589.0, 6.3e-05, -0.0024, 0.00120233, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1862.0, 1589.0, 0.000244, 0.030978, 0.00120233, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 1581.0, 8.5e-05, 0.018221, 0.001275, 750.0, 0.0,0.0,1.072, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1581.0, 8.5e-05, -0.00243, 0.001275, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1879.0, 1581.0, -9e-05, 0.041486, 0.001275, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 1579.0, 8.4e-05, 0.018087, 0.00135, 750.0, 0.0,0.0,1.072, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1579.0, 8.4e-05, -0.00222, 0.00135, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1580.0, 1579.0, -8e-05, 0.04158, 0.00135, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1771.0, 1560.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1560.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1757.0, 1560.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1771.0, 1559.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1559.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1756.0, 1559.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1853.0, 1571.0, 6.1e-05, 0.01713, 0.00126667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1852.0, 1571.0, 7.3e-05, -0.00142, 0.00126667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1851.0, 1571.0, 0.000408, 0.0376, 0.00126667, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1926.0, 1577.0, 5e-05, 0.01767, 0.00133333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 1577.0, 7e-05, -0.00193, 0.00133333, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1882.0, 1577.0, 0.000396, 0.03757, 0.00133333, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ]
])
ppc["gencost"] = array([
[2.0, 0.0, 0.0, 3.0, 0.0, 44.0, 0.0, 66.0, 33.0, 52.8, 26.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 44.0, 0.0, 66.0, 33.0, 52.8, 26.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 50.0, 0.0, 75.0, 37.5, 60.0, 30.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 30.0, 0.0, 45.0, 22.5, 36.0, 18.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 30.0, 0.0, 45.0, 22.5, 36.0, 18.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 400.0, 0.0, 600.0, 300.0, 480.0, 240.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 400.0, 0.0, 600.0, 300.0, 480.0, 240.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 14.4, 0.0, 21.6, 10.8, 17.28, 8.64 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 14.4, 0.0, 21.6, 10.8, 17.28, 8.64 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 16.8, 0.0, 25.2, 12.6, 20.16, 10.08 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 16.8, 0.0, 25.2, 12.6, 20.16, 10.08 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 48.0, 0.0, 72.0, 36.0, 57.6, 28.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 48.0, 0.0, 72.0, 36.0, 57.6, 28.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 60.0, 0.0, 90.0, 45.0, 72.0, 36.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 60.0, 0.0, 90.0, 45.0, 72.0, 36.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 84.0, 0.0, 126.0, 63.0, 100.8, 50.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 39.6, 0.0, 59.4, 29.7, 47.52, 23.76 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 320.0, 0.0, 480.0, 240.0, 384.0, 192.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 320.0, 0.0, 480.0, 240.0, 384.0, 192.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 403.2, 0.0, 604.8, 302.4, 483.84, 241.92 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 403.2, 0.0, 604.8, 302.4, 483.84, 241.92 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 54.0, 0.0, 81.0, 40.5, 64.8, 32.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 54.0, 0.0, 81.0, 40.5, 64.8, 32.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ]
])
return ppc | 134.249583 | 192 | 0.340559 | 69,341 | 321,662 | 1.579772 | 0.051874 | 0.408406 | 0.433118 | 0.448116 | 0.767388 | 0.763298 | 0.759501 | 0.757237 | 0.755174 | 0.749176 | 0 | 0.587492 | 0.424048 | 321,662 | 2,396 | 193 | 134.249583 | 0.003795 | 0 | 0 | 0.129382 | 0 | 0 | 0.000106 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.000417 | false | 0 | 0.000417 | 0 | 0.001252 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
078230240d46eeeb489518f04118afd312916c8a | 17,076 | py | Python | mev/api/storage_backends/tests/test_google_cloud.py | web-mev/mev-backend | bed15d399efc8cc1fd7ff2b8a3bd2dc55c83ecd8 | [
"MIT"
] | 2 | 2021-11-15T08:11:59.000Z | 2022-03-12T05:24:23.000Z | mev/api/storage_backends/tests/test_google_cloud.py | web-mev/mev-backend | bed15d399efc8cc1fd7ff2b8a3bd2dc55c83ecd8 | [
"MIT"
] | 37 | 2020-08-03T14:57:02.000Z | 2022-02-25T19:56:40.000Z | mev/api/storage_backends/tests/test_google_cloud.py | web-mev/mev-backend | bed15d399efc8cc1fd7ff2b8a3bd2dc55c83ecd8 | [
"MIT"
] | 2 | 2021-07-12T03:22:52.000Z | 2021-11-15T08:12:01.000Z | import unittest.mock as mock
import os
from api.tests.base import BaseAPITestCase
from api.models import Resource
from api.storage_backends.base import BaseStorageBackend
from api.storage_backends.google_cloud import GoogleBucketStorage
import google
DUMMY_BUCKETNAME = 'a-google-bucket'
class TestGoogleBucketStorage(BaseAPITestCase):
    """Unit tests for the GoogleBucketStorage backend.

    All Google client interactions (storage client, service account,
    filesystem existence checks) are mocked out, so no network or
    credentials are required.
    """

    def setUp(self):
        self.establish_clients()

    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    @mock.patch('api.storage_backends.google_cloud.settings')
    def test_resource_path_altered_correctly(self,
        mock_settings,
        mock_service_account,
        mock_storage,
        mock_os_exists):
        '''
        Storing a locally-available Resource should upload it and return a
        bucket path of the form
        ``<prefix>/<bucket>/<user-dir>/<owner uuid>/<resource uuid>.<basename>``.
        '''
        resources = Resource.objects.filter(owner=self.regular_user_1)
        r = resources[0]
        original_path = r.path
        owner_uuid = self.regular_user_1.pk
        expected_basename = '{uuid}.{name}'.format(
            uuid = str(r.pk),
            name = os.path.basename(original_path)
        )
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_bucket = mock.MagicMock()
        mock_upload_blob = mock.MagicMock()
        storage_backend.get_or_create_bucket = mock.MagicMock()
        storage_backend.get_or_create_bucket.return_value = mock_bucket
        storage_backend.upload_blob = mock_upload_blob

        # The file "exists" on the local filesystem, which selects the
        # direct-upload path inside `store`.
        # BUGFIX: this must be configured *before* calling `store`. The
        # original test set it afterwards, so the check only passed because
        # an unconfigured MagicMock return value is truthy.
        mock_os_exists.return_value = True

        path = storage_backend.store(r)

        mock_upload_blob.assert_called()
        storage_backend.get_or_create_bucket.assert_called()
        expected_destination = os.path.join( GoogleBucketStorage.BUCKET_PREFIX, \
            DUMMY_BUCKETNAME, \
            Resource.USER_RESOURCE_STORAGE_DIRNAME, \
            str(owner_uuid), expected_basename)
        self.assertEqual(path, expected_destination)

    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    @mock.patch('api.storage_backends.google_cloud.settings')
    def test_bucket_transfer_call(self,
        mock_settings,
        mock_service_account,
        mock_storage, mock_os_exists):
        '''
        If an analysis is performed remotely (so that files are located in
        bucket storage) and the storage backend is also bucket-based, we need to
        perform an inter-bucket transfer. Test that the proper calls are made
        '''
        resources = Resource.objects.filter(owner=self.regular_user_1)
        r = resources[0]
        original_path = r.path
        owner_uuid = self.regular_user_1.pk
        expected_basename = '{uuid}.{name}'.format(
            uuid = str(r.pk),
            name = os.path.basename(original_path)
        )
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        mock_settings.STORAGE_CREDENTIALS = '/some/dummy/path'
        storage_backend = GoogleBucketStorage()
        mock_bucket = mock.MagicMock()
        mock_upload_blob = mock.MagicMock()
        mock_interbucket_transfer = mock.MagicMock()
        storage_backend.get_or_create_bucket = mock.MagicMock()
        storage_backend.get_or_create_bucket.return_value = mock_bucket
        storage_backend.upload_blob = mock_upload_blob
        storage_backend.perform_interbucket_transfer = mock_interbucket_transfer

        # If this is False, then the Resource does not exist on the local filesystem.
        # This is what triggers the alternative behavior of performing an interbucket
        # transfer
        mock_os_exists.return_value = False

        path = storage_backend.store(r)

        mock_upload_blob.assert_not_called()
        mock_interbucket_transfer.assert_called()
        storage_backend.get_or_create_bucket.assert_called()
        expected_destination = os.path.join( GoogleBucketStorage.BUCKET_PREFIX, \
            DUMMY_BUCKETNAME, \
            Resource.USER_RESOURCE_STORAGE_DIRNAME, \
            str(owner_uuid), expected_basename)
        self.assertEqual(path, expected_destination)

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_local_resource_pull_case1(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        To validate files, we need them locally. This tests that the
        `get_local_resource_path` performs the proper calls if the resource
        is not in our local cache. Also checks that the local user cache
        directory is created (via mock)
        '''
        resources = Resource.objects.filter(owner=self.regular_user_1)
        r = resources[0]
        relative_path = BaseStorageBackend.construct_relative_path(r)
        cache_dir = '/some/cache/dir'
        mock_settings.RESOURCE_CACHE_DIR = cache_dir
        # Neither the cache directory nor the cached file exist yet.
        mock_exists.return_value = False
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_get_blob = mock.MagicMock()
        mock_blob = mock.MagicMock()
        mock_get_blob.return_value = mock_blob
        storage_backend.get_blob = mock_get_blob
        expected_final_location = os.path.join(cache_dir, relative_path)

        location = storage_backend.get_local_resource_path(r)

        mock_blob.download_to_filename.assert_called()
        mock_make_local_directory.assert_called_with(os.path.dirname(location))
        self.assertEqual(location, expected_final_location)

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_local_resource_pull_case2(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        To validate files, we need them locally. This tests that the
        `get_local_resource_path` performs the proper calls if the resource
        is not in our local cache. In this case, the user's local cache
        directory already exists.
        '''
        resources = Resource.objects.filter(owner=self.regular_user_1)
        r = resources[0]
        relative_path = BaseStorageBackend.construct_relative_path(r)
        cache_dir = '/some/cache/dir'
        mock_settings.RESOURCE_CACHE_DIR = cache_dir
        # First exists() call (cache dir) -> True; second (cached file) -> False.
        mock_exists.side_effect = [True, False]
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_get_blob = mock.MagicMock()
        mock_blob = mock.MagicMock()
        mock_blob.download_to_filename.side_effect = [None,]
        mock_get_blob.return_value = mock_blob
        storage_backend.get_blob = mock_get_blob
        expected_final_location = os.path.join(cache_dir, relative_path)

        location = storage_backend.get_local_resource_path(r)

        self.assertEqual(1,mock_blob.download_to_filename.call_count)
        mock_make_local_directory.assert_not_called()
        self.assertEqual(location, expected_final_location)

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_local_resource_pull_case3(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        To validate files, we need them locally. This tests that the
        `get_local_resource_path` performs the proper calls if the resource
        is, in fact, already in the local cache
        '''
        resources = Resource.objects.filter(owner=self.regular_user_1)
        r = resources[0]
        relative_path = BaseStorageBackend.construct_relative_path(r)
        cache_dir = '/some/cache/dir'
        mock_settings.RESOURCE_CACHE_DIR = cache_dir
        # Both the cache dir and the cached file already exist: no download.
        mock_exists.side_effect = [True, True]
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_get_blob = mock.MagicMock()
        mock_blob = mock.MagicMock()
        mock_get_blob.return_value = mock_blob
        storage_backend.get_blob = mock_get_blob
        expected_final_location = os.path.join(cache_dir, relative_path)

        location = storage_backend.get_local_resource_path(r)

        mock_blob.download_to_filename.assert_not_called()
        mock_make_local_directory.assert_not_called()
        self.assertEqual(location, expected_final_location)

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_local_resource_pull_retry(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        Tests that `get_local_resource_path` retries the download after the
        first attempt raises, and succeeds on the second attempt.
        '''
        resources = Resource.objects.filter(owner=self.regular_user_1)
        r = resources[0]
        relative_path = BaseStorageBackend.construct_relative_path(r)
        cache_dir = '/some/cache/dir'
        mock_settings.RESOURCE_CACHE_DIR = cache_dir
        mock_exists.return_value = False
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_get_blob = mock.MagicMock()
        mock_blob = mock.MagicMock()
        # First download attempt fails; the retry succeeds.
        mock_blob.download_to_filename.side_effect = [Exception('Something bad'), None]
        mock_get_blob.return_value = mock_blob
        storage_backend.get_blob = mock_get_blob
        expected_final_location = os.path.join(cache_dir, relative_path)

        location = storage_backend.get_local_resource_path(r)

        self.assertEqual(2,mock_blob.download_to_filename.call_count)
        mock_make_local_directory.assert_called_with(os.path.dirname(location))
        self.assertEqual(location, expected_final_location)

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_resource_exists_case1(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        Test the case where the object is not found since the bucket
        is not found by the google api client.
        '''
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_client = mock.MagicMock()
        storage_backend.storage_client = mock_client
        mock_client.get_bucket.side_effect = google.api_core.exceptions.NotFound('ack!')
        with self.assertRaises(google.api_core.exceptions.NotFound):
            storage_backend.get_bucket('foo')
        self.assertFalse(storage_backend.resource_exists('gs://foo/something.txt'))

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_resource_exists_case2(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        Tests the case where we don't have access to the object
        in the bucket since the bucket permissions block our access.
        Note, however, that you can encounter situations where the bucket
        access is blocked, but the actual object IS public. We handle
        that case elsewhere.
        '''
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_client = mock.MagicMock()
        storage_backend.storage_client = mock_client
        mock_client.get_bucket.side_effect = google.api_core.exceptions.Forbidden('ack!')
        with self.assertRaises(google.api_core.exceptions.Forbidden):
            storage_backend.get_bucket('foo')
        self.assertFalse(storage_backend.resource_exists('gs://foo/something.txt'))

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_resource_exists_case3(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        This mocks out the get_blob method so that it returns
        something that is not None
        '''
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_client = mock.MagicMock()
        storage_backend.storage_client = mock_client
        mock_blob = mock.MagicMock()
        mock_get_blob = mock.MagicMock()
        mock_get_blob.return_value = mock_blob
        storage_backend.get_blob = mock_get_blob
        self.assertTrue(storage_backend.resource_exists('gs://foo/something.txt'))

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_resource_exists_case4(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        This mocks out the get_blob method so that it returns
        None (i.e. the object was not found), so the resource
        should be reported as non-existent.
        '''
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_client = mock.MagicMock()
        storage_backend.storage_client = mock_client
        mock_get_blob = mock.MagicMock()
        mock_get_blob.return_value = None
        storage_backend.get_blob = mock_get_blob
        self.assertFalse(storage_backend.resource_exists('gs://foo/something.txt'))

    @mock.patch('api.storage_backends.google_cloud.make_local_directory')
    @mock.patch('api.storage_backends.google_cloud.os.path.exists')
    @mock.patch('api.storage_backends.google_cloud.settings')
    @mock.patch('api.storage_backends.google_cloud.storage')
    @mock.patch('api.storage_backends.google_cloud.service_account')
    def test_resource_exists_case5(self, \
        mock_service_account, \
        mock_storage, \
        mock_settings, \
        mock_exists, \
        mock_make_local_directory):
        '''
        Here we mock that *something* raised an exception in the process of getting
        either the bucket or the object. Hence, the get_blob method will raise an ex
        and we check that the existence method returns False appropriately.
        '''
        os.environ['STORAGE_BUCKET_NAME'] = DUMMY_BUCKETNAME
        storage_backend = GoogleBucketStorage()
        mock_client = mock.MagicMock()
        storage_backend.storage_client = mock_client
        mock_get_blob = mock.MagicMock()
        mock_get_blob.side_effect = Exception('ack')
        storage_backend.get_blob = mock_get_blob
        self.assertFalse(storage_backend.resource_exists('gs://foo/something.txt'))
07a50f8404bd7d50c6caafacfdd6b1b27f69dc73 | 158,710 | py | Python | sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/aio/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py | ankitarorabit/azure-sdk-for-python | dd90281cbad9400f8080754a5ef2f56791a5a88f | [
"MIT"
] | null | null | null | sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/aio/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py | ankitarorabit/azure-sdk-for-python | dd90281cbad9400f8080754a5ef2f56791a5a88f | [
"MIT"
] | 1 | 2021-05-31T08:56:01.000Z | 2021-05-31T08:56:01.000Z | sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/aio/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py | ankitarorabit/azure-sdk-for-python | dd90281cbad9400f8080754a5ef2f56791a5a88f | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AzureCognitiveServiceMetricsAdvisorRESTAPIOpenAPIV2OperationsMixin:
    async def get_active_series_count(
        self,
        **kwargs: Any
    ) -> "_models.UsageStats":
        """Get latest usage stats.
        Get latest usage stats.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: UsageStats, or the result of cls(response)
        :rtype: ~azure.ai.metricsadvisor.models.UsageStats
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.UsageStats"]
        # Map well-known HTTP error statuses to azure-core exception types;
        # callers may extend/override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"
        # Construct URL
        url = self.get_active_series_count.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        # Run the request through the azure-core pipeline (auth, retry, etc.).
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # failsafe_deserialize never raises; an unparsable body yields None.
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('UsageStats', pipeline_response)
        if cls:
            # Custom response hook supplied by the caller takes precedence.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_active_series_count.metadata = {'url': '/stats/latest'}  # type: ignore
    async def get_anomaly_alerting_configuration(
        self,
        configuration_id: str,
        **kwargs: Any
    ) -> "_models.AnomalyAlertingConfiguration":
        """Query a single anomaly alerting configuration.
        Query a single anomaly alerting configuration.
        :param configuration_id: anomaly alerting configuration unique id.
        :type configuration_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: AnomalyAlertingConfiguration, or the result of cls(response)
        :rtype: ~azure.ai.metricsadvisor.models.AnomalyAlertingConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyAlertingConfiguration"]
        # HTTP status -> azure-core exception mapping; extendable via kwargs.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"
        # Construct URL
        url = self.get_anomaly_alerting_configuration.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('AnomalyAlertingConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_anomaly_alerting_configuration.metadata = {'url': '/alert/anomaly/configurations/{configurationId}'}  # type: ignore
    async def update_anomaly_alerting_configuration(
        self,
        configuration_id: str,
        body: Any,
        **kwargs: Any
    ) -> "_models.AnomalyAlertingConfiguration":
        """Update anomaly alerting configuration.
        Update anomaly alerting configuration.
        :param configuration_id: anomaly alerting configuration unique id.
        :type configuration_id: str
        :param body: anomaly alerting configuration.
        :type body: any
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: AnomalyAlertingConfiguration, or the result of cls(response)
        :rtype: ~azure.ai.metricsadvisor.models.AnomalyAlertingConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyAlertingConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Partial updates use JSON merge-patch semantics (RFC 7386).
        content_type = kwargs.pop("content_type", "application/merge-patch+json")
        accept = "application/json"
        # Construct URL
        url = self.update_anomaly_alerting_configuration.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        # 'object' serialization: the body is passed through as arbitrary JSON.
        body_content = self._serialize.body(body, 'object')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('AnomalyAlertingConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    update_anomaly_alerting_configuration.metadata = {'url': '/alert/anomaly/configurations/{configurationId}'}  # type: ignore
    async def delete_anomaly_alerting_configuration(
        self,
        configuration_id: str,
        **kwargs: Any
    ) -> None:
        """Delete anomaly alerting configuration.
        Delete anomaly alerting configuration.
        :param configuration_id: anomaly alerting configuration unique id.
        :type configuration_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"
        # Construct URL
        url = self.delete_anomaly_alerting_configuration.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # A successful delete returns 204 No Content.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            raise HttpResponseError(response=response, model=error)
        if cls:
            return cls(pipeline_response, None, {})
    delete_anomaly_alerting_configuration.metadata = {'url': '/alert/anomaly/configurations/{configurationId}'}  # type: ignore
    async def create_anomaly_alerting_configuration(
        self,
        body: "_models.AnomalyAlertingConfiguration",
        **kwargs: Any
    ) -> None:
        """Create anomaly alerting configuration.
        Create anomaly alerting configuration.
        :param body: anomaly alerting configuration.
        :type body: ~azure.ai.metricsadvisor.models.AnomalyAlertingConfiguration
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.create_anomaly_alerting_configuration.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'AnomalyAlertingConfiguration')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Creation returns 201 Created; the new resource's URL is in 'Location'.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            raise HttpResponseError(response=response, model=error)
        response_headers = {}
        response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
        if cls:
            return cls(pipeline_response, None, response_headers)
    create_anomaly_alerting_configuration.metadata = {'url': '/alert/anomaly/configurations'}  # type: ignore
def get_alerts_by_anomaly_alerting_configuration(
    self,
    configuration_id: str,
    body: "_models.AlertingResultQuery",
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.AlertResultList"]:
    """Query alerts under anomaly alerting configuration.

    Query alerts under anomaly alerting configuration.

    :param configuration_id: anomaly alerting configuration unique id.
    :type configuration_id: str
    :param body: query alerting result request.
    :type body: ~azure.ai.metricsadvisor.models.AlertingResultQuery
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AlertResultList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AlertResultList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AlertResultList"]
    # Map well-known status codes to specific exception types; callers may
    # extend/override the mapping through the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the HTTP request for the first page (next_link is None) or a
        # continuation page (next_link came from the previous response).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL from the operation's metadata template.
            url = self.get_alerts_by_anomaly_alerting_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters — paging options go into the query string.
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'AlertingResultQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            # Continuation: substitute the raw next_link into the template.
            url = '{nextLink}'  # FIXME: manually edited; was '/{nextLink}'
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters — skip/maxpagesize are intentionally not
            # re-sent here; the next_link is assumed to carry paging state.
            query_parameters = {}  # type: Dict[str, Any]
            # The same query body is re-POSTed for every continuation page.
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'AlertingResultQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page; 'cls' may transform the element list.
        deserialized = self._deserialize('AlertResultList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise a typed error on a non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_alerts_by_anomaly_alerting_configuration.metadata = {'url': '/alert/anomaly/configurations/{configurationId}/alerts/query'}  # type: ignore
def get_anomalies_from_alert_by_anomaly_alerting_configuration(
    self,
    configuration_id: str,
    alert_id: str,
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.AnomalyResultList"]:
    """Query anomalies under a specific alert.

    Query anomalies under a specific alert.

    :param configuration_id: anomaly alerting configuration unique id.
    :type configuration_id: str
    :param alert_id: alert id.
    :type alert_id: str
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AnomalyResultList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyResultList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyResultList"]
    # Status-code -> exception mapping, overridable via 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request for the first page or a continuation page.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL from the operation's metadata template.
            url = self.get_anomalies_from_alert_by_anomaly_alerting_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
                'alertId': self._serialize.url("alert_id", alert_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters — paging options only on the first page.
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Continuation: next_link is used as the URL directly; format_url
            # still receives the path arguments in case the link contains
            # unexpanded placeholders.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
                'alertId': self._serialize.url("alert_id", alert_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page; 'cls' may transform the element list.
        deserialized = self._deserialize('AnomalyResultList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise a typed error on a non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_anomalies_from_alert_by_anomaly_alerting_configuration.metadata = {'url': '/alert/anomaly/configurations/{configurationId}/alerts/{alertId}/anomalies'}  # type: ignore
def get_incidents_from_alert_by_anomaly_alerting_configuration(
    self,
    configuration_id: str,
    alert_id: str,
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.IncidentResultList"]:
    """Query incidents under a specific alert.

    Query incidents under a specific alert.

    :param configuration_id: anomaly alerting configuration unique id.
    :type configuration_id: str
    :param alert_id: alert id.
    :type alert_id: str
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IncidentResultList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.IncidentResultList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IncidentResultList"]
    # Status-code -> exception mapping, overridable via 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request for the first page or a continuation page.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL from the operation's metadata template.
            url = self.get_incidents_from_alert_by_anomaly_alerting_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
                'alertId': self._serialize.url("alert_id", alert_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters — paging options only on the first page.
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Continuation: next_link is used as the URL directly.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
                'alertId': self._serialize.url("alert_id", alert_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page; 'cls' may transform the element list.
        deserialized = self._deserialize('IncidentResultList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise a typed error on a non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_incidents_from_alert_by_anomaly_alerting_configuration.metadata = {'url': '/alert/anomaly/configurations/{configurationId}/alerts/{alertId}/incidents'}  # type: ignore
async def get_anomaly_detection_configuration(
    self,
    configuration_id: str,
    **kwargs: Any
) -> "_models.AnomalyDetectionConfiguration":
    """Query a single anomaly detection configuration.

    Query a single anomaly detection configuration.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AnomalyDetectionConfiguration, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.AnomalyDetectionConfiguration
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyDetectionConfiguration"]
    # Status-code -> exception mapping; callers may override via 'error_map'.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Resolve the URL template with the endpoint and configuration id.
    request_url = self._client.format_url(
        self.get_anomaly_detection_configuration.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        configurationId=self._serialize.url("configuration_id", configuration_id, 'str'),
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, headers)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    if http_response.status_code != 200:
        map_error(status_code=http_response.status_code, response=http_response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_response)
        raise HttpResponseError(response=http_response, model=error)

    result = self._deserialize('AnomalyDetectionConfiguration', pipeline_response)
    if cls:
        return cls(pipeline_response, result, {})
    return result
get_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}'}  # type: ignore
async def update_anomaly_detection_configuration(
    self,
    configuration_id: str,
    body: Any,
    **kwargs: Any
) -> "_models.AnomalyDetectionConfiguration":
    """Update anomaly detection configuration.

    Update anomaly detection configuration.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param body: anomaly detection configuration.
    :type body: any
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AnomalyDetectionConfiguration, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.AnomalyDetectionConfiguration
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyDetectionConfiguration"]
    # Status-code -> exception mapping; callers may override via 'error_map'.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    # PATCH uses merge-patch semantics unless the caller overrides it.
    content_type = kwargs.pop("content_type", "application/merge-patch+json")
    accept = "application/json"

    # Resolve the URL template with the endpoint and configuration id.
    request_url = self._client.format_url(
        self.update_anomaly_detection_configuration.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        configurationId=self._serialize.url("configuration_id", configuration_id, 'str'),
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the free-form body and issue the PATCH.
    request = self._client.patch(
        request_url,
        query_params,
        headers,
        content=self._serialize.body(body, 'object'),
    )
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    if http_response.status_code != 200:
        map_error(status_code=http_response.status_code, response=http_response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_response)
        raise HttpResponseError(response=http_response, model=error)

    result = self._deserialize('AnomalyDetectionConfiguration', pipeline_response)
    if cls:
        return cls(pipeline_response, result, {})
    return result
update_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}'}  # type: ignore
async def delete_anomaly_detection_configuration(
    self,
    configuration_id: str,
    **kwargs: Any
) -> None:
    """Delete anomaly detection configuration.

    Delete anomaly detection configuration.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Status-code -> exception mapping; callers may override via 'error_map'.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Resolve the URL template with the endpoint and configuration id.
    request_url = self._client.format_url(
        self.delete_anomaly_detection_configuration.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        configurationId=self._serialize.url("configuration_id", configuration_id, 'str'),
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.delete(request_url, query_params, headers)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    # Only 204 No Content indicates a successful delete.
    if http_response.status_code != 204:
        map_error(status_code=http_response.status_code, response=http_response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_response)
        raise HttpResponseError(response=http_response, model=error)

    if cls:
        return cls(pipeline_response, None, {})
delete_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}'}  # type: ignore
async def create_anomaly_detection_configuration(
    self,
    body: "_models.AnomalyDetectionConfiguration",
    **kwargs: Any
) -> None:
    """Create anomaly detection configuration.

    Create anomaly detection configuration.

    :param body: anomaly detection configuration.
    :type body: ~azure.ai.metricsadvisor.models.AnomalyDetectionConfiguration
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Status-code -> exception mapping; callers may override via 'error_map'.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the URL template with the service endpoint.
    request_url = self._client.format_url(
        self.create_anomaly_detection_configuration.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the configuration model and issue the POST.
    request = self._client.post(
        request_url,
        query_params,
        headers,
        content=self._serialize.body(body, 'AnomalyDetectionConfiguration'),
    )
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    # Only 201 Created indicates success for this operation.
    if http_response.status_code != 201:
        map_error(status_code=http_response.status_code, response=http_response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_response)
        raise HttpResponseError(response=http_response, model=error)

    # Expose the Location header (URL of the created resource) to 'cls'.
    response_headers = {
        'Location': self._deserialize('str', http_response.headers.get('Location')),
    }
    if cls:
        return cls(pipeline_response, None, response_headers)
create_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations'}  # type: ignore
def get_anomaly_alerting_configurations_by_anomaly_detection_configuration(
    self,
    configuration_id: str,
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.AnomalyAlertingConfigurationList"]:
    """List all anomaly alerting configurations for specific anomaly detection configuration.

    List all anomaly alerting configurations for specific anomaly detection configuration.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AnomalyAlertingConfigurationList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyAlertingConfigurationList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyAlertingConfigurationList"]
    # Status-code -> exception mapping, overridable via 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request. NOTE(review): extract_data below always
        # returns None as the continuation token, so the 'else' branch here
        # is never reached — the pager yields a single page.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL from the operation's metadata template.
            url = self.get_anomaly_alerting_configurations_by_anomaly_detection_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize the page; returning None stops pagination after it.
        deserialized = self._deserialize('AnomalyAlertingConfigurationList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise a typed error on a non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_anomaly_alerting_configurations_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/alert/anomaly/configurations'}  # type: ignore
def get_series_by_anomaly_detection_configuration(
    self,
    configuration_id: str,
    body: "_models.DetectionSeriesQuery",
    **kwargs: Any
) -> AsyncIterable["_models.SeriesResultList"]:
    """Query series enriched by anomaly detection.

    Query series enriched by anomaly detection.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param body: query series detection result request.
    :type body: ~azure.ai.metricsadvisor.models.DetectionSeriesQuery
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SeriesResultList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.SeriesResultList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeriesResultList"]
    # Status-code -> exception mapping, overridable via 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the request. NOTE(review): extract_data below always returns
        # None as the continuation token, so the 'else' branch is dead code;
        # it also issues a GET with a request body, which looks like a
        # generator artifact — confirm before relying on it.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL from the operation's metadata template.
            url = self.get_series_by_anomaly_detection_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'DetectionSeriesQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'DetectionSeriesQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Deserialize the page; returning None stops pagination after it.
        deserialized = self._deserialize('SeriesResultList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise a typed error on a non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_series_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/series/query'}  # type: ignore
def get_anomalies_by_anomaly_detection_configuration(
    self,
    configuration_id: str,
    body: "_models.DetectionAnomalyResultQuery",
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.AnomalyResultList"]:
    """Query anomalies under anomaly detection configuration.

    Query anomalies under anomaly detection configuration.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param body: query detection anomaly result request.
    :type body: ~azure.ai.metricsadvisor.models.DetectionAnomalyResultQuery
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AnomalyResultList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyResultList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyResultList"]
    # Status-code -> exception mapping, overridable via 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the POST request for the first page (next_link is None) or a
        # continuation page (next_link came from the previous response).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL from the operation's metadata template.
            url = self.get_anomalies_by_anomaly_detection_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters — paging options go into the query string.
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'DetectionAnomalyResultQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            # Continuation: substitute the raw next_link into the template.
            url = '{nextLink}'  # FIXME: manually edited; was '/{nextLink}'
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters — skip/maxpagesize are intentionally not
            # re-sent; the next_link is assumed to carry paging state.
            query_parameters = {}  # type: Dict[str, Any]
            # The same query body is re-POSTed for every continuation page.
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'DetectionAnomalyResultQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page; 'cls' may transform the element list.
        deserialized = self._deserialize('AnomalyResultList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise a typed error on a non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_anomalies_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/anomalies/query'}  # type: ignore
def get_dimension_of_anomalies_by_anomaly_detection_configuration(
    self,
    configuration_id: str,
    body: "_models.AnomalyDimensionQuery",
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.AnomalyDimensionList"]:
    """Query dimension values of anomalies.

    Query dimension values of anomalies.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param body: query dimension values request.
    :type body: ~azure.ai.metricsadvisor.models.AnomalyDimensionQuery
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AnomalyDimensionList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyDimensionList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AnomalyDimensionList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the HTTP request for one page. The first page POSTs the query
        # body to the operation URL; later pages POST the same body to the
        # service-provided nextLink.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL for the first page.
            url = self.get_dimension_of_anomalies_by_anomaly_detection_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters ($skip / $maxpagesize apply to the first page only).
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'AnomalyDimensionQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            # nextLink is already an absolute URL from the service, so the
            # template has no leading slash.
            url = '{nextLink}'  # FIXME: manually edited; was '/{nextLink}'
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters (none; paging state is encoded in nextLink).
            query_parameters = {}  # type: Dict[str, Any]
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'AnomalyDimensionQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next link, items) to the pager.
        deserialized = self._deserialize('AnomalyDimensionList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_dimension_of_anomalies_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/anomalies/dimension/query'}  # type: ignore
def get_incidents_by_anomaly_detection_configuration(
    self,
    configuration_id: str,
    body: "_models.DetectionIncidentResultQuery",
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.IncidentResultList"]:
    """Query incidents under anomaly detection configuration.

    Query incidents under anomaly detection configuration.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param body: query detection incident result request.
    :type body: ~azure.ai.metricsadvisor.models.DetectionIncidentResultQuery
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IncidentResultList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.IncidentResultList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IncidentResultList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL for the first page and POST the query body to it.
            url = self.get_incidents_by_anomaly_detection_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'DetectionIncidentResultQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            # Subsequent pages use the service-provided nextLink directly.
            # NOTE(review): this branch sends the serialized query body with a
            # GET request (unlike the POST above); this looks like manually
            # edited generator output — confirm the service accepts it before
            # changing.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'DetectionIncidentResultQuery')
            body_content_kwargs['content'] = body_content
            request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next link, items) to the pager.
        deserialized = self._deserialize('IncidentResultList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_incidents_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/incidents/query'}  # type: ignore
def get_incidents_by_anomaly_detection_configuration_next_pages(
    self,
    configuration_id: str,
    maxpagesize: Optional[int] = None,
    token: Optional[str] = None,
    **kwargs: Any
) -> AsyncIterable["_models.IncidentResultList"]:
    """Query incidents under anomaly detection configuration.

    Query incidents under anomaly detection configuration.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :param token: the token for getting the next page.
    :type token: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IncidentResultList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.IncidentResultList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IncidentResultList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request for one page; paging state travels in the
        # $token query parameter (first page) or the nextLink URL (later pages).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL for the first page.
            url = self.get_incidents_by_anomaly_detection_configuration_next_pages.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            if token is not None:
                query_parameters['$token'] = self._serialize.query("token", token, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Subsequent pages use the service-provided nextLink directly.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next link, items) to the pager.
        deserialized = self._deserialize('IncidentResultList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_incidents_by_anomaly_detection_configuration_next_pages.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/incidents/query'}  # type: ignore
def get_root_cause_of_incident_by_anomaly_detection_configuration(
    self,
    configuration_id: str,
    incident_id: str,
    **kwargs: Any
) -> AsyncIterable["_models.RootCauseList"]:
    """Query root cause for incident.

    Query root cause for incident.

    :param configuration_id: anomaly detection configuration unique id.
    :type configuration_id: str
    :param incident_id: incident id.
    :type incident_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either RootCauseList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.RootCauseList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.RootCauseList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.get_root_cause_of_incident_by_anomaly_detection_configuration.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
                'incidentId': self._serialize.url("incident_id", incident_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'configurationId': self._serialize.url("configuration_id", configuration_id, 'str'),
                'incidentId': self._serialize.url("incident_id", incident_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # This operation is single-page: the continuation token is always None,
        # so the pager stops after the first response.
        deserialized = self._deserialize('RootCauseList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch the page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_root_cause_of_incident_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/incidents/{incidentId}/rootCause'}  # type: ignore
async def create_credential(
    self,
    body: "_models.DataSourceCredential",
    **kwargs: Any
) -> None:
    """Create a new data source credential.

    Create a new data source credential.

    :param body: Create data source credential request.
    :type body: ~azure.ai.metricsadvisor.models.DataSourceCredential
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the request URL from the operation metadata and configured endpoint.
    request_url = self._client.format_url(
        self.create_credential.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
    )

    query_params = {}  # type: Dict[str, Any]
    header_params = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    serialized_body = self._serialize.body(body, 'DataSourceCredential')
    request = self._client.post(request_url, query_params, header_params, content=serialized_body)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # The service answers 201 Created; anything else is surfaced as an error.
    if response.status_code != 201:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
        raise HttpResponseError(response=response, model=error)

    # Location header carries the URL of the newly created credential.
    response_headers = {
        'Location': self._deserialize('str', response.headers.get('Location')),
    }
    if cls:
        return cls(pipeline_response, None, response_headers)
create_credential.metadata = {'url': '/credentials'}  # type: ignore
def list_credentials(
    self,
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.DataSourceCredentialList"]:
    """List all credentials.

    List all credentials.

    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DataSourceCredentialList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.DataSourceCredentialList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataSourceCredentialList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL for the first page.
            url = self.list_credentials.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters ($skip / $maxpagesize apply to the first page only).
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Subsequent pages use the service-provided nextLink directly.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next link, items) to the pager.
        deserialized = self._deserialize('DataSourceCredentialList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_credentials.metadata = {'url': '/credentials'}  # type: ignore
async def update_credential(
    self,
    credential_id: str,
    body: "_models.DataSourceCredentialPatch",
    **kwargs: Any
) -> "_models.DataSourceCredential":
    """Update a data source credential.

    Update a data source credential.

    :param credential_id: Data source credential unique ID.
    :type credential_id: str
    :param body: Update data source credential request.
    :type body: ~azure.ai.metricsadvisor.models.DataSourceCredentialPatch
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DataSourceCredential, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.DataSourceCredential
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataSourceCredential"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    # PATCH uses the merge-patch media type; only the supplied fields change.
    content_type = kwargs.pop("content_type", "application/merge-patch+json")
    accept = "application/json"

    # Resolve the request URL from the operation metadata.
    request_url = self._client.format_url(
        self.update_credential.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        credentialId=self._serialize.url("credential_id", credential_id, 'str'),
    )

    query_params = {}  # type: Dict[str, Any]
    header_params = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    serialized_body = self._serialize.body(body, 'DataSourceCredentialPatch')
    request = self._client.patch(request_url, query_params, header_params, content=serialized_body)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('DataSourceCredential', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_credential.metadata = {'url': '/credentials/{credentialId}'}  # type: ignore
async def delete_credential(
    self,
    credential_id: str,
    **kwargs: Any
) -> None:
    """Delete a data source credential.

    Delete a data source credential.

    :param credential_id: Data source credential unique ID.
    :type credential_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Resolve the request URL from the operation metadata.
    request_url = self._client.format_url(
        self.delete_credential.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        credentialId=self._serialize.url("credential_id", credential_id, 'str'),
    )

    query_params = {}  # type: Dict[str, Any]
    header_params = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.delete(request_url, query_params, header_params)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Success is 204 No Content; everything else maps to a typed error.
    if response.status_code != 204:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
        raise HttpResponseError(response=response, model=error)

    if cls:
        return cls(pipeline_response, None, {})
delete_credential.metadata = {'url': '/credentials/{credentialId}'}  # type: ignore
async def get_credential(
    self,
    credential_id: str,
    **kwargs: Any
) -> "_models.DataSourceCredential":
    """Get a data source credential.

    Get a data source credential.

    :param credential_id: Data source credential unique ID.
    :type credential_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DataSourceCredential, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.DataSourceCredential
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataSourceCredential"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Resolve the request URL from the operation metadata.
    request_url = self._client.format_url(
        self.get_credential.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        credentialId=self._serialize.url("credential_id", credential_id, 'str'),
    )

    query_params = {}  # type: Dict[str, Any]
    header_params = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, header_params)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('DataSourceCredential', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_credential.metadata = {'url': '/credentials/{credentialId}'}  # type: ignore
def list_data_feeds(
    self,
    data_feed_name: Optional[str] = None,
    data_source_type: Optional[Union[str, "_models.DataSourceType"]] = None,
    granularity_name: Optional[Union[str, "_models.Granularity"]] = None,
    status: Optional[Union[str, "_models.EntityStatus"]] = None,
    creator: Optional[str] = None,
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.DataFeedList"]:
    """List all data feeds.

    List all data feeds.

    :param data_feed_name: filter data feed by its name.
    :type data_feed_name: str
    :param data_source_type: filter data feed by its source type.
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param granularity_name: filter data feed by its granularity.
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param status: filter data feed by its status.
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param creator: filter data feed by its creator.
    :type creator: str
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DataFeedList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.DataFeedList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataFeedList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL for the first page.
            url = self.list_data_feeds.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters: each optional filter is only added when supplied.
            query_parameters = {}  # type: Dict[str, Any]
            if data_feed_name is not None:
                query_parameters['dataFeedName'] = self._serialize.query("data_feed_name", data_feed_name, 'str')
            if data_source_type is not None:
                query_parameters['dataSourceType'] = self._serialize.query("data_source_type", data_source_type, 'str')
            if granularity_name is not None:
                query_parameters['granularityName'] = self._serialize.query("granularity_name", granularity_name, 'str')
            if status is not None:
                query_parameters['status'] = self._serialize.query("status", status, 'str')
            if creator is not None:
                query_parameters['creator'] = self._serialize.query("creator", creator, 'str')
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Subsequent pages use the service-provided nextLink directly.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next link, items) to the pager.
        deserialized = self._deserialize('DataFeedList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_data_feeds.metadata = {'url': '/dataFeeds'}  # type: ignore
async def create_data_feed(
    self,
    body: "_models.DataFeedDetail",
    **kwargs: Any
) -> None:
    """Create a new data feed.

    Create a new data feed.

    :param body: parameters to create a data feed.
    :type body: ~azure.ai.metricsadvisor.models.DataFeedDetail
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the request URL from the operation metadata and configured endpoint.
    request_url = self._client.format_url(
        self.create_data_feed.metadata['url'],  # type: ignore
        endpoint=self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
    )

    query_params = {}  # type: Dict[str, Any]
    header_params = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    serialized_body = self._serialize.body(body, 'DataFeedDetail')
    request = self._client.post(request_url, query_params, header_params, content=serialized_body)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # The service answers 201 Created; anything else is surfaced as an error.
    if response.status_code != 201:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
        raise HttpResponseError(response=response, model=error)

    # Location header carries the URL of the newly created data feed.
    response_headers = {
        'Location': self._deserialize('str', response.headers.get('Location')),
    }
    if cls:
        return cls(pipeline_response, None, response_headers)
create_data_feed.metadata = {'url': '/dataFeeds'}  # type: ignore
async def get_data_feed_by_id(
    self,
    data_feed_id: str,
    **kwargs: Any
) -> "_models.DataFeedDetail":
    """Get a data feed by its id.

    Get a data feed by its id.

    :param data_feed_id: The data feed unique id.
    :type data_feed_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DataFeedDetail, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.DataFeedDetail
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataFeedDetail"]
    # Well-known status codes map onto azure-core exception types; callers
    # may extend or override the mapping through the 'error_map' keyword.
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the operation's URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'dataFeedId': self._serialize.url("data_feed_id", data_feed_id, 'str'),
    }
    request_url = self._client.format_url(
        self.get_data_feed_by_id.metadata['url'],  # type: ignore
        **path_args
    )

    # This operation takes no query parameters.
    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(request_url, query_params, headers)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code != 200:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    result = self._deserialize('DataFeedDetail', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
get_data_feed_by_id.metadata = {'url': '/dataFeeds/{dataFeedId}'}  # type: ignore
async def update_data_feed(
    self,
    data_feed_id: str,
    body: Any,
    **kwargs: Any
) -> "_models.DataFeedDetail":
    """Update a data feed.

    Update a data feed.

    :param data_feed_id: The data feed unique id.
    :type data_feed_id: str
    :param body: parameters to update a data feed.
    :type body: any
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DataFeedDetail, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.DataFeedDetail
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataFeedDetail"]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    # Partial updates go out as JSON merge-patch unless overridden.
    content_type = kwargs.pop("content_type", "application/merge-patch+json")
    accept = "application/json"

    # Expand the URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'dataFeedId': self._serialize.url("data_feed_id", data_feed_id, 'str'),
    }
    request_url = self._client.format_url(
        self.update_data_feed.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # The patch body is free-form, so it is serialized as a plain object.
    serialized_body = self._serialize.body(body, 'object')
    request = self._client.patch(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code != 200:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    result = self._deserialize('DataFeedDetail', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
update_data_feed.metadata = {'url': '/dataFeeds/{dataFeedId}'}  # type: ignore
async def delete_data_feed(
    self,
    data_feed_id: str,
    **kwargs: Any
) -> None:
    """Delete a data feed.

    Delete a data feed.

    :param data_feed_id: The data feed unique id.
    :type data_feed_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'dataFeedId': self._serialize.url("data_feed_id", data_feed_id, 'str'),
    }
    request_url = self._client.format_url(
        self.delete_data_feed.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(request_url, query_params, headers)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    # A successful delete returns 204 No Content.
    if http_resp.status_code != 204:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    if cls:
        return cls(pipeline_resp, None, {})
delete_data_feed.metadata = {'url': '/dataFeeds/{dataFeedId}'}  # type: ignore
async def get_metric_feedback(
    self,
    feedback_id: str,
    **kwargs: Any
) -> "_models.MetricFeedback":
    """Get a metric feedback by its id.

    Get a metric feedback by its id.

    :param feedback_id: the unique feedback ID.
    :type feedback_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MetricFeedback, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.MetricFeedback
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricFeedback"]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'feedbackId': self._serialize.url("feedback_id", feedback_id, 'str'),
    }
    request_url = self._client.format_url(
        self.get_metric_feedback.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(request_url, query_params, headers)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code != 200:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    result = self._deserialize('MetricFeedback', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
get_metric_feedback.metadata = {'url': '/feedback/metric/{feedbackId}'}  # type: ignore
def list_metric_feedbacks(
    self,
    body: "_models.MetricFeedbackFilter",
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.MetricFeedbackList"]:
    """List feedback on the given metric.

    List feedback on the given metric.

    :param body: metric feedback filter.
    :type body: ~azure.ai.metricsadvisor.models.MetricFeedbackFilter
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either MetricFeedbackList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.MetricFeedbackList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricFeedbackList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the POST request for either the first page (no next_link)
        # or a continuation page (next_link supplied by the service).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_metric_feedbacks.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters; paging options only apply to the first request.
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'MetricFeedbackFilter')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            url = '{nextLink}'  # FIXME: manually edited; was '/{nextLink}' (would double the leading slash)
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            # The filter body is re-sent on every continuation request.
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'MetricFeedbackFilter')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, async item list).
        deserialized = self._deserialize('MetricFeedbackList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page; raise on any non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_metric_feedbacks.metadata = {'url': '/feedback/metric/query'}  # type: ignore
async def create_metric_feedback(
    self,
    body: "_models.MetricFeedback",
    **kwargs: Any
) -> None:
    """Create a new metric feedback.

    Create a new metric feedback.

    :param body: metric feedback.
    :type body: ~azure.ai.metricsadvisor.models.MetricFeedback
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with the endpoint.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
    }
    request_url = self._client.format_url(
        self.create_metric_feedback.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    serialized_body = self._serialize.body(body, 'MetricFeedback')
    request = self._client.post(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    # Creation returns 201 with the new resource's URL in the Location header.
    if http_resp.status_code != 201:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    resp_headers = {
        'Location': self._deserialize('str', http_resp.headers.get('Location')),
    }
    if cls:
        return cls(pipeline_resp, None, resp_headers)
create_metric_feedback.metadata = {'url': '/feedback/metric'}  # type: ignore
def list_hooks(
    self,
    hook_name: Optional[str] = None,
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.HookList"]:
    """List all hooks.

    List all hooks.

    :param hook_name: filter hook by its name.
    :type hook_name: str
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either HookList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.HookList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.HookList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request for either the first page (no next_link)
        # or a continuation page (next_link supplied by the service).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_hooks.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters; filtering and paging apply to the first request only.
            query_parameters = {}  # type: Dict[str, Any]
            if hook_name is not None:
                query_parameters['hookName'] = self._serialize.query("hook_name", hook_name, 'str')
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # The continuation token is a full URL; only the endpoint needs expanding.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, async item list).
        deserialized = self._deserialize('HookList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page; raise on any non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_hooks.metadata = {'url': '/hooks'}  # type: ignore
async def create_hook(
    self,
    body: "_models.HookInfo",
    **kwargs: Any
) -> None:
    """Create a new hook.

    Create a new hook.

    :param body: Create hook request.
    :type body: ~azure.ai.metricsadvisor.models.HookInfo
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with the endpoint.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
    }
    request_url = self._client.format_url(
        self.create_hook.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    serialized_body = self._serialize.body(body, 'HookInfo')
    request = self._client.post(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    # Creation returns 201 with the new resource's URL in the Location header.
    if http_resp.status_code != 201:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    resp_headers = {
        'Location': self._deserialize('str', http_resp.headers.get('Location')),
    }
    if cls:
        return cls(pipeline_resp, None, resp_headers)
create_hook.metadata = {'url': '/hooks'}  # type: ignore
async def get_hook(
    self,
    hook_id: str,
    **kwargs: Any
) -> "_models.HookInfo":
    """Get a hook by its id.

    Get a hook by its id.

    :param hook_id: Hook unique ID.
    :type hook_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: HookInfo, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.HookInfo
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.HookInfo"]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'hookId': self._serialize.url("hook_id", hook_id, 'str'),
    }
    request_url = self._client.format_url(
        self.get_hook.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(request_url, query_params, headers)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code != 200:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    result = self._deserialize('HookInfo', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
get_hook.metadata = {'url': '/hooks/{hookId}'}  # type: ignore
async def update_hook(
    self,
    hook_id: str,
    body: Any,
    **kwargs: Any
) -> "_models.HookInfo":
    """Update a hook.

    Update a hook.

    :param hook_id: Hook unique ID.
    :type hook_id: str
    :param body: Update hook request.
    :type body: any
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: HookInfo, or the result of cls(response)
    :rtype: ~azure.ai.metricsadvisor.models.HookInfo
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.HookInfo"]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    # Partial updates go out as JSON merge-patch unless overridden.
    content_type = kwargs.pop("content_type", "application/merge-patch+json")
    accept = "application/json"

    # Expand the URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'hookId': self._serialize.url("hook_id", hook_id, 'str'),
    }
    request_url = self._client.format_url(
        self.update_hook.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # The patch body is free-form, so it is serialized as a plain object.
    serialized_body = self._serialize.body(body, 'object')
    request = self._client.patch(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code != 200:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    result = self._deserialize('HookInfo', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
update_hook.metadata = {'url': '/hooks/{hookId}'}  # type: ignore
async def delete_hook(
    self,
    hook_id: str,
    **kwargs: Any
) -> None:
    """Delete a hook.

    Delete a hook.

    :param hook_id: Hook unique ID.
    :type hook_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'hookId': self._serialize.url("hook_id", hook_id, 'str'),
    }
    request_url = self._client.format_url(
        self.delete_hook.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(request_url, query_params, headers)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    # A successful delete returns 204 No Content.
    if http_resp.status_code != 204:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    if cls:
        return cls(pipeline_resp, None, {})
delete_hook.metadata = {'url': '/hooks/{hookId}'}  # type: ignore
def get_data_feed_ingestion_status(
    self,
    data_feed_id: str,
    body: "_models.IngestionStatusQueryOptions",
    skip: Optional[int] = None,
    maxpagesize: Optional[int] = None,
    **kwargs: Any
) -> AsyncIterable["_models.IngestionStatusList"]:
    """Get data ingestion status by data feed.

    Get data ingestion status by data feed.

    :param data_feed_id: The data feed unique id.
    :type data_feed_id: str
    :param body: The query time range.
    :type body: ~azure.ai.metricsadvisor.models.IngestionStatusQueryOptions
    :param skip: for paging, skipped number.
    :type skip: int
    :param maxpagesize: the maximum number of items in one page.
    :type maxpagesize: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IngestionStatusList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.IngestionStatusList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IngestionStatusList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the POST request for either the first page (no next_link)
        # or a continuation page (next_link supplied by the service).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.get_data_feed_ingestion_status.metadata['url']  # type: ignore
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'dataFeedId': self._serialize.url("data_feed_id", data_feed_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters; paging options only apply to the first request.
            query_parameters = {}  # type: Dict[str, Any]
            if skip is not None:
                query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
            if maxpagesize is not None:
                query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'IngestionStatusQueryOptions')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            url = '{nextLink}'  # FIXME: manually edited; was '/{nextLink}' (would double the leading slash)
            path_format_arguments = {
                'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            # The query-options body is re-sent on every continuation request.
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(body, 'IngestionStatusQueryOptions')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, async item list).
        deserialized = self._deserialize('IngestionStatusList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page; raise on any non-200 response.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_data_feed_ingestion_status.metadata = {'url': '/dataFeeds/{dataFeedId}/ingestionStatus/query'}  # type: ignore
async def reset_data_feed_ingestion_status(
    self,
    data_feed_id: str,
    body: "_models.IngestionProgressResetOptions",
    **kwargs: Any
) -> None:
    """Reset data ingestion status by data feed to backfill data.

    Reset data ingestion status by data feed to backfill data.

    :param data_feed_id: The data feed unique id.
    :type data_feed_id: str
    :param body: The backfill time range.
    :type body: ~azure.ai.metricsadvisor.models.IngestionProgressResetOptions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    err_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    err_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with endpoint and path arguments.
    path_args = {
        'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        'dataFeedId': self._serialize.url("data_feed_id", data_feed_id, 'str'),
    }
    request_url = self._client.format_url(
        self.reset_data_feed_ingestion_status.metadata['url'],  # type: ignore
        **path_args
    )

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    serialized_body = self._serialize.body(body, 'IngestionProgressResetOptions')
    request = self._client.post(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    # A successful reset returns 204 No Content.
    if http_resp.status_code != 204:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=err_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorCode, http_resp)
        raise HttpResponseError(response=http_resp, model=error)

    if cls:
        return cls(pipeline_resp, None, {})
reset_data_feed_ingestion_status.metadata = {'url': '/dataFeeds/{dataFeedId}/ingestionProgress/reset'}  # type: ignore
async def get_ingestion_progress(
self,
data_feed_id: str,
**kwargs: Any
) -> "_models.DataFeedIngestionProgress":
"""Get data last success ingestion job timestamp by data feed.
Get data last success ingestion job timestamp by data feed.
:param data_feed_id: The data feed unique id.
:type data_feed_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DataFeedIngestionProgress, or the result of cls(response)
:rtype: ~azure.ai.metricsadvisor.models.DataFeedIngestionProgress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataFeedIngestionProgress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.get_ingestion_progress.metadata['url'] # type: ignore
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'dataFeedId': self._serialize.url("data_feed_id", data_feed_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DataFeedIngestionProgress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_ingestion_progress.metadata = {'url': '/dataFeeds/{dataFeedId}/ingestionProgress'} # type: ignore
def get_metric_data(
self,
metric_id: str,
body: "_models.MetricDataQueryOptions",
**kwargs: Any
) -> AsyncIterable["_models.MetricDataList"]:
"""Get time series data from metric.
Get time series data from metric.
:param metric_id: metric unique id.
:type metric_id: str
:param body: query time series data condition.
:type body: ~azure.ai.metricsadvisor.models.MetricDataQueryOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MetricDataList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.MetricDataList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MetricDataList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_metric_data.metadata['url'] # type: ignore
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'metricId': self._serialize.url("metric_id", metric_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MetricDataQueryOptions')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'metricId': self._serialize.url("metric_id", metric_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MetricDataQueryOptions')
body_content_kwargs['content'] = body_content
request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('MetricDataList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_metric_data.metadata = {'url': '/metrics/{metricId}/data/query'} # type: ignore
def get_metric_series(
self,
metric_id: str,
body: "_models.MetricSeriesQueryOptions",
skip: Optional[int] = None,
maxpagesize: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.MetricSeriesList"]:
"""List series (dimension combinations) from metric.
List series (dimension combinations) from metric.
:param metric_id: metric unique id.
:type metric_id: str
:param body: filter to query series.
:type body: ~azure.ai.metricsadvisor.models.MetricSeriesQueryOptions
:param skip: for paging, skipped number.
:type skip: int
:param maxpagesize: the maximum number of items in one page.
:type maxpagesize: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MetricSeriesList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.MetricSeriesList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MetricSeriesList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_metric_series.metadata['url'] # type: ignore
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'metricId': self._serialize.url("metric_id", metric_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
if maxpagesize is not None:
query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MetricSeriesQueryOptions')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = '{nextLink}' # FIXME: manually edited; was '/{nextLink}'
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MetricSeriesQueryOptions')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('MetricSeriesList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_metric_series.metadata = {'url': '/metrics/{metricId}/series/query'} # type: ignore
def get_metric_dimension(
self,
metric_id: str,
body: "_models.MetricDimensionQueryOptions",
skip: Optional[int] = None,
maxpagesize: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.MetricDimensionList"]:
"""List dimension from certain metric.
List dimension from certain metric.
:param metric_id: metric unique id.
:type metric_id: str
:param body: query dimension option.
:type body: ~azure.ai.metricsadvisor.models.MetricDimensionQueryOptions
:param skip: for paging, skipped number.
:type skip: int
:param maxpagesize: the maximum number of items in one page.
:type maxpagesize: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MetricDimensionList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.MetricDimensionList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MetricDimensionList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_metric_dimension.metadata['url'] # type: ignore
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'metricId': self._serialize.url("metric_id", metric_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
if maxpagesize is not None:
query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MetricDimensionQueryOptions')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = '{nextLink}' # FIXME: manually edited; was '/{nextLink}'
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MetricDimensionQueryOptions')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('MetricDimensionList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_metric_dimension.metadata = {'url': '/metrics/{metricId}/dimension/query'} # type: ignore
def get_anomaly_detection_configurations_by_metric(
self,
metric_id: str,
skip: Optional[int] = None,
maxpagesize: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.AnomalyDetectionConfigurationList"]:
"""List all anomaly detection configurations for specific metric.
List all anomaly detection configurations for specific metric.
:param metric_id: metric unique id.
:type metric_id: str
:param skip: for paging, skipped number.
:type skip: int
:param maxpagesize: the maximum number of items in one page.
:type maxpagesize: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AnomalyDetectionConfigurationList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyDetectionConfigurationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AnomalyDetectionConfigurationList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_anomaly_detection_configurations_by_metric.metadata['url'] # type: ignore
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'metricId': self._serialize.url("metric_id", metric_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
if maxpagesize is not None:
query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'metricId': self._serialize.url("metric_id", metric_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('AnomalyDetectionConfigurationList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_anomaly_detection_configurations_by_metric.metadata = {'url': '/metrics/{metricId}/enrichment/anomalyDetection/configurations'} # type: ignore
def get_enrichment_status_by_metric(
self,
metric_id: str,
body: "_models.EnrichmentStatusQueryOption",
skip: Optional[int] = None,
maxpagesize: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.EnrichmentStatusList"]:
"""Query anomaly detection status.
Query anomaly detection status.
:param metric_id: metric unique id.
:type metric_id: str
:param body: query options.
:type body: ~azure.ai.metricsadvisor.models.EnrichmentStatusQueryOption
:param skip: for paging, skipped number.
:type skip: int
:param maxpagesize: the maximum number of items in one page.
:type maxpagesize: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either EnrichmentStatusList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.EnrichmentStatusList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EnrichmentStatusList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_enrichment_status_by_metric.metadata['url'] # type: ignore
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'metricId': self._serialize.url("metric_id", metric_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
if maxpagesize is not None:
query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'EnrichmentStatusQueryOption')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = '{nextLink}' # FIXME: manually edited; was '/{nextLink}'
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'nextLink': self._serialize.url("next_link", next_link, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'EnrichmentStatusQueryOption')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('EnrichmentStatusList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_enrichment_status_by_metric.metadata = {'url': '/metrics/{metricId}/status/enrichment/anomalyDetection/query'} # type: ignore
| 48.16692 | 203 | 0.643148 | 16,545 | 158,710 | 5.942279 | 0.018011 | 0.034776 | 0.014749 | 0.020506 | 0.9383 | 0.924091 | 0.907532 | 0.896211 | 0.88425 | 0.880059 | 0 | 0.004602 | 0.257892 | 158,710 | 3,294 | 204 | 48.181542 | 0.830132 | 0.146872 | 0 | 0.828436 | 0 | 0 | 0.10675 | 0.04432 | 0 | 0 | 0 | 0.002429 | 0 | 1 | 0.018957 | false | 0 | 0.003318 | 0 | 0.078673 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
07e4db0452840cd736ac7ae5d2bb469e277d44ed | 7,970 | py | Python | sdk_liteos/third_party/u-boot-v2019.07/u-boot-v2019.07/test/py/tests/test_efi_selftest.py | openharmony-gitee-mirror/device_bearpi_bearpi_hm_nano | c463575de065aad080f730ffbd479628eb821105 | [
"BSD-3-Clause"
] | 1 | 2022-02-15T08:51:55.000Z | 2022-02-15T08:51:55.000Z | sdk_liteos/third_party/u-boot-v2019.07/u-boot-v2019.07/test/py/tests/test_efi_selftest.py | openharmony-gitee-mirror/device_bearpi_bearpi_hm_nano | c463575de065aad080f730ffbd479628eb821105 | [
"BSD-3-Clause"
] | null | null | null | sdk_liteos/third_party/u-boot-v2019.07/u-boot-v2019.07/test/py/tests/test_efi_selftest.py | openharmony-gitee-mirror/device_bearpi_bearpi_hm_nano | c463575de065aad080f730ffbd479628eb821105 | [
"BSD-3-Clause"
] | 1 | 2021-12-15T09:54:37.000Z | 2021-12-15T09:54:37.000Z | # SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2017, Heinrich Schuchardt <xypron.glpk@gmx.de>
# Test efi API implementation
import pytest
import u_boot_utils
@pytest.mark.buildconfigspec('cmd_bootefi_selftest')
def test_efi_selftest(u_boot_console):
    """Test the UEFI implementation.

    :param u_boot_console: U-Boot console

    This function executes all selftests that are not marked as on request.
    """
    u_boot_console.run_command(cmd='setenv efi_selftest')
    u_boot_console.run_command(cmd='bootefi selftest', wait_for_prompt=False)
    # The selftest app prints a summary; anything but zero failures is an error.
    m = u_boot_console.p.expect(['Summary: 0 failures', 'Press any key'])
    if m != 0:
        raise Exception('Failures occurred during the EFI selftest')
    # Send an empty line ("any key") so the selftest app resets the board.
    u_boot_console.run_command(cmd='', wait_for_echo=False,
                               wait_for_prompt=False)
    m = u_boot_console.p.expect(['resetting', 'U-Boot'])
    if m != 0:
        raise Exception('Reset failed during the EFI selftest')
    u_boot_console.restart_uboot()
@pytest.mark.buildconfigspec('cmd_bootefi_selftest')
@pytest.mark.buildconfigspec('of_control')
@pytest.mark.notbuildconfigspec('generate_acpi_table')
def test_efi_selftest_device_tree(u_boot_console):
    """Test the device tree support of the UEFI implementation.

    :param u_boot_console: U-Boot console

    Runs the 'device tree' selftest and verifies that a serial number
    injected via the environment shows up in the device tree seen by the
    EFI application.
    """
    u_boot_console.run_command(cmd='setenv efi_selftest list')
    output = u_boot_console.run_command('bootefi selftest')
    assert '\'device tree\'' in output
    u_boot_console.run_command(cmd='setenv efi_selftest device tree')
    # 'serial#' is copied into the device tree by the fixup code; -f forces
    # overwriting the possibly write-protected variable.
    u_boot_console.run_command(cmd='setenv -f serial# Testing DT')
    u_boot_console.run_command(cmd='bootefi selftest ${fdtcontroladdr}',
                               wait_for_prompt=False)
    m = u_boot_console.p.expect(['serial-number: Testing DT', 'U-Boot'])
    if m != 0:
        raise Exception('Reset failed in \'device tree\' test')
    u_boot_console.restart_uboot()
@pytest.mark.buildconfigspec('cmd_bootefi_selftest')
def test_efi_selftest_watchdog_reboot(u_boot_console):
    """Test the watchdog timer of the UEFI implementation.

    :param u_boot_console: U-Boot console

    Runs the 'watchdog reboot' selftest and checks that the board resets.
    """
    u_boot_console.run_command(cmd='setenv efi_selftest list')
    output = u_boot_console.run_command('bootefi selftest')
    assert '\'watchdog reboot\'' in output
    u_boot_console.run_command(cmd='setenv efi_selftest watchdog reboot')
    u_boot_console.run_command(cmd='bootefi selftest', wait_for_prompt=False)
    m = u_boot_console.p.expect(['resetting', 'U-Boot'])
    if m != 0:
        raise Exception('Reset failed in \'watchdog reboot\' test')
    u_boot_console.restart_uboot()
@pytest.mark.buildconfigspec('cmd_bootefi_selftest')
def test_efi_selftest_text_input(u_boot_console):
    """Test the EFI_SIMPLE_TEXT_INPUT_PROTOCOL.

    :param u_boot_console: U-Boot console

    This function calls the text input EFI selftest and feeds it a series
    of key strokes (control characters, printable characters, escape
    sequences, multi-byte input), checking the reported unicode char and
    scan code for each. Expect patterns are raw strings because they are
    regular expressions (pexpect); non-raw '\\(' would be an invalid
    escape sequence.
    """
    u_boot_console.run_command(cmd='setenv efi_selftest text input')
    u_boot_console.run_command(cmd='bootefi selftest',
                               wait_for_prompt=False)
    m = u_boot_console.p.expect([r"To terminate type 'x'"])
    if m != 0:
        raise Exception('No prompt for \'text input\' test')
    u_boot_console.drain_console()
    # Allow plenty of time; the console may be slow to echo each key.
    u_boot_console.p.timeout = 500
    # EOT (CTRL+D)
    u_boot_console.run_command(cmd=chr(4), wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 4 \(unknown\), scan code 0 \(Null\)"])
    if m != 0:
        raise Exception('EOT failed in \'text input\' test')
    u_boot_console.drain_console()
    # BS (backspace)
    u_boot_console.run_command(cmd=chr(8), wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 8 \(BS\), scan code 0 \(Null\)"])
    if m != 0:
        raise Exception('BS failed in \'text input\' test')
    u_boot_console.drain_console()
    # TAB
    u_boot_console.run_command(cmd=chr(9), wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 9 \(TAB\), scan code 0 \(Null\)"])
    if m != 0:
        # Fixed copy-paste error: this message previously said 'BS'.
        raise Exception('TAB failed in \'text input\' test')
    u_boot_console.drain_console()
    # Printable character 'a'
    u_boot_console.run_command(cmd='a', wait_for_echo=False, send_nl=False,
                               wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 97 \('a'\), scan code 0 \(Null\)"])
    if m != 0:
        raise Exception('\'a\' failed in \'text input\' test')
    u_boot_console.drain_console()
    # UP escape sequence (ESC [ A)
    u_boot_console.run_command(cmd=chr(27) + '[A', wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 0 \(Null\), scan code 1 \(Up\)"])
    if m != 0:
        raise Exception('UP failed in \'text input\' test')
    u_boot_console.drain_console()
    # Euro sign, sent as the UTF-8 byte sequence 0xE2 0x82 0xAC
    u_boot_console.run_command(cmd='\xe2\x82\xac', wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect([r"Unicode char 8364 \('"])
    if m != 0:
        raise Exception('Euro sign failed in \'text input\' test')
    u_boot_console.drain_console()
    # 'x' terminates the test; check the summary and let the app reset.
    u_boot_console.run_command(cmd='x', wait_for_echo=False, send_nl=False,
                               wait_for_prompt=False)
    m = u_boot_console.p.expect(['Summary: 0 failures', 'Press any key'])
    if m != 0:
        raise Exception('Failures occurred during the EFI selftest')
    u_boot_console.restart_uboot()
@pytest.mark.buildconfigspec('cmd_bootefi_selftest')
def test_efi_selftest_text_input_ex(u_boot_console):
    """Test the EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL.

    :param u_boot_console: U-Boot console

    This function calls the extended text input EFI selftest and feeds it
    key strokes including modifier combinations (CTRL, SHIFT+ALT), checking
    the reported unicode char, scan code and modifiers. Expect patterns are
    raw strings because they are regular expressions (pexpect).
    """
    u_boot_console.run_command(cmd='setenv efi_selftest extended text input')
    u_boot_console.run_command(cmd='bootefi selftest',
                               wait_for_prompt=False)
    m = u_boot_console.p.expect([r"To terminate type 'CTRL\+x'"])
    if m != 0:
        raise Exception('No prompt for \'text input\' test')
    u_boot_console.drain_console()
    # Allow plenty of time; the console may be slow to echo each key.
    u_boot_console.p.timeout = 500
    # EOT (CTRL+D) - the extended protocol reports it as 'd' with CTRL set.
    u_boot_console.run_command(cmd=chr(4), wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 100 \('d'\), scan code 0 \(CTRL\+Null\)"])
    if m != 0:
        raise Exception('EOT failed in \'text input\' test')
    u_boot_console.drain_console()
    # BS (backspace)
    u_boot_console.run_command(cmd=chr(8), wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 8 \(BS\), scan code 0 \(\+Null\)"])
    if m != 0:
        raise Exception('BS failed in \'text input\' test')
    u_boot_console.drain_console()
    # TAB
    u_boot_console.run_command(cmd=chr(9), wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 9 \(TAB\), scan code 0 \(\+Null\)"])
    if m != 0:
        raise Exception('TAB failed in \'text input\' test')
    u_boot_console.drain_console()
    # Printable character 'a'
    u_boot_console.run_command(cmd='a', wait_for_echo=False, send_nl=False,
                               wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 97 \('a'\), scan code 0 \(Null\)"])
    if m != 0:
        raise Exception('\'a\' failed in \'text input\' test')
    u_boot_console.drain_console()
    # UP escape sequence (ESC [ A)
    u_boot_console.run_command(cmd=chr(27) + '[A', wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 0 \(Null\), scan code 1 \(\+Up\)"])
    if m != 0:
        raise Exception('UP failed in \'text input\' test')
    u_boot_console.drain_console()
    # Euro sign, sent as the UTF-8 byte sequence 0xE2 0x82 0xAC
    u_boot_console.run_command(cmd='\xe2\x82\xac', wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect([r"Unicode char 8364 \('"])
    if m != 0:
        raise Exception('Euro sign failed in \'text input\' test')
    u_boot_console.drain_console()
    # SHIFT+ALT+FN 5 escape sequence (ESC [ 1 5 ; 4 ~)
    u_boot_console.run_command(cmd='\x1b\x5b\x31\x35\x3b\x34\x7e',
                               wait_for_echo=False, send_nl=False,
                               wait_for_prompt=False)
    m = u_boot_console.p.expect(
        [r"Unicode char 0 \(Null\), scan code 15 \(SHIFT\+ALT\+FN 5\)"])
    if m != 0:
        raise Exception('SHIFT+ALT+FN 5 failed in \'text input\' test')
    u_boot_console.drain_console()
    # CTRL+X (chr(24)) terminates the test; check the summary.
    u_boot_console.run_command(cmd=chr(24), wait_for_echo=False,
                               send_nl=False, wait_for_prompt=False)
    m = u_boot_console.p.expect(['Summary: 0 failures', 'Press any key'])
    if m != 0:
        raise Exception('Failures occurred during the EFI selftest')
    u_boot_console.restart_uboot()
| 39.261084 | 92 | 0.730991 | 1,291 | 7,970 | 4.238575 | 0.116964 | 0.081323 | 0.186404 | 0.084978 | 0.909174 | 0.902412 | 0.897844 | 0.880848 | 0.871162 | 0.840826 | 0 | 0.0147 | 0.12936 | 7,970 | 202 | 93 | 39.455446 | 0.773887 | 0.079297 | 0 | 0.751553 | 0 | 0 | 0.249075 | 0.003838 | 0 | 0 | 0 | 0 | 0.012422 | 1 | 0.031056 | false | 0 | 0.012422 | 0 | 0.043478 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ed2f2170a1b764f1a4f1d58d37ffe71e3107dedb | 109,452 | py | Python | detectron2/modeling/backbone/fpn.py | c-rbp/panoptic_segmentation | aa212d1d6e851857e0b9563bb94fe7297c987c1a | [
"Apache-2.0"
] | null | null | null | detectron2/modeling/backbone/fpn.py | c-rbp/panoptic_segmentation | aa212d1d6e851857e0b9563bb94fe7297c987c1a | [
"Apache-2.0"
] | 1 | 2021-08-23T08:04:48.000Z | 2021-08-23T08:04:48.000Z | detectron2/modeling/backbone/fpn.py | c-rbp/panoptic_segmentation | aa212d1d6e851857e0b9563bb94fe7297c987c1a | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import math
import fvcore.nn.weight_init as weight_init
import torch
import torch.nn.functional as F
from torch import nn
from detectron2.layers import Conv2d, ShapeSpec, get_norm
from .rnns import hConvGRUCell, hConvGRUCellv2, hConvGRUCellOld, hConvExplGRUCell, tdConvGRUCell, RBPFun, CBP_penalty
from .backbone import Backbone
from .build import BACKBONE_REGISTRY
from .resnet import build_resnet_backbone
from .gn import build_resnet_gn_backbone
from .gnbn import build_resnet_gnbn_backbone
from .gnbn_lowlevel import build_resnet_gnbn_lowlevel_model_backbone
from .gnbn_lowlevel_lesssp import build_resnet_gnbn_lowlevel_lesssp_backbone
from .gnbn_horizontal import build_resnet_gnbn_horizontal_backbone
# Public API of this module: the builder functions registered with
# BACKBONE_REGISTRY plus the FPN variant classes they construct.
__all__ = [
    "build_resnet_fpngn_backbone",
    "build_resnet_fpngn_gala_backbone",
    "build_resnet_fpngn_cbp10_backbone",
    "build_resnet_fpn_gn_backbone",
    "build_resnet_fpn_gnbn_backbone",
    "build_resnet_fpn_gnbn_lowlevel_backbone",
    "build_resnet_fpnindi_explain_backbone",
    "build_resnet_fpnindi_explain_cbp10_backbone",
    "build_resnet_fpnindi_explain_post_backbone",
    "build_resnet_fpnindi_explain_post_cbp10_backbone",
    "build_resnet_fpnindiv2_cbp10_noskip_backbone",
    "build_resnet_fpnindiv2_backbone",
    "build_resnet_fpnindiv2_cbp10_backbone",
    "build_retinanet_resnet_fpn_backbone",
    "build_resnet_fpnlateral_backbone",
    "build_resnet_fpnlateral_cbp10_backbone",
    "build_resnet_fpn_gnbn_horizontal_cbp10_backbone",
    "FPNGN",
    "FPNLATERAL",
    "FPNINDI",
    "FPNINDIV2",
    "FPNINDIEXPLAIN",
    "FPNGNFULL",
    "FPN"]
class FPNLATERAL(Backbone):
    """
    Feature Pyramid Network whose lateral connections are recurrent
    (hConvGRUCellv2) cells instead of 1x1 convolutions.

    Each pyramid level runs its recurrent cell for ``timesteps`` steps,
    optionally normalizes the final hidden state, fuses it with the
    upsampled hidden state of the next-coarser level ("sum" or "avg"),
    and projects the result through a 3x3 output conv. Gradients through
    the recurrence are computed either by backprop-through-time
    ('bptt') or by a truncated Neumann-series recurrent backprop
    ('rbp' / 'cbp'; 'cbp' additionally emits a contraction penalty in
    the output dict while training).
    """

    def __init__(
        self,
        bottom_up,
        in_features,
        out_channels,
        norm="",
        recurrent_norm="GN",
        gala=False,
        use_skips=True,
        top_block=None,
        fuse_type="sum",
        grad_method='bptt',
        gala_version='pre',
        h_norm_type="GN",
        neumann_iterations=15,
        penalty_scale=1.,
        memory_mode=False,
        timesteps=1):
        """
        Args:
            bottom_up (Backbone): module representing the bottom up subnetwork.
                Must be a subclass of :class:`Backbone`. The multi-scale feature
                maps generated by the bottom up network, and listed in
                `in_features`, are used to generate FPN levels.
            in_features (list[str]): names of the input feature maps coming
                from the backbone to which FPN is attached, ordered from high
                to low resolution (any *contiguous* sublist of what the
                backbone produces).
            out_channels (int): number of channels in the output feature maps.
            norm (str): normalization for the output convs ("" disables it
                and enables conv bias instead).
            recurrent_norm (str): normalization used inside the recurrent
                cells.
            gala (bool): enable the cells' GALA attention variant.
            use_skips (bool): stored for interface parity with the other FPN
                variants; the top-down skip is always applied here.
            top_block (nn.Module or None): if provided, an extra operation
                performed on the last (smallest resolution) FPN output; must
                expose "num_levels" and "in_feature" (e.g. p5).
            fuse_type (str): "sum" (element-wise sum, default) or "avg"
                (element-wise mean) fusion of top-down and lateral features.
            grad_method (str): 'bptt', 'rbp' or 'cbp'.
            gala_version (str): stored for parity with the other variants.
            h_norm_type (str): norm applied to each level's final hidden
                state ("" disables it).
            neumann_iterations (int): Neumann series length for RBP.
            penalty_scale (float): scale applied to the CBP penalty.
            memory_mode (bool): stored for parity with the other variants.
            timesteps (int): number of recurrent steps per level.
        """
        super(FPNLATERAL, self).__init__()
        assert isinstance(bottom_up, Backbone)
        # Feature map strides and channels from
        # the bottom up network (e.g. ResNet)
        self.grad_method = grad_method.lower()
        self.timesteps = timesteps
        self.neumann_iterations = neumann_iterations
        self.gala = gala
        self.gala_version = gala_version
        self.memory_mode = memory_mode
        self.use_skips = use_skips
        self.penalty_scale = penalty_scale
        input_shapes = bottom_up.output_shape()
        in_strides = [input_shapes[f].stride for f in in_features]
        in_channels = [input_shapes[f].channels for f in in_features]
        _assert_strides_are_log2_contiguous(in_strides)
        lateral_convs = []
        output_convs = []
        horizontal_names = []  # TODO: Remove this
        self.h_norm_type = h_norm_type
        self.horizontal_norms = {}
        stages = []
        use_bias = norm == ""
        for idx, it_in_channels in enumerate(in_channels):
            output_norm = get_norm(norm, out_channels)
            stage = int(math.log2(in_strides[idx]))
            # Horizontal (recurrent lateral) connection for this level.
            lateral_conv = hConvGRUCellv2(
                input_size=it_in_channels,
                hidden_size=out_channels,
                kernel_size=3,
                batchnorm=True,
                timesteps=timesteps,
                gala=self.gala,
                norm=recurrent_norm,
                grad_method=self.grad_method
            )
            self.horizontal_norms["fpn_postnorm{}".format(stage)] = get_norm(
                h_norm_type, out_channels)
            # Feedforward 3x3 output projection.
            output_conv = Conv2d(
                out_channels,
                out_channels,
                kernel_size=3,
                stride=1,
                padding=1,
                bias=use_bias,
                norm=output_norm,
            )
            weight_init.c2_xavier_fill(output_conv)
            stages += [stage]
            self.add_module("fpn_lateral{}".format(stage), lateral_conv)
            self.add_module("fpn_recurrent{}".format(stage), output_conv)
            # Register the post-norm so it is trained/saved with the model.
            self.add_module(
                "fpn_norm_recurrent{}".format(stage),
                self.horizontal_norms["fpn_postnorm{}".format(stage)])
            lateral_convs.append(lateral_conv)
            output_convs.append(output_conv)
            horizontal_names.append("fpn_recurrent{}".format(stage))
        # Place convs into top-down order (from low to high resolution)
        # to make the top-down computation in forward clearer.
        self.lateral_convs = lateral_convs[::-1]
        self.output_convs = output_convs[::-1]
        self.horizontal_names = horizontal_names[::-1]
        self.stages = stages[::-1]
        self.top_block = top_block
        self.in_features = in_features
        self.bottom_up = bottom_up
        # Return feature names are "p<stage>", like ["p2", "p3", ..., "p6"]
        self._out_feature_strides = {
            "p{}".format(int(math.log2(s))): s for s in in_strides}
        # top block output feature maps.
        if self.top_block is not None:
            for s in range(stage, stage + self.top_block.num_levels):
                self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1)
        self._out_features = list(self._out_feature_strides.keys())
        self._out_feature_channels = {
            k: out_channels for k in self._out_features}
        self._size_divisibility = in_strides[-1]
        assert fuse_type in {"avg", "sum"}
        self._fuse_type = fuse_type

    @property
    def size_divisibility(self):
        return self._size_divisibility

    def forward(self, x):
        """
        Args:
            x: input tensor for the bottom-up network.

        Returns:
            dict[str->Tensor]:
                mapping from feature map name to FPN feature map tensor
                in high to low resolution order, following the FPN paper
                convention "p<stage>" where stage has stride 2 ** stage,
                e.g. ["p2", "p3", ..., "p6"]. While training with
                grad_method == 'cbp' the dict also carries 'cbp_penalty'.

        Raises:
            ValueError: if ``grad_method`` is not one of
                'bptt' / 'rbp' / 'cbp'.
        """
        bottom_up_features = self.bottom_up(x)
        # Reverse feature maps into top-down order (low to high resolution).
        x = [bottom_up_features[f] for f in self.in_features[::-1]]
        results = [[] for _ in range(len(self.stages))]
        num_layers = len(self.stages)
        hidden_states = {}
        lateral_activities = {}
        penalty = None
        if self.grad_method == 'cbp' or self.grad_method == 'rbp':
            results, penalty = self.neumann(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        elif self.grad_method == 'bptt':
            results = self.bptt(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        else:
            raise ValueError(
                "Unknown grad_method: {}".format(self.grad_method))
        # Finish up: optionally extend with the extra top-block levels.
        if self.top_block is not None:
            top_block_in_feature = bottom_up_features.get(
                self.top_block.in_feature, None)
            if top_block_in_feature is None:
                top_block_in_feature = results[
                    self._out_features.index(self.top_block.in_feature)]
            results.extend(self.top_block(top_block_in_feature))
        assert len(self._out_features) == len(results)
        out_dict = dict(zip(self._out_features, results))
        if self.grad_method == 'cbp' and self.training:
            out_dict['cbp_penalty'] = penalty * self.penalty_scale
        return out_dict

    def neumann(
            self,
            x,
            num_layers,
            hidden_states,
            lateral_activities,
            results):
        """Run neumann RBP.

        The first ``timesteps - 1`` recurrent steps run under no_grad;
        the final step is recomputed with grad and its fixed point is
        differentiated via RBPFun. ``num_layers`` and
        ``lateral_activities`` are accepted for interface parity with
        ``bptt`` and are unused here.
        """
        if self.timesteps == 1:
            raise RuntimeError('Timesteps == 1 for neumann ya weirdo!')
        penalty = 0.
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                stage) in enumerate(
                    zip(
                        x,
                        self.lateral_convs,
                        self.output_convs,
                        self.stages)):
            it_stage = "fpn_lateral{}".format(stage)
            for n in range(self.timesteps - 1):
                # Horizontal connections
                feature_shape = features.shape
                if n == 0:
                    hidden_states[it_stage] = torch.zeros(
                        [feature_shape[0],
                         lateral_conv.hidden_size,
                         feature_shape[2],
                         feature_shape[3]]).to(features.device).requires_grad_()  # noqa
                with torch.no_grad():
                    hidden_state = lateral_conv(
                        input_=features,
                        h_=hidden_states[it_stage])
                hidden_states[it_stage] = hidden_state
            # Final timestep, recomputed with grad from a detached state.
            prev_state = hidden_state.clone().detach().requires_grad_()
            last_state = lateral_conv(
                input_=features,
                h_=prev_state)
            # Do fixed point optim
            lower_activity = RBPFun.apply(
                prev_state,
                last_state,
                0,
                0,
                stage,
                self.neumann_iterations)
            # Accrue the penalities
            if self.training:
                penalty = penalty + CBP_penalty(
                    prev_state=prev_state,
                    last_state=last_state,
                    compute_hessian=(self.grad_method == 'cbp'))
            # Normalize
            if len(self.h_norm_type):
                n_stage = "fpn_postnorm{}".format(stage)
                lower_activity = self.horizontal_norms[n_stage](lower_activity)
            hidden_states[it_stage] = lower_activity
            # Do the TD pass
            if layer_idx > 0:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = hidden_states[higher_name]  # noqa Used to be laterals
                higher_activity = F.interpolate(
                    higher_activity, scale_factor=2, mode="nearest")
                skip = higher_activity + lower_activity
                if self._fuse_type == "avg":
                    skip /= 2
            else:
                skip = lower_activity
            results[layer_idx] = output_conv(skip)
        results = results[::-1]
        return results, penalty

    def bptt(
            self,
            x,
            num_layers,
            hidden_states,
            lateral_activities,
            results):
        """Run backprop through time.

        FIX(review): the original zip omitted ``self.output_convs`` so
        ``output_conv`` was an unbound name, indexed the post-norm dict
        with the "fpn_lateral" key instead of "fpn_postnorm", and read
        an undefined ``lower_activity``; all are corrected to mirror
        ``neumann``. ``num_layers`` / ``lateral_activities`` are unused,
        kept for interface parity.
        """
        # Now move through remaining layers (with feedback)
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                stage) in enumerate(
                    zip(
                        x,
                        self.lateral_convs,
                        self.output_convs,
                        self.stages)):
            # Horizontal connections
            it_stage = "fpn_lateral{}".format(stage)
            feature_shape = features.shape
            for n in range(self.timesteps):
                if n == 0:
                    hidden_states[it_stage] = torch.zeros(
                        [feature_shape[0],
                         lateral_conv.hidden_size,
                         feature_shape[2],
                         feature_shape[3]]).to(features.device).requires_grad_()  # noqa
                hidden_states[it_stage] = lateral_conv(
                    input_=features,
                    h_=hidden_states[it_stage])
            # Normalize the final hidden state (keys are "fpn_postnorm<s>").
            if len(self.h_norm_type):
                n_stage = "fpn_postnorm{}".format(stage)
                hidden_states[it_stage] = self.horizontal_norms[n_stage](
                    hidden_states[it_stage])
            lower_activity = hidden_states[it_stage]
            # Do the TD pass
            if layer_idx > 0:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = hidden_states[higher_name]  # noqa hGRU instead of laterals
                higher_activity = F.interpolate(
                    higher_activity, scale_factor=2, mode="nearest")
                skip = higher_activity + lower_activity
                if self._fuse_type == "avg":
                    skip /= 2
            else:
                skip = lower_activity
            results[layer_idx] = output_conv(skip)
        results = results[::-1]
        return results

    def output_shape(self):
        """Return a {name: ShapeSpec} map for every output feature."""
        return {
            name: ShapeSpec(
                channels=self._out_feature_channels[name],
                stride=self._out_feature_strides[name]
            )
            for name in self._out_features
        }
class FPNINDIV2(Backbone):
    """
    Feature Pyramid Network with an independent recurrent (hConvGRUCell)
    stage per level.

    Each level projects its bottom-up feature with a 1x1 lateral conv,
    runs a recurrent cell over it for ``timesteps`` steps, fuses the
    final hidden state with the upsampled state of the coarser level
    ("sum" or "avg", optionally disabled via ``use_skips`` in the
    neumann path), and projects through a 3x3 top-down conv. Gradients
    use 'bptt' or Neumann-series recurrent backprop ('rbp'/'cbp').
    """

    def __init__(
        self,
        bottom_up,
        in_features,
        out_channels,
        norm="",
        gala=True,
        use_skips=True,
        top_block=None,
        fuse_type="sum",
        grad_method='bptt',
        gala_version='pre',
        neumann_iterations=-1,
        penalty_scale=1.,
        memory_mode=False,
        timesteps=1):
        """
        Args:
            bottom_up (Backbone): module representing the bottom up subnetwork.
                Must be a subclass of :class:`Backbone`.
            in_features (list[str]): names of the input feature maps coming
                from the backbone, ordered from high to low resolution.
            out_channels (int): number of channels in the output feature maps.
            norm (str): normalization for the convs and recurrent cells
                ("" disables it and enables conv bias instead).
            gala (bool): enable the cells' GALA attention variant.
            use_skips (bool): apply the top-down skip fusion in the
                neumann path (bptt always fuses).
            top_block (nn.Module or None): optional extra block applied to
                the last output; must expose "num_levels" and "in_feature".
            fuse_type (str): "sum" (default) or "avg" fusion.
            grad_method (str): 'bptt', 'rbp' or 'cbp'.
            gala_version (str): stored for parity with the other variants.
            neumann_iterations (int): Neumann series length for RBP.
            penalty_scale (float): scale applied to the CBP penalty.
            memory_mode (bool): stored for parity with the other variants.
            timesteps (int): number of recurrent steps per level.
        """
        super(FPNINDIV2, self).__init__()
        assert isinstance(bottom_up, Backbone)
        # Feature map strides and channels from
        # the bottom up network (e.g. ResNet)
        self.grad_method = grad_method.lower()
        self.timesteps = timesteps
        self.neumann_iterations = neumann_iterations
        self.gala = gala
        self.gala_version = gala_version
        self.memory_mode = memory_mode
        self.use_skips = use_skips
        self.penalty_scale = penalty_scale
        input_shapes = bottom_up.output_shape()
        in_strides = [input_shapes[f].stride for f in in_features]
        in_channels = [input_shapes[f].channels for f in in_features]
        _assert_strides_are_log2_contiguous(in_strides)
        lateral_convs = []
        output_convs = []
        td_convs = []
        td_mapping = []
        horizontal_names = []  # TODO: Remove this
        stages = []
        use_bias = norm == ""
        for idx, it_in_channels in enumerate(in_channels):
            lateral_norm = get_norm(norm, out_channels)
            output_norm = get_norm(norm, out_channels)
            # Feedforward 1x1 lateral projection.
            lateral_conv = Conv2d(
                it_in_channels,
                out_channels,
                kernel_size=1,
                bias=use_bias,
                norm=lateral_norm
            )
            weight_init.c2_xavier_fill(lateral_conv)
            # Horizontal (recurrent) connection.
            output_conv = hConvGRUCell(
                input_size=out_channels,
                hidden_size=out_channels,
                kernel_size=3,
                batchnorm=True,
                timesteps=timesteps,
                gala=self.gala,
                norm=norm,
                grad_method=self.grad_method)
            stage = int(math.log2(in_strides[idx]))
            stages += [stage]
            self.add_module("fpn_lateral{}".format(stage), lateral_conv)
            self.add_module("fpn_recurrent{}".format(stage), output_conv)
            lateral_convs.append(lateral_conv)
            output_convs.append(output_conv)
            horizontal_names.append("fpn_recurrent{}".format(stage))
            # Output FF connections
            td_conv = Conv2d(
                out_channels,
                out_channels,
                kernel_size=3,
                stride=1,
                padding=1,
                bias=use_bias,
                norm=output_norm)
            weight_init.c2_xavier_fill(td_conv)
            self.add_module("fpn_topdown{}".format(stage), td_conv)
            td_convs.append(td_conv)
            td_mapping += [[stage, int(math.log2(in_strides[idx - 1]))]]
        # Place convs into top-down order (from low to high resolution)
        # to make the top-down computation in forward clearer.
        self.lateral_convs = lateral_convs[::-1]
        self.output_convs = output_convs[::-1]
        self.td_convs = td_convs[::-1]
        self.horizontal_names = horizontal_names[::-1]
        self.td_mapping = td_mapping[::-1]
        self.stages = stages[::-1]
        self.top_block = top_block
        self.in_features = in_features
        self.bottom_up = bottom_up
        # Return feature names are "p<stage>", like ["p2", "p3", ..., "p6"]
        self._out_feature_strides = {
            "p{}".format(int(math.log2(s))): s for s in in_strides}
        # top block output feature maps.
        if self.top_block is not None:
            for s in range(stage, stage + self.top_block.num_levels):
                self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1)
        self._out_features = list(self._out_feature_strides.keys())
        self._out_feature_channels = {
            k: out_channels for k in self._out_features}
        self._size_divisibility = in_strides[-1]
        assert fuse_type in {"avg", "sum"}
        self._fuse_type = fuse_type

    @property
    def size_divisibility(self):
        return self._size_divisibility

    def forward(self, x):
        """
        Args:
            x: input tensor for the bottom-up network.

        Returns:
            dict[str->Tensor]:
                mapping from feature map name ("p<stage>") to FPN feature
                map tensor in high to low resolution order. While training
                with grad_method == 'cbp' the dict also carries
                'cbp_penalty' (gated on self.training for consistency with
                FPNLATERAL, so eval-mode outputs contain only features).
        """
        bottom_up_features = self.bottom_up(x)
        # Reverse feature maps into top-down order (low to high resolution).
        x = [bottom_up_features[f] for f in self.in_features[::-1]]
        results = [[] for _ in range(len(self.stages))]
        num_layers = len(self.stages)
        hidden_states = {}
        lateral_activities = {}
        penalty = None
        if self.grad_method == 'cbp' or self.grad_method == 'rbp':
            results, penalty = self.neumann(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        elif self.grad_method == 'bptt':
            results = self.bptt(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        # Finish up: optionally extend with the extra top-block levels.
        if self.top_block is not None:
            top_block_in_feature = bottom_up_features.get(
                self.top_block.in_feature, None)
            if top_block_in_feature is None:
                top_block_in_feature = results[
                    self._out_features.index(self.top_block.in_feature)]
            results.extend(self.top_block(top_block_in_feature))
        assert len(self._out_features) == len(results)
        out_dict = dict(zip(self._out_features, results))
        if self.grad_method == 'cbp' and self.training:
            out_dict['cbp_penalty'] = penalty * self.penalty_scale
        return out_dict

    def neumann(
            self,
            x,
            num_layers,
            hidden_states,
            lateral_activities,
            results):
        """Run neumann RBP.

        The first ``timesteps - 1`` steps run under no_grad (laterals are
        computed once with grad enabled); the final step is recomputed
        from a detached state and differentiated via RBPFun.
        ``num_layers`` is unused, kept for interface parity.
        """
        if self.timesteps == 1:
            raise RuntimeError('Timesteps == 1 for neumann ya weirdo!')
        # Now move through remaining layers (with feedback)
        penalty = 0.
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                h_name,
                stage,
                td_conv) in enumerate(
                    zip(
                        x,
                        self.lateral_convs,
                        self.output_convs,
                        self.horizontal_names,
                        self.stages,
                        self.td_convs)):
            stage = "fpn_lateral{}".format(stage)
            with torch.no_grad():
                for n in range(self.timesteps - 1):
                    # FF connections -- only compute once
                    if n == 0:
                        with torch.enable_grad():
                            lateral_features = lateral_conv(features)
                        lateral_activities[stage] = lateral_features
                    else:
                        lateral_features = lateral_activities[stage]
                    # Horizontal connections
                    if n == 0:
                        hidden_states[stage] = lateral_features  # torch.zeros_like(lateral_features)  # noqa
                    hidden_state = output_conv(
                        input_=lateral_features,
                        h_=hidden_states[stage])
                    hidden_states[stage] = hidden_state
            # Final step with grad, from a detached copy of the state.
            prev_state = hidden_state.clone().detach().requires_grad_()
            # Horizontal connections
            last_state = output_conv(
                input_=lateral_features,
                h_=prev_state)
            # Do fixed point optim
            lower_activity = RBPFun.apply(
                prev_state,
                last_state,
                0,
                0,
                stage,
                self.neumann_iterations)
            hidden_states[stage] = lower_activity
            # Accrue the penalities
            if self.training:
                penalty = penalty + CBP_penalty(
                    prev_state=prev_state,
                    last_state=last_state,
                    compute_hessian=(self.grad_method == 'cbp'))
            # Do the TD pass
            if layer_idx > 0 and self.use_skips:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = hidden_states[higher_name]  # noqa Used to be laterals
                higher_activity = F.interpolate(
                    higher_activity, scale_factor=2, mode="nearest")
                skip = higher_activity + lower_activity
                if self._fuse_type == "avg":
                    skip /= 2
            else:
                skip = lower_activity
            results[layer_idx] = td_conv(skip)
        results = results[::-1]
        return results, penalty

    def bptt(
            self,
            x,
            num_layers,
            hidden_states,
            lateral_activities,
            results):
        """Run backprop through time.

        FIX(review): the state-dict key rename
        ``stage = "fpn_lateral{}".format(stage)`` used to sit inside the
        timestep loop, so for timesteps >= 2 the already-formatted string
        was re-formatted ("fpn_lateralfpn_lateral2", ...), corrupting the
        hidden/lateral state keys. It is now hoisted to once per layer,
        matching FPNLATERAL.bptt. ``num_layers`` is unused, kept for
        interface parity.
        """
        # Now move through remaining layers (with feedback)
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                h_name,
                stage,
                td_conv,
                td_map) in enumerate(
                    zip(
                        x,
                        self.lateral_convs,
                        self.output_convs,
                        self.horizontal_names,
                        self.stages,
                        self.td_convs,
                        self.td_mapping)):
            # Format the state key once per layer (NOT per timestep).
            stage = "fpn_lateral{}".format(stage)
            for n in range(self.timesteps):
                # FF connections -- only compute once
                if n == 0:
                    lateral_features = F.softplus(lateral_conv(features))
                    lateral_activities[stage] = lateral_features
                else:
                    lateral_features = lateral_activities[stage]
                # Horizontal connections
                if n == 0:
                    hidden_states[stage] = torch.zeros_like(lateral_features)  # noqa
                recurrent_features = output_conv(
                    input_=lateral_features,
                    h_=hidden_states[stage])
                hidden_states[stage] = recurrent_features
        # Second pass: fuse each level with the coarser level's state.
        for layer_idx, (
                stage,
                td_conv) in enumerate(
                    zip(
                        self.stages,
                        self.td_convs)):
            lower_name = 'fpn_lateral{}'.format(
                self.stages[layer_idx])
            lower_activity = hidden_states[lower_name]  # Consider adding a 1x1
            if layer_idx > 0:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = hidden_states[higher_name]  # noqa hGRU instead of laterals
                higher_activity = F.interpolate(
                    higher_activity, scale_factor=2, mode="nearest")
                skip = higher_activity + lower_activity
                if self._fuse_type == "avg":
                    skip /= 2
            else:
                skip = lower_activity
            results[layer_idx] = td_conv(skip)
        results = results[::-1]
        return results

    def output_shape(self):
        """Return a {name: ShapeSpec} map for every output feature."""
        return {
            name: ShapeSpec(
                channels=self._out_feature_channels[name],
                stride=self._out_feature_strides[name]
            )
            for name in self._out_features
        }
class FPNINDIEXPLAIN(Backbone):
    """
    Feature Pyramid Network with independent recurrent cells
    (hConvExplGRUCell) that additionally maintain an "explanation"
    (attention) state per level.

    Each level: 1x1 lateral conv -> recurrent cell run for `timesteps`
    steps carrying both a hidden state and an explanation state ->
    top-down fusion with the coarser level -> 3x3 top-down conv.
    forward() returns the feature dict plus, for 'cbp'/'rbp',
    'cbp_penalty' and 'attention_penalty' entries.
    """
    def __init__(
        self,
        bottom_up,
        in_features,
        out_channels,
        norm="",
        gala=True,
        top_block=None,
        fuse_type="sum",
        grad_method='bptt',
        gala_version='pre',
        neumann_iterations=-1,
        memory_mode=False,
        timesteps=1):
        """
        Args:
            bottom_up (Backbone): module representing the bottom up subnetwork.
                Must be a subclass of :class:`Backbone`. The multi-scale feature
                maps generated by the bottom up network, and listed in `in_features`,
                are used to generate FPN levels.
            in_features (list[str]): names of the input feature maps coming
                from the backbone to which FPN is attached. For example, if the
                backbone produces ["res2", "res3", "res4"], any *contiguous* sublist
                of these may be used; order must be from high to low resolution.
            out_channels (int): number of channels in the output feature maps.
            norm (str): the normalization to use.
            top_block (nn.Module or None): if provided, an extra operation will
                be performed on the output of the last (smallest resolution)
                FPN output, and the result will extend the result list. The top_block
                further downsamples the feature map. It must have an attribute
                "num_levels", meaning the number of extra FPN levels added by
                this block, and "in_feature", which is a string representing
                its input feature (e.g., p5).
            fuse_type (str): types for fusing the top down features and the lateral
                ones. It can be "sum" (default), which sums up element-wise; or "avg",
                which takes the element-wise mean of the two.
        """
        super(FPNINDIEXPLAIN, self).__init__()
        assert isinstance(bottom_up, Backbone)
        # Feature map strides and channels from
        # the bottom up network (e.g. ResNet)
        self.grad_method = grad_method.lower()
        self.timesteps = timesteps
        self.neumann_iterations = neumann_iterations
        self.gala = gala
        self.gala_version = gala_version
        self.memory_mode = memory_mode
        input_shapes = bottom_up.output_shape()
        in_strides = [input_shapes[f].stride for f in in_features]
        in_channels = [input_shapes[f].channels for f in in_features]
        _assert_strides_are_log2_contiguous(in_strides)
        lateral_convs = []
        output_convs = []
        td_convs = []
        td_mapping = []
        horizontal_names = []  # TODO: Remove this
        stages = []
        # With no norm the convs carry a bias instead.
        use_bias = norm == ""
        for idx, it_in_channels in enumerate(in_channels):
            lateral_norm = get_norm(norm, out_channels)
            output_norm = get_norm(norm, out_channels)
            # Feedforward connections
            lateral_conv = Conv2d(
                it_in_channels,
                out_channels,
                kernel_size=1,
                bias=use_bias,
                norm=lateral_norm
            )
            weight_init.c2_xavier_fill(lateral_conv)
            # Horizontal connections (recurrent cell with explanation state)
            output_conv = hConvExplGRUCell(
                input_size=out_channels,
                hidden_size=out_channels,
                kernel_size=3,
                batchnorm=True,
                timesteps=timesteps,
                gala=self.gala,
                version=self.gala_version,
                norm=norm,
                grad_method=self.grad_method)
            stage = int(math.log2(in_strides[idx]))
            stages += [stage]
            self.add_module("fpn_lateral{}".format(stage), lateral_conv)
            self.add_module("fpn_recurrent{}".format(stage), output_conv)
            lateral_convs.append(lateral_conv)
            output_convs.append(output_conv)
            horizontal_names.append("fpn_recurrent{}".format(stage))
            # Output FF connections
            td_conv = Conv2d(
                out_channels,
                out_channels,
                kernel_size=3,
                stride=1,
                padding=1,
                bias=use_bias,
                norm=output_norm)
            weight_init.c2_xavier_fill(td_conv)
            self.add_module("fpn_topdown{}".format(stage), td_conv)
            td_convs.append(td_conv)
            td_mapping += [[stage, int(math.log2(in_strides[idx - 1]))]]
        # Place convs into top-down order (from low to high resolution)
        # to make the top-down computation in forward clearer.
        self.lateral_convs = lateral_convs[::-1]
        self.output_convs = output_convs[::-1]
        self.td_convs = td_convs[::-1]
        self.horizontal_names = horizontal_names[::-1]
        self.td_mapping = td_mapping[::-1]
        self.stages = stages[::-1]
        self.top_block = top_block
        self.in_features = in_features
        self.bottom_up = bottom_up
        # Return feature names are "p<stage>", like ["p2", "p3", ..., "p6"]
        self._out_feature_strides = {
            "p{}".format(int(math.log2(s))): s for s in in_strides}
        # top block output feature maps.
        if self.top_block is not None:
            for s in range(stage, stage + self.top_block.num_levels):
                self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1)
        self._out_features = list(self._out_feature_strides.keys())
        self._out_feature_channels = {
            k: out_channels for k in self._out_features}
        self._size_divisibility = in_strides[-1]
        assert fuse_type in {"avg", "sum"}
        self._fuse_type = fuse_type

    @property
    def size_divisibility(self):
        return self._size_divisibility

    def forward(self, x):
        """
        Args:
            input (dict[str->Tensor]): mapping feature map name (e.g., "res5") to
                feature map tensor for each feature level in high to low resolution order.
        Returns:
            dict[str->Tensor]:
                mapping from feature map name to FPN feature map tensor
                in high to low resolution order. Returned feature names follow the FPN
                paper convention: "p<stage>", where stage has stride = 2 ** stage e.g.,
                ["p2", "p3", ..., "p6"]. For 'cbp'/'rbp' the dict also carries
                'cbp_penalty' and 'attention_penalty'.
        """
        # Reverse feature maps into top-down order (from low to high resolution)
        bottom_up_features = self.bottom_up(x)
        x = [bottom_up_features[f] for f in self.in_features[::-1]]
        results = [[] for _ in range(len(self.stages))]
        num_layers = len(self.stages)
        hidden_states = {}
        explain_states = {}
        lateral_activities = {}
        if self.grad_method == 'cbp' or self.grad_method == 'rbp':
            results, penalty, explanations = self.neumann(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                explain_states=explain_states,
                lateral_activities=lateral_activities,
                results=results)
        elif self.grad_method == 'bptt':
            # NOTE(review): the bptt path also returns `explanations` but it is
            # only added to the output dict in the cbp/rbp branch below —
            # confirm whether the attention penalty is intentionally unused
            # under bptt.
            results, explanations = self.bptt(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                explain_states=explain_states,
                lateral_activities=lateral_activities,
                results=results)
        # Finish up
        if self.top_block is not None:
            top_block_in_feature = bottom_up_features.get(
                self.top_block.in_feature, None)
            if top_block_in_feature is None:
                top_block_in_feature = results[
                    self._out_features.index(self.top_block.in_feature)]
            results.extend(self.top_block(top_block_in_feature))
        assert len(self._out_features) == len(results)
        out_dict = dict(zip(self._out_features, results))
        if self.grad_method == 'cbp' or self.grad_method == 'rbp':
            out_dict['cbp_penalty'] = penalty
            out_dict['attention_penalty'] = explanations
        return out_dict

    def neumann(
            self,
            x,
            num_layers,
            hidden_states,
            explain_states,
            lateral_activities,
            results):
        """Run neumann RBP.

        First ``timesteps - 1`` steps for all layers run under no_grad
        (laterals computed once with grad); the states are then detached
        and one final step per layer is differentiated via RBPFun.
        """
        if self.timesteps == 1:
            raise RuntimeError('Timesteps == 1 for neumann ya weirdo!')
        # Now move through remaining layers (with feedback)
        with torch.no_grad():
            for n in range(self.timesteps - 1):
                for layer_idx, (
                        features,
                        lateral_conv,
                        output_conv,
                        h_name,
                        stage,
                        td_conv) in enumerate(
                            zip(
                                x,
                                self.lateral_convs,
                                self.output_convs,
                                self.horizontal_names,
                                self.stages,
                                self.td_convs)):
                    # FF connections -- only compute once
                    stage = "fpn_lateral{}".format(stage)
                    if n == 0:
                        with torch.enable_grad():
                            lateral_features = lateral_conv(features)
                        lateral_activities[stage] = lateral_features
                    else:
                        lateral_features = lateral_activities[stage]
                    # Horizontal connections
                    if n == 0:
                        hidden_states[stage] = torch.zeros_like(lateral_features)  # noqa
                        # NOTE(review): explanation state starts at 0.5 here but
                        # at 0 in bptt() — confirm the asymmetry is intended.
                        explain_states[stage] = torch.zeros_like(lateral_features) + 0.5  # noqa
                    hidden_states[stage], explain_states[stage] = output_conv(
                        input_=lateral_features,
                        h_=hidden_states[stage],
                        e_=explain_states[stage])
        # Detatch and require_grads for each of the hidden_states
        # print('hidden', [v.mean().item() for v in hidden_states.values()])
        prev_states = {}
        for k, v in hidden_states.items():
            if not v.requires_grad:
                v = v.clone().detach().requires_grad_()
            prev_states[k] = v
        # Final pass
        penalty = 0.
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                h_name,
                stage,
                td_conv) in enumerate(
                    zip(
                        x,
                        self.lateral_convs,
                        self.output_convs,
                        self.horizontal_names,
                        self.stages,
                        self.td_convs)):
            # FF connections -- only compute once
            stage = "fpn_lateral{}".format(stage)
            lateral_features = lateral_activities[stage]
            # Horizontal connections
            last_state, explain_states[stage] = output_conv(
                input_=lateral_features,
                h_=prev_states[stage],
                e_=explain_states[stage])
            # Do fixed point optim
            prev_state = prev_states[stage]
            lower_activity = RBPFun.apply(
                prev_state,
                last_state,
                0,
                0,
                stage,
                self.neumann_iterations)
            hidden_states[stage] = lower_activity
            # Accrue the penalities
            if self.training:
                penalty = penalty + CBP_penalty(
                    prev_state=prev_state,
                    last_state=last_state,
                    compute_hessian=(self.grad_method == 'cbp'))
            # Do the TD pass
            if layer_idx > 0:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = hidden_states[higher_name]  # noqa Used to be laterals
                higher_activity = F.interpolate(
                    higher_activity, scale_factor=2, mode="nearest")
                skip = higher_activity + lower_activity
                if self._fuse_type == "avg":
                    skip /= 2
            else:
                skip = lower_activity
            results[layer_idx] = td_conv(skip)
        results = results[::-1]
        # Optimize to be as close to 1 as possible
        explanations = 0.
        for ex in explain_states.values():
            explanations = explanations + ((1 - ex) ** 2).mean()
        explanations = explanations / (1e-4 + len(explain_states.keys()))
        return results, penalty, explanations

    def bptt(
            self,
            x,
            num_layers,
            hidden_states,
            explain_states,
            lateral_activities,
            results):
        """Run backprop through time.

        Timestep-major: every layer advances one step per outer
        iteration; top-down fusion happens in a second pass once all
        states are final.
        """
        # Now move through remaining layers (with feedback)
        for n in range(self.timesteps):
            for layer_idx, (
                    features,
                    lateral_conv,
                    output_conv,
                    h_name,
                    stage,
                    td_conv,
                    td_map) in enumerate(
                        zip(
                            x,
                            self.lateral_convs,
                            self.output_convs,
                            self.horizontal_names,
                            self.stages,
                            self.td_convs,
                            self.td_mapping)):
                # FF connections -- only compute once
                # (stage comes fresh from the zip each inner iteration,
                # so re-formatting here is safe in this loop order)
                stage = "fpn_lateral{}".format(stage)
                if n == 0:
                    lateral_features = lateral_conv(features)
                    lateral_activities[stage] = lateral_features
                else:
                    lateral_features = lateral_activities[stage]
                # Horizontal connections
                if n == 0:
                    hidden_states[stage] = torch.zeros_like(lateral_features)  # noqa
                    explain_states[stage] = torch.zeros_like(lateral_features)  # noqa
                    # hidden_states[stage] = F.softplus(lateral_features)
                recurrent_features, explanation = output_conv(
                    input_=lateral_features,
                    h_=hidden_states[stage],
                    e_=explain_states[stage])
                hidden_states[stage] = recurrent_features
                explain_states[stage] = explanation
        # Second pass: top-down fusion of the final hidden states.
        for layer_idx, (
                stage,
                td_conv) in enumerate(
                    zip(
                        self.stages,
                        self.td_convs)):
            lower_name = 'fpn_lateral{}'.format(
                self.stages[layer_idx])
            lower_activity = hidden_states[lower_name]  # Consider adding a 1x1
            if layer_idx > 0:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = hidden_states[higher_name]  # noqa hGRU instead of laterals
                higher_activity = F.interpolate(
                    higher_activity, scale_factor=2, mode="nearest")
                skip = higher_activity + lower_activity
                if self._fuse_type == "avg":
                    skip /= 2
            else:
                skip = lower_activity
            results[layer_idx] = td_conv(skip)
        results = results[::-1]
        # Optimize to be as close to 1 as possible
        explanations = 0.
        for ex in explain_states.values():
            explanations = explanations + ((1 - ex) ** 2).mean()
        explanations = explanations / (1e-4 + len(explain_states.keys()))
        return results, explanations

    def output_shape(self):
        # {name: ShapeSpec} for every output feature map.
        return {
            name: ShapeSpec(
                channels=self._out_feature_channels[name],
                stride=self._out_feature_strides[name]
            )
            for name in self._out_features
        }
class FPNINDI(Backbone):
    """
    This module implements Feature Pyramid Network.
    It creates pyramid features built on top of some input feature maps.

    Unlike the vanilla FPN, lateral features are refined over ``timesteps``
    recurrent iterations using horizontal hConvGRU cells and top-down
    tdConvGRU cells, with gradients computed either by backprop-through-time
    ("bptt") or by (contractor) recurrent back-propagation ("rbp"/"cbp").
    """

    def __init__(
            self,
            bottom_up,
            in_features,
            out_channels,
            norm="",
            gala=False,
            top_block=None,
            fuse_type="sum",
            grad_method='bptt',
            neumann_iterations=-1,
            memory_mode=False,
            timesteps=3):
        """
        Args:
            bottom_up (Backbone): module representing the bottom up subnetwork.
                Must be a subclass of :class:`Backbone`. The multi-scale feature
                maps generated by the bottom up network, and listed in `in_features`,
                are used to generate FPN levels.
            in_features (list[str]): names of the input feature maps coming
                from the backbone to which FPN is attached. For example, if the
                backbone produces ["res2", "res3", "res4"], any *contiguous* sublist
                of these may be used; order must be from high to low resolution.
            out_channels (int): number of channels in the output feature maps.
            norm (str): the normalization to use.
            gala (bool): forwarded to the horizontal hConvGRUCellOld cells
                (attention variant; semantics defined by that cell).
            top_block (nn.Module or None): if provided, an extra operation will
                be performed on the output of the last (smallest resolution)
                FPN output, and the result will extend the result list. The top_block
                further downsamples the feature map. It must have an attribute
                "num_levels", meaning the number of extra FPN levels added by
                this block, and "in_feature", which is a string representing
                its input feature (e.g., p5).
            fuse_type (str): types for fusing the top down features and the lateral
                ones. It can be "sum" (default), which sums up element-wise; or "avg",
                which takes the element-wise mean of the two.
            grad_method (str): 'bptt' for backprop through time, or 'rbp'/'cbp'
                to use the Neumann-series RBP path in :meth:`forward`.
            neumann_iterations (int): iteration count handed to RBPFun.apply.
            memory_mode (bool): when True, top-down results are NOT written back
                into the hidden states (and the neumann path raises
                NotImplementedError where a second TD pass would be needed).
            timesteps (int): number of recurrent refinement steps.
        """
        super(FPNINDI, self).__init__()
        assert isinstance(bottom_up, Backbone)
        # Feature map strides and channels from
        # the bottom up network (e.g. ResNet)
        self.grad_method = grad_method.lower()
        self.timesteps = timesteps
        self.neumann_iterations = neumann_iterations
        self.gala = gala
        self.memory_mode = memory_mode
        input_shapes = bottom_up.output_shape()
        in_strides = [input_shapes[f].stride for f in in_features]
        in_channels = [input_shapes[f].channels for f in in_features]
        _assert_strides_are_log2_contiguous(in_strides)
        lateral_convs = []
        output_convs = []
        td_convs = []
        td_mapping = []
        horizontal_names = []  # TODO: Remove this
        stages = []
        self.conv1x1s = []
        # Norm layers carry affine params, so bias is only used when norm is off.
        use_bias = norm == ""
        for idx, it_in_channels in enumerate(in_channels):
            lateral_norm = get_norm(norm, out_channels)
            output_norm = get_norm(norm, out_channels)
            # Feedforward connections: 1x1 projection to out_channels.
            lateral_conv = Conv2d(
                it_in_channels,
                out_channels,
                kernel_size=1,
                bias=use_bias,
                norm=lateral_norm
            )
            weight_init.c2_xavier_fill(lateral_conv)
            # Horizontal (recurrent) connections. NOTE: the GRU cell is not
            # passed through c2_xavier_fill; it relies on its own init.
            output_conv = hConvGRUCellOld(
                input_size=out_channels,
                hidden_size=out_channels,
                kernel_size=3,
                batchnorm=True,
                timesteps=timesteps,
                gala=self.gala,
                norm=norm,
                grad_method=self.grad_method)
            # Stage number is log2 of the stride (e.g. stride 4 -> stage 2).
            stage = int(math.log2(in_strides[idx]))
            stages += [stage]
            self.add_module("fpn_lateral{}".format(stage), lateral_conv)
            self.add_module("fpn_recurrent{}".format(stage), output_conv)
            lateral_convs.append(lateral_conv)
            output_convs.append(output_conv)
            horizontal_names.append("fpn_recurrent{}".format(stage))
            # TD connections
            if idx < (len(in_channels) - 1):
                # Treat idx as the higher layer. Mapping is high -> low.
                td_conv = tdConvGRUCell(
                    fan_in=out_channels,
                    td_fan_in=out_channels,
                    diff_fan_in=out_channels,
                    kernel_size=3,
                    batchnorm=True,
                    timesteps=timesteps,
                    norm=norm,
                    grad_method=self.grad_method)
                # the 1x1 convs to make it into logits
                conv1x1 = Conv2d(
                    out_channels,
                    out_channels,
                    kernel_size=1,
                    stride=1,
                    bias=use_bias,
                    norm=output_norm)
                weight_init.c2_xavier_fill(conv1x1)
                self.add_module(
                    "fpn_topdown_conv1x1_{}".format(stage), conv1x1)
                self.conv1x1s.append(conv1x1)
            else:
                # The coarsest level has no layer above it, so its "TD" op is a
                # plain 3x3 conv instead of a top-down GRU cell.
                td_conv = Conv2d(
                    out_channels,
                    out_channels,
                    kernel_size=3,
                    stride=1,
                    padding=1,
                    bias=use_bias,
                    norm=output_norm)
                weight_init.c2_xavier_fill(td_conv)
            self.add_module("fpn_topdown{}".format(stage), td_conv)
            td_convs.append(td_conv)
            td_mapping += [[stage, int(math.log2(in_strides[idx - 1]))]]
        # Place convs into top-down order (from low to high resolution)
        # to make the top-down computation in forward clearer.
        self.lateral_convs = lateral_convs[::-1]
        self.output_convs = output_convs[::-1]
        self.td_convs = td_convs[::-1]
        self.conv1x1s = self.conv1x1s[::-1]
        self.horizontal_names = horizontal_names[::-1]
        self.td_mapping = td_mapping[::-1]
        self.stages = stages[::-1]
        self.top_block = top_block
        self.in_features = in_features
        self.bottom_up = bottom_up
        # Return feature names are "p<stage>", like ["p2", "p3", ..., "p6"]
        self._out_feature_strides = {
            "p{}".format(int(math.log2(s))): s for s in in_strides}
        # top block output feature maps.
        if self.top_block is not None:
            for s in range(stage, stage + self.top_block.num_levels):
                self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1)
        self._out_features = list(self._out_feature_strides.keys())
        self._out_feature_channels = {
            k: out_channels for k in self._out_features}
        self._size_divisibility = in_strides[-1]
        assert fuse_type in {"avg", "sum"}
        self._fuse_type = fuse_type

    @property
    def size_divisibility(self):
        # Inputs must be divisible by the coarsest stride.
        return self._size_divisibility

    def forward(self, x):
        """
        Args:
            input (dict[str->Tensor]): mapping feature map name (e.g., "res5") to
                feature map tensor for each feature level in high to low resolution order.
        Returns:
            dict[str->Tensor]:
                mapping from feature map name to FPN feature map tensor
                in high to low resolution order. Returned feature names follow the FPN
                paper convention: "p<stage>", where stage has stride = 2 ** stage e.g.,
                ["p2", "p3", ..., "p6"].
        """
        # Reverse feature maps into top-down order (from low to high resolution)
        bottom_up_features = self.bottom_up(x)
        x = [bottom_up_features[f] for f in self.in_features[::-1]]
        results = [[] for _ in range(len(self.stages))]
        num_layers = len(self.stages)
        hidden_states = {}
        lateral_activities = {}
        # NOTE(review): if grad_method is neither cbp/rbp nor bptt, `results`
        # keeps its empty placeholders and `penalty` is never bound — confirm
        # callers only use the three supported values.
        if self.grad_method == 'cbp' or self.grad_method == 'rbp':
            results, penalty = self.neumann(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        elif self.grad_method == 'bptt':
            results = self.bptt(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        # Finish up: optionally extend with extra top-block levels (e.g. p6).
        if self.top_block is not None:
            top_block_in_feature = bottom_up_features.get(
                self.top_block.in_feature, None)
            if top_block_in_feature is None:
                top_block_in_feature = results[
                    self._out_features.index(self.top_block.in_feature)]
            results.extend(self.top_block(top_block_in_feature))
        assert len(self._out_features) == len(results)
        out_dict = dict(zip(self._out_features, results))
        # Expose the CBP contraction penalty to the loss during training.
        if self.grad_method == 'cbp' and self.training:
            out_dict['penalty'] = penalty
        return out_dict

    def neumann(self, x, num_layers, hidden_states, lateral_activities, results):
        """Run neumann RBP.

        Burn in T-1 recurrent steps without gradient tracking, replay one
        tracked step from detached hidden states, then splice RBP gradients in
        via RBPFun and accumulate the CBP penalty during training.
        """
        # Now move through remaining layers (with feedback); no grads during
        # burn-in — only the final tracked step matters for RBP.
        with torch.no_grad():
            for n in range(self.timesteps - 1):
                for layer_idx, (
                        features,
                        lateral_conv,
                        output_conv,
                        h_name,
                        stage,
                        td_conv,
                        td_map) in enumerate(
                        zip(
                            x,
                            self.lateral_convs,
                            self.output_convs,
                            self.horizontal_names,
                            self.stages,
                            self.td_convs,
                            self.td_mapping)):
                    # FF connections -- only compute once (t=0), then reuse.
                    stage = "fpn_lateral{}".format(stage)
                    if n == 0:
                        # Laterals need grads even inside the no_grad block.
                        with torch.enable_grad():
                            lateral_features = lateral_conv(features)
                            lateral_activities[stage] = lateral_features
                    else:
                        lateral_features = lateral_activities[stage]
                    # Horizontal connections; hidden state starts at zero.
                    if n == 0:
                        hidden_states[stage] = torch.zeros_like(lateral_features)  # noqa
                    recurrent_features = output_conv(
                        input_=lateral_features,
                        h_=hidden_states[stage])
                    hidden_states[stage] = recurrent_features
        # Detach and require_grads for each of the hidden_states so the final
        # step forms a fixed-point pair (prev_state -> last_state) for RBP.
        prev_states = {}
        for k, v in hidden_states.items():
            if not v.requires_grad:
                v = v.clone().detach().requires_grad_()
            prev_states[k] = v
        # One tracked recurrent step from the detached states.
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                h_name,
                stage,
                td_conv,
                td_map) in enumerate(
                zip(
                    x,
                    self.lateral_convs,
                    self.output_convs,
                    self.horizontal_names,
                    self.stages,
                    self.td_convs,
                    self.td_mapping)):
            # FF connections -- only compute once
            stage = "fpn_lateral{}".format(stage)
            lateral_features = lateral_activities[stage]
            # Horizontal connections
            recurrent_features = output_conv(
                input_=lateral_features,
                h_=prev_states[stage])
            hidden_states[stage] = recurrent_features
        # Compute jacobians from top-to-bottom
        penalty = 0.
        for layer_idx, stage in enumerate(self.stages[1:]):
            # NOTE(review): maps stage number to a results slot assuming the
            # finest stage is 2 — confirm against the configured in_features.
            corrected_idx = stage - 2
            stage_name = 'fpn_lateral{}'.format(stage)
            prev_state = prev_states[stage_name]
            last_state = hidden_states[stage_name]
            internal_state = RBPFun.apply(
                prev_state,
                last_state,
                0,
                0,
                stage_name,
                self.neumann_iterations)
            results[corrected_idx] = internal_state
            if self.memory_mode:
                raise NotImplementedError(
                    'Need one more Top-Down pass here.')
            # Accrue the penalities
            if self.training:
                penalty = penalty + CBP_penalty(
                    last_state=last_state,
                    prev_state=prev_state,
                    compute_hessian=(self.grad_method == 'cbp'))
        # One last pass for Output convs/TDs
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                h_name,
                stage,
                td_conv,
                td_map) in enumerate(
                zip(
                    x,
                    self.lateral_convs,
                    self.output_convs,
                    self.horizontal_names,
                    self.stages,
                    self.td_convs,
                    self.td_mapping)):
            # FF connections -- only compute once
            stage = "fpn_lateral{}".format(stage)
            lateral_features = lateral_activities[stage]
            # Replace their TD with ours
            # Gather activity from one layer above
            if layer_idx > 0:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = lateral_activities[higher_name]
                # NOTE(review): `recurrent_features` here is stale — it is the
                # last value left over from the tracked-step loop above, not
                # this layer's hidden state. Likely intended:
                # hidden_states[stage]. Confirm before relying on this path.
                prev_features = td_conv(
                    lower_=recurrent_features,
                    higher_=higher_activity)
                if not self.memory_mode:
                    hidden_states[stage] = prev_features
                results[layer_idx] = prev_features
                # NOTE(review): leftover debug print — consider removing.
                print('TD {} -> {}'.format(higher_name, stage))
            else:
                results[layer_idx] = td_conv(recurrent_features)
        # Back to high -> low resolution order.
        results = results[::-1]
        return results, penalty

    def bptt(self, x, num_layers, hidden_states, lateral_activities, results):
        """Run backprop through time.

        Executes T-1 horizontal recurrent steps with full gradient tracking,
        then a final step that also applies the top-down cells and the 1x1
        output projections.
        """
        # Now move through remaining layers (with feedback)
        for n in range(self.timesteps - 1):
            for layer_idx, (
                    features,
                    lateral_conv,
                    output_conv,
                    h_name,
                    stage,
                    td_conv,
                    td_map) in enumerate(
                    zip(
                        x,
                        self.lateral_convs,
                        self.output_convs,
                        self.horizontal_names,
                        self.stages,
                        self.td_convs,
                        self.td_mapping)):
                # FF connections -- only compute once (t=0), then reuse.
                stage = "fpn_lateral{}".format(stage)
                if n == 0:
                    lateral_features = lateral_conv(features)
                    lateral_activities[stage] = lateral_features
                else:
                    lateral_features = lateral_activities[stage]
                # Horizontal connections; hidden state starts at zero.
                if n == 0:
                    hidden_states[stage] = torch.zeros_like(lateral_features)  # noqa
                recurrent_features = output_conv(
                    input_=lateral_features,
                    h_=hidden_states[stage])
                hidden_states[stage] = recurrent_features
        # Final step: horizontal update followed by top-down fusion.
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                h_name,
                stage,
                td_conv,
                td_map) in enumerate(
                zip(
                    x,
                    self.lateral_convs,
                    self.output_convs,
                    self.horizontal_names,
                    self.stages,
                    self.td_convs,
                    self.td_mapping)):
            # FF connections -- only compute once
            stage = "fpn_lateral{}".format(stage)
            lateral_features = lateral_activities[stage]
            # Horizontal connections
            recurrent_features = output_conv(
                input_=lateral_features,
                h_=hidden_states[stage])
            hidden_states[stage] = recurrent_features
            # Replace their TD with ours
            # Gather activity from one layer above
            if layer_idx > 0:
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx - 1])
                higher_activity = lateral_activities[higher_name]
                prev_features = td_conv(
                    lower_=recurrent_features,
                    higher_=higher_activity)
                if not self.memory_mode:
                    hidden_states[stage] = prev_features
                # Project through the stage's 1x1 conv into output logits.
                results[layer_idx] = self.conv1x1s[layer_idx - 1](prev_features)
            else:
                # Coarsest level: plain conv, no layer above to fuse.
                results[layer_idx] = td_conv(recurrent_features)
        # Back to high -> low resolution order.
        results = results[::-1]
        return results

    def output_shape(self):
        """Return a ShapeSpec (channels, stride) for each output feature name."""
        return {
            name: ShapeSpec(
                channels=self._out_feature_channels[name],
                stride=self._out_feature_strides[name]
            )
            for name in self._out_features
        }
class FPNGN(Backbone):
    """
    This module implements Feature Pyramid Network.
    It creates pyramid features built on top of some input feature maps.

    Variant in which the coarsest level is computed once feedforward (in
    :meth:`forward`) while the remaining levels are refined recurrently with
    horizontal hConvGRU cells and 1x1 top-down tdConvGRU cells.
    """

    def __init__(
            self,
            bottom_up,
            in_features,
            out_channels,
            norm="",
            gala=False,
            top_block=None,
            fuse_type="sum",
            grad_method='bptt',
            neumann_iterations=15,
            memory_mode=False,
            timesteps=3):
        """
        Args:
            bottom_up (Backbone): module representing the bottom up subnetwork.
                Must be a subclass of :class:`Backbone`. The multi-scale feature
                maps generated by the bottom up network, and listed in `in_features`,
                are used to generate FPN levels.
            in_features (list[str]): names of the input feature maps coming
                from the backbone to which FPN is attached. For example, if the
                backbone produces ["res2", "res3", "res4"], any *contiguous* sublist
                of these may be used; order must be from high to low resolution.
            out_channels (int): number of channels in the output feature maps.
            norm (str): the normalization to use.
            gala (bool): forwarded to the horizontal hConvGRUCellOld cells.
            top_block (nn.Module or None): if provided, an extra operation will
                be performed on the output of the last (smallest resolution)
                FPN output, and the result will extend the result list. The top_block
                further downsamples the feature map. It must have an attribute
                "num_levels", meaning the number of extra FPN levels added by
                this block, and "in_feature", which is a string representing
                its input feature (e.g., p5).
            fuse_type (str): types for fusing the top down features and the lateral
                ones. It can be "sum" (default), which sums up element-wise; or "avg",
                which takes the element-wise mean of the two.
            grad_method (str): 'bptt' or 'rbp'/'cbp' (Neumann RBP path).
            neumann_iterations (int): iteration count handed to RBPFun.apply.
            memory_mode (bool): when True, top-down results are NOT written back
                into the hidden states.
            timesteps (int): number of recurrent refinement steps.
        """
        super(FPNGN, self).__init__()
        assert isinstance(bottom_up, Backbone)
        # Feature map strides and channels from
        # the bottom up network (e.g. ResNet)
        self.grad_method = grad_method.lower()
        self.timesteps = timesteps
        self.neumann_iterations = neumann_iterations
        self.gala = gala
        self.memory_mode = memory_mode
        input_shapes = bottom_up.output_shape()
        in_strides = [input_shapes[f].stride for f in in_features]
        in_channels = [input_shapes[f].channels for f in in_features]
        _assert_strides_are_log2_contiguous(in_strides)
        lateral_convs = []
        output_convs = []
        td_convs = []
        td_mapping = []
        horizontal_names = []
        stages = []
        # Norm layers carry affine params, so bias is only used when norm is off.
        use_bias = norm == ""
        for idx, it_in_channels in enumerate(in_channels):
            lateral_norm = get_norm(norm, out_channels)
            output_norm = get_norm(norm, out_channels)
            # Feedforward connections: 1x1 projection to out_channels.
            lateral_conv = Conv2d(
                it_in_channels,
                out_channels,
                kernel_size=1,
                bias=use_bias,
                norm=lateral_norm
            )
            weight_init.c2_xavier_fill(lateral_conv)
            if idx < (len(in_channels) - 1):
                # Horizontal (recurrent) connections. NOTE: the GRU cell is
                # not c2_xavier_filled; it relies on its own init.
                output_conv = hConvGRUCellOld(
                    input_size=out_channels,
                    hidden_size=out_channels,
                    kernel_size=3,
                    batchnorm=True,
                    timesteps=timesteps,
                    gala=self.gala,
                    norm=norm,
                    grad_method=self.grad_method)
            else:
                # Because the last layer is handled specially
                # (computed once feedforward in forward()).
                output_conv = Conv2d(
                    out_channels,
                    out_channels,
                    kernel_size=3,
                    stride=1,
                    padding=1,
                    bias=use_bias,
                    norm=output_norm)
                weight_init.c2_xavier_fill(output_conv)
            # Stage number is log2 of the stride (e.g. stride 4 -> stage 2).
            stage = int(math.log2(in_strides[idx]))
            stages += [stage]
            self.add_module("fpn_lateral{}".format(stage), lateral_conv)
            self.add_module("fpn_output{}".format(stage), output_conv)
            lateral_convs.append(lateral_conv)
            output_convs.append(output_conv)
            horizontal_names.append("fpn_lateral{}".format(stage))
            # TD connections
            if idx > 0:
                # Treat idx as the higher layer. Mapping is high -> low.
                td_conv = tdConvGRUCell(
                    fan_in=out_channels,
                    td_fan_in=out_channels,
                    diff_fan_in=out_channels,
                    kernel_size=1,
                    batchnorm=True,
                    timesteps=timesteps,
                    norm=norm,
                    grad_method=self.grad_method)
                self.add_module("fpn_topdown{}".format(stage), td_conv)
                td_convs.append(td_conv)
                td_mapping += [[stage, int(math.log2(in_strides[idx - 1]))]]
        # Place convs into top-down order (from low to high resolution)
        # to make the top-down computation in forward clearer.
        self.lateral_convs = lateral_convs[::-1]
        self.output_convs = output_convs[::-1]
        self.td_convs = td_convs[::-1]
        self.horizontal_names = horizontal_names[::-1]
        self.td_mapping = td_mapping[::-1]
        self.stages = stages[::-1]
        self.top_block = top_block
        self.in_features = in_features
        self.bottom_up = bottom_up
        # Return feature names are "p<stage>", like ["p2", "p3", ..., "p6"]
        self._out_feature_strides = {
            "p{}".format(int(math.log2(s))): s for s in in_strides}
        # top block output feature maps.
        if self.top_block is not None:
            for s in range(stage, stage + self.top_block.num_levels):
                self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1)
        self._out_features = list(self._out_feature_strides.keys())
        self._out_feature_channels = {
            k: out_channels for k in self._out_features}
        self._size_divisibility = in_strides[-1]
        assert fuse_type in {"avg", "sum"}
        self._fuse_type = fuse_type

    @property
    def size_divisibility(self):
        # Inputs must be divisible by the coarsest stride.
        return self._size_divisibility

    def forward(self, x):
        """
        Args:
            input (dict[str->Tensor]): mapping feature map name (e.g., "res5") to
                feature map tensor for each feature level in high to low resolution order.
        Returns:
            dict[str->Tensor]:
                mapping from feature map name to FPN feature map tensor
                in high to low resolution order. Returned feature names follow the FPN
                paper convention: "p<stage>", where stage has stride = 2 ** stage e.g.,
                ["p2", "p3", ..., "p6"].
        """
        # Reverse feature maps into top-down order (from low to high resolution)
        bottom_up_features = self.bottom_up(x)
        x = [bottom_up_features[f] for f in self.in_features[::-1]]
        results = [[] for _ in range(len(self.stages))]
        num_layers = len(self.stages)
        hidden_states = {}
        lateral_activities = {}
        # Run transformation on highest layer (computed once, feedforward).
        prev_features = self.lateral_convs[0](x[0])
        results[-1] = self.output_convs[0](prev_features)  # noqa Eventually convert this to recurrent
        stage = "fpn_lateral{}".format(self.stages[0])
        hidden_states[stage] = prev_features
        # NOTE(review): if grad_method is neither cbp/rbp nor bptt, only the
        # coarsest slot of `results` is filled and `penalty` is never bound.
        if self.grad_method == 'cbp' or self.grad_method == 'rbp':
            results, penalty = self.neumann(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        elif self.grad_method == 'bptt':
            results = self.bptt(
                x=x,
                num_layers=num_layers,
                hidden_states=hidden_states,
                lateral_activities=lateral_activities,
                results=results)
        # Finish up: optionally extend with extra top-block levels (e.g. p6).
        if self.top_block is not None:
            top_block_in_feature = bottom_up_features.get(
                self.top_block.in_feature, None)
            if top_block_in_feature is None:
                top_block_in_feature = results[
                    self._out_features.index(self.top_block.in_feature)]
            results.extend(self.top_block(top_block_in_feature))
        assert len(self._out_features) == len(results)
        out_dict = dict(zip(self._out_features, results))
        # Expose the CBP contraction penalty to the loss during training.
        if self.grad_method == 'cbp' and self.training:
            out_dict['penalty'] = penalty
        return out_dict

    def neumann(self, x, num_layers, hidden_states, lateral_activities, results):
        """Run neumann RBP.

        Burn in T-1 recurrent steps (horizontal + top-down) without gradient
        tracking, replay one tracked step from detached hidden states, then
        splice RBP gradients in via RBPFun and accumulate the CBP penalty
        during training. The coarsest level is skipped (handled in forward).
        """
        # Now move through remaining layers (with feedback); no grads during
        # burn-in — only the final tracked step matters for RBP.
        with torch.no_grad():
            for n in range(self.timesteps - 1):
                for layer_idx, (
                        features,
                        lateral_conv,
                        output_conv,
                        h_name,
                        stage,
                        td_conv,
                        td_map) in enumerate(
                        zip(
                            x[1:],
                            self.lateral_convs[1:],
                            self.output_convs[1:],
                            self.horizontal_names[1:],
                            self.stages[1:],
                            self.td_convs,
                            self.td_mapping)):
                    # FF connections -- only compute once (t=0), then reuse.
                    stage = "fpn_lateral{}".format(stage)
                    if n == 0:
                        # Laterals need grads even inside the no_grad block.
                        with torch.enable_grad():
                            lateral_features = lateral_conv(features)
                            lateral_activities[stage] = lateral_features
                    else:
                        lateral_features = lateral_activities[stage]
                    # Horizontal connections; hidden state starts at zero.
                    if n == 0:
                        hidden_states[stage] = torch.zeros_like(lateral_features)  # noqa
                    lateral_features = output_conv(
                        input_=lateral_features,
                        h_=hidden_states[stage])
                    hidden_states[stage] = lateral_features
                    # Replace their TD with ours
                    # Gather activity from one layer above (since we enumerate
                    # stages[1:], self.stages[layer_idx] is the layer above).
                    higher_name = 'fpn_lateral{}'.format(
                        self.stages[layer_idx])
                    higher_activity = hidden_states[higher_name]
                    prev_features = td_conv(
                        lower_=lateral_features,
                        higher_=higher_activity)
                    hidden_states[stage] = prev_features
                    # TODO: Add top-block below to recurrent loop
        # Detach and require_grads for each of the hidden_states so the final
        # step forms a fixed-point pair (prev_state -> last_state) for RBP.
        prev_states = {}
        for k, v in hidden_states.items():
            if not v.requires_grad:
                v = v.clone().detach().requires_grad_()
            prev_states[k] = v
        # Compute last timestep and update hidden_states again
        for layer_idx, (
                features,
                lateral_conv,
                output_conv,
                h_name,
                stage,
                td_conv,
                td_map) in enumerate(
                zip(
                    x[1:],
                    self.lateral_convs[1:],
                    self.output_convs[1:],
                    self.horizontal_names[1:],
                    self.stages[1:],
                    self.td_convs,
                    self.td_mapping)):
            # FF connections -- only compute once
            corrected_idx = stage - 2  # NOTE(review): unused here; recomputed below.
            stage = "fpn_lateral{}".format(stage)
            lateral_features = lateral_activities[stage]
            # Horizontal connections
            lateral_features = output_conv(
                input_=lateral_features,
                h_=prev_states[stage])
            hidden_states[stage] = lateral_features
            # Replace their TD with ours
            # Gather activity from one layer above
            higher_name = 'fpn_lateral{}'.format(
                self.stages[layer_idx])
            higher_activity = hidden_states[higher_name]
            prev_features = td_conv(
                lower_=lateral_features,
                higher_=higher_activity)
            if not self.memory_mode:
                hidden_states[stage] = prev_features
        # Compute jacobians from top-to-bottom
        penalty = 0.
        for layer_idx, stage in enumerate(self.stages[1:]):
            # NOTE(review): maps stage number to a results slot assuming the
            # finest stage is 2 — confirm against the configured in_features.
            corrected_idx = stage - 2
            stage_name = 'fpn_lateral{}'.format(stage)
            prev_state = prev_states[stage_name]
            last_state = hidden_states[stage_name]
            internal_state = RBPFun.apply(
                prev_state,
                last_state,
                0,
                0,
                stage_name,
                self.neumann_iterations)
            results[corrected_idx] = internal_state
            if self.memory_mode:
                raise NotImplementedError(
                    'Need one more Top-Down pass here.')
            # Accrue the penalities
            if self.training:
                penalty = penalty + CBP_penalty(
                    last_state=last_state,
                    prev_state=prev_state,
                    compute_hessian=(self.grad_method == 'cbp'))
        return results, penalty

    def bptt(self, x, num_layers, hidden_states, lateral_activities, results):
        """Run backprop through time.

        All `timesteps` recurrent steps (horizontal + top-down) are tracked;
        each step overwrites the per-level results, so the final step's output
        is what gets returned. The coarsest level is skipped (handled in
        forward).
        """
        # Now move through remaining layers (with feedback)
        for n in range(self.timesteps):
            for layer_idx, (
                    features,
                    lateral_conv,
                    output_conv,
                    h_name,
                    stage,
                    td_conv,
                    td_map) in enumerate(
                    zip(
                        x[1:],
                        self.lateral_convs[1:],
                        self.output_convs[1:],
                        self.horizontal_names[1:],
                        self.stages[1:],
                        self.td_convs,
                        self.td_mapping)):
                # Slot in high -> low order (slot -1 was filled in forward).
                corrected_idx = num_layers - layer_idx - 2
                # FF connections -- only compute once (t=0), then reuse.
                stage = "fpn_lateral{}".format(stage)
                if n == 0:
                    lateral_features = lateral_conv(features)
                    lateral_activities[stage] = lateral_features
                else:
                    lateral_features = lateral_activities[stage]
                # Horizontal connections; hidden state starts at zero.
                if n == 0:
                    hidden_states[stage] = torch.zeros_like(lateral_features)  # noqa
                lateral_features = output_conv(
                    input_=lateral_features,
                    h_=hidden_states[stage])
                hidden_states[stage] = lateral_features
                # Replace their TD with ours
                # Gather activity from one layer above
                higher_name = 'fpn_lateral{}'.format(
                    self.stages[layer_idx])
                higher_activity = hidden_states[higher_name]
                prev_features = td_conv(
                    lower_=lateral_features,
                    higher_=higher_activity)
                if not self.memory_mode:
                    hidden_states[stage] = prev_features
                results[corrected_idx] = prev_features
                # TODO: Add top-block below to recurrent loop
        return results

    def output_shape(self):
        """Return a ShapeSpec (channels, stride) for each output feature name."""
        return {
            name: ShapeSpec(
                channels=self._out_feature_channels[name],
                stride=self._out_feature_strides[name]
            )
            for name in self._out_features
        }
class FPN(Backbone):
    """
    This module implements Feature Pyramid Network.
    It creates pyramid features built on top of some input feature maps.
    """

    def __init__(
            self,
            bottom_up,
            in_features,
            out_channels,
            norm="",
            penalty_scale=None,
            top_block=None,
            fuse_type="sum"
    ):
        """
        Args:
            bottom_up (Backbone): module representing the bottom up subnetwork.
                Must be a subclass of :class:`Backbone`. The multi-scale feature
                maps generated by the bottom up network, and listed in `in_features`,
                are used to generate FPN levels.
            in_features (list[str]): names of the input feature maps coming
                from the backbone to which FPN is attached. For example, if the
                backbone produces ["res2", "res3", "res4"], any *contiguous* sublist
                of these may be used; order must be from high to low resolution.
            out_channels (int): number of channels in the output feature maps.
            norm (str): the normalization to use.
            penalty_scale (float or None): when set, `bottom_up` is expected to
                return a (features, penalty) pair and the scaled penalty is
                added to the output dict under 'cbp_penalty'.
            top_block (nn.Module or None): if provided, an extra operation will
                be performed on the output of the last (smallest resolution)
                FPN output, and the result will extend the result list. The top_block
                further downsamples the feature map. It must have an attribute
                "num_levels", meaning the number of extra FPN levels added by
                this block, and "in_feature", which is a string representing
                its input feature (e.g., p5).
            fuse_type (str): types for fusing the top down features and the lateral
                ones. It can be "sum" (default), which sums up element-wise; or "avg",
                which takes the element-wise mean of the two.
        """
        super(FPN, self).__init__()
        assert isinstance(bottom_up, Backbone)
        self.penalty_scale = penalty_scale
        # Feature map strides and channels from the bottom up network (e.g. ResNet)
        input_shapes = bottom_up.output_shape()
        in_strides = [input_shapes[f].stride for f in in_features]
        in_channels = [input_shapes[f].channels for f in in_features]
        _assert_strides_are_log2_contiguous(in_strides)
        lateral_convs = []
        output_convs = []
        # Norm layers carry affine params, so bias is only used when norm is off.
        use_bias = norm == ""
        for idx, it_in_channels in enumerate(in_channels):
            lateral_norm = get_norm(norm, out_channels)
            output_norm = get_norm(norm, out_channels)
            # 1x1 lateral projection followed by a 3x3 output smoothing conv.
            lateral_conv = Conv2d(
                it_in_channels, out_channels, kernel_size=1, bias=use_bias, norm=lateral_norm
            )
            output_conv = Conv2d(
                out_channels,
                out_channels,
                kernel_size=3,
                stride=1,
                padding=1,
                bias=use_bias,
                norm=output_norm,
            )
            weight_init.c2_xavier_fill(lateral_conv)
            weight_init.c2_xavier_fill(output_conv)
            # Stage number is log2 of the stride (e.g. stride 4 -> stage 2).
            stage = int(math.log2(in_strides[idx]))
            self.add_module("fpn_lateral{}".format(stage), lateral_conv)
            self.add_module("fpn_output{}".format(stage), output_conv)
            lateral_convs.append(lateral_conv)
            output_convs.append(output_conv)
        # Place convs into top-down order (from low to high resolution)
        # to make the top-down computation in forward clearer.
        self.lateral_convs = lateral_convs[::-1]
        self.output_convs = output_convs[::-1]
        self.top_block = top_block
        self.in_features = in_features
        self.bottom_up = bottom_up
        # Return feature names are "p<stage>", like ["p2", "p3", ..., "p6"]
        self._out_feature_strides = {"p{}".format(int(math.log2(s))): s for s in in_strides}
        # top block output feature maps.
        if self.top_block is not None:
            for s in range(stage, stage + self.top_block.num_levels):
                self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1)
        self._out_features = list(self._out_feature_strides.keys())
        self._out_feature_channels = {k: out_channels for k in self._out_features}
        self._size_divisibility = in_strides[-1]
        assert fuse_type in {"avg", "sum"}
        self._fuse_type = fuse_type

    @property
    def size_divisibility(self):
        # Inputs must be divisible by the coarsest stride.
        return self._size_divisibility

    def forward(self, x):
        """
        Args:
            input (dict[str->Tensor]): mapping feature map name (e.g., "res5") to
                feature map tensor for each feature level in high to low resolution order.
        Returns:
            dict[str->Tensor]:
                mapping from feature map name to FPN feature map tensor
                in high to low resolution order. Returned feature names follow the FPN
                paper convention: "p<stage>", where stage has stride = 2 ** stage e.g.,
                ["p2", "p3", ..., "p6"].
        """
        # Reverse feature maps into top-down order (from low to high resolution)
        if self.penalty_scale is not None:
            # Bottom-up returns (features, penalty) in penalized mode.
            bottom_up_features, penalty = self.bottom_up(x)
        else:
            bottom_up_features = self.bottom_up(x)
        x = [bottom_up_features[f] for f in self.in_features[::-1]]
        results = []
        prev_features = self.lateral_convs[0](x[0])
        results.append(self.output_convs[0](prev_features))
        # Top-down pathway: upsample, add lateral, smooth with 3x3 conv.
        for features, lateral_conv, output_conv in zip(
            x[1:], self.lateral_convs[1:], self.output_convs[1:]
        ):
            top_down_features = F.interpolate(prev_features, scale_factor=2, mode="nearest")
            lateral_features = lateral_conv(features)
            prev_features = lateral_features + top_down_features
            if self._fuse_type == "avg":
                prev_features /= 2
            results.insert(0, output_conv(prev_features))
        # Optionally extend with extra top-block levels (e.g. p6).
        if self.top_block is not None:
            top_block_in_feature = bottom_up_features.get(self.top_block.in_feature, None)
            if top_block_in_feature is None:
                top_block_in_feature = results[self._out_features.index(self.top_block.in_feature)]
            results.extend(self.top_block(top_block_in_feature))
        assert len(self._out_features) == len(results)
        out_dict = dict(zip(self._out_features, results))
        if self.penalty_scale is not None:
            out_dict['cbp_penalty'] = penalty * self.penalty_scale
        return out_dict

    def output_shape(self):
        """Return a ShapeSpec (channels, stride) for each output feature name."""
        return {
            name: ShapeSpec(
                channels=self._out_feature_channels[name], stride=self._out_feature_strides[name]
            )
            for name in self._out_features
        }
def _assert_strides_are_log2_contiguous(strides):
"""
Assert that each stride is 2x times its preceding stride, i.e. "contiguous in log2".
"""
for i, stride in enumerate(strides[1:], 1):
assert stride == 2 * strides[i - 1], "Strides {} {} are not log2 contiguous".format(
stride, strides[i - 1]
)
class LastLevelMaxPool(nn.Module):
    """
    This module is used in the original FPN to generate a downsampled
    P6 feature from P5.
    """

    def __init__(self):
        super().__init__()
        # Adds a single extra pyramid level, computed from the "p5" output.
        self.num_levels = 1
        self.in_feature = "p5"

    def forward(self, x):
        # Kernel-1, stride-2 max pooling == subsampling every other pixel.
        pooled = F.max_pool2d(x, kernel_size=1, stride=2, padding=0)
        return [pooled]
class LastLevelP6P7(nn.Module):
    """
    This module is used in RetinaNet to generate extra layers, P6 and P7 from
    C5 feature.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # Two extra pyramid levels computed from the "res5" backbone output.
        self.num_levels = 2
        self.in_feature = "res5"
        # Each conv halves the spatial resolution (3x3, stride 2, pad 1).
        self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1)
        self.p7 = nn.Conv2d(out_channels, out_channels, 3, 2, 1)
        for extra_conv in (self.p6, self.p7):
            weight_init.c2_xavier_fill(extra_conv)

    def forward(self, c5):
        p6 = self.p6(c5)
        # P7 is computed from a ReLU-activated P6.
        p7 = self.p7(F.relu(p6))
        return [p6, p7]
class LastLevelP6P7GN(nn.Module):
    """
    This module is used in RetinaNet to generate extra layers, P6 and P7 from
    C5 feature.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # Two extra pyramid levels computed from the "res5" backbone output.
        self.num_levels = 2
        self.in_feature = "res5"
        # Each conv halves the spatial resolution (3x3, stride 2, pad 1).
        self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1)
        self.p7 = nn.Conv2d(out_channels, out_channels, 3, 2, 1)
        for extra_conv in (self.p6, self.p7):
            weight_init.c2_xavier_fill(extra_conv)

    def forward(self, c5):
        p6 = self.p6(c5)
        # Unlike LastLevelP6P7, P7 uses a softplus-activated P6.
        p7 = self.p7(F.softplus(p6))
        return [p6, p7]
@BACKBONE_REGISTRY.register()
def build_resnet_fpngn_backbone(cfg, input_shape: ShapeSpec):
    """
    Build an FPNGN backbone on top of a ResNet bottom-up network.

    Args:
        cfg: a detectron2 CfgNode
    Returns:
        backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`.
    """
    return FPNGN(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindi_backbone(cfg, input_shape: ShapeSpec):
    """
    Build an FPNINDI backbone on top of a ResNet bottom-up network.

    Args:
        cfg: a detectron2 CfgNode
    Returns:
        backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`.
    """
    return FPNINDI(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindi_explain_backbone(cfg, input_shape: ShapeSpec):
    """
    Build an FPNINDIEXPLAIN backbone (GALA attention, 'pre' variant) on top of
    a ResNet bottom-up network.

    Args:
        cfg: a detectron2 CfgNode
    Returns:
        backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`.
    """
    return FPNINDIEXPLAIN(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        gala_version='pre',
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindi_explain_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Build an FPNINDIEXPLAIN backbone (GALA 'pre' variant) trained with the
    CBP gradient method and 5 recurrent timesteps.

    Args:
        cfg: a detectron2 CfgNode
    Returns:
        backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`.
    """
    return FPNINDIEXPLAIN(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        gala_version='pre',
        timesteps=5,  # previously 10
        grad_method='cbp',
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindi_explain_post_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNINDIV2 pyramid with GALA and cbp grad method.

    NOTE(review): despite the "explain_post" name this builds FPNINDIV2, and
    despite "cbp10" it passes timesteps=5 (gala_version='post' was commented
    out in the original) -- confirm against training configs.
    """
    return FPNINDIV2(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        timesteps=5,
        grad_method='cbp',
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindiv2_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNINDIV2 pyramid, GALA on, 10 cbp timesteps.
    """
    return FPNINDIV2(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        timesteps=10,
        grad_method='cbp',
        top_block=LastLevelMaxPool(),
        penalty_scale=1e-2,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindiv2_cbp10_noskip_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNINDIV2 pyramid without GALA or skip
        connections, 10 cbp timesteps.
    """
    return FPNINDIV2(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=False,
        timesteps=10,
        use_skips=False,
        grad_method='cbp',
        top_block=LastLevelMaxPool(),
        penalty_scale=1e-2,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnlateral_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNLATERAL pyramid (no GALA, with skips),
        10 cbp timesteps.
    """
    return FPNLATERAL(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=False,
        timesteps=10,
        use_skips=True,
        grad_method='cbp',
        top_block=LastLevelMaxPool(),
        penalty_scale=1e-2,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnlateral_cbp20_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNLATERAL pyramid with GALA, skips, and
        20 cbp timesteps.
    """
    return FPNLATERAL(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        timesteps=20,
        use_skips=True,
        grad_method='cbp',
        top_block=LastLevelMaxPool(),
        penalty_scale=1,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnlateral_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNLATERAL pyramid (no GALA, with skips),
        10 cbp timesteps, penalty_scale 1e-1.
    """
    return FPNLATERAL(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=False,
        timesteps=10,
        use_skips=True,
        grad_method='cbp',
        top_block=LastLevelMaxPool(),
        penalty_scale=1e-1,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindiv2_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNINDIV2 pyramid with GALA, single bptt step.
    """
    return FPNINDIV2(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        timesteps=1,
        grad_method='bptt',
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindi_explain_post_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNINDIEXPLAIN pyramid with GALA applied in
        'post' mode.
    """
    return FPNINDIEXPLAIN(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        gala_version='post',
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpnindi_gala_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNINDI pyramid with GALA attention enabled.
    """
    return FPNINDI(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        gala=True,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpngn_gala_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNGN pyramid with GALA attention enabled.
    """
    return FPNGN(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=True,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpngn_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPNGN pyramid, no GALA, 10 cbp timesteps.
    """
    return FPNGN(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        gala=False,
        grad_method='cbp',
        timesteps=10,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): the stock ResNet + FPN backbone.
    """
    return FPN(
        bottom_up=build_resnet_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gn_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on top of a group-norm ResNet bottom-up.
    """
    return FPN(
        bottom_up=build_resnet_gn_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gnbn_lowlevel_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on a gnbn low-level recurrent bottom-up
        (single bptt step).
    """
    return FPN(
        bottom_up=build_resnet_gnbn_lowlevel_model_backbone(cfg, input_shape, grad_method='bptt', timesteps=1),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gnbn_lowlevel_bptt3_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on a gnbn low-level recurrent bottom-up
        (3 bptt steps).
    """
    return FPN(
        bottom_up=build_resnet_gnbn_lowlevel_model_backbone(cfg, input_shape, grad_method='bptt', timesteps=3),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gnbn_lowlevel_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on a gnbn low-level recurrent bottom-up
        (10 cbp timesteps, penalty_scale 1e-1).
    """
    return FPN(
        bottom_up=build_resnet_gnbn_lowlevel_model_backbone(cfg, input_shape, grad_method='cbp', timesteps=10),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        penalty_scale=1e-1,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gnbn_lowlevel_gala_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on a gnbn low-level recurrent bottom-up with
        GALA (10 cbp timesteps, penalty_scale 1e-1).
    """
    return FPN(
        bottom_up=build_resnet_gnbn_lowlevel_model_backbone(cfg, input_shape, grad_method='cbp', timesteps=10, gala=True),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        penalty_scale=1e-1,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gnbn_lowlevel_lesssp_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on a gnbn low-level "lesssp" recurrent
        bottom-up with GALA.

    NOTE(review): despite the "cbp10" name the bottom-up uses timesteps=6;
    confirm against training configs.
    """
    return FPN(
        bottom_up=build_resnet_gnbn_lowlevel_lesssp_backbone(
            cfg,
            input_shape,
            grad_method='cbp',
            timesteps=6,
            gala=True),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        penalty_scale=0.1,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gnbn_horizontal_cbp10_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on a gnbn "horizontal" recurrent bottom-up
        with GALA.

    NOTE(review): despite the "cbp10" name the bottom-up uses timesteps=6;
    confirm against training configs.
    """
    return FPN(
        bottom_up=build_resnet_gnbn_horizontal_backbone(
            cfg,
            input_shape,
            grad_method='cbp',
            timesteps=6,
            gala=True),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        penalty_scale=0.1,
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_resnet_fpn_gnbn_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): FPN on top of a gnbn ResNet bottom-up.
    """
    return FPN(
        bottom_up=build_resnet_gnbn_backbone(cfg, input_shape),
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=cfg.MODEL.FPN.OUT_CHANNELS,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelMaxPool(),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_retinanet_resnet_fpn_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): RetinaNet-style ResNet + FPN whose extra P6/P7
        levels are computed by convs from res5 rather than max-pooling.
    """
    bottom_up = build_resnet_backbone(cfg, input_shape)
    out_channels = cfg.MODEL.FPN.OUT_CHANNELS
    # P6/P7 convs consume the raw res5 feature map
    res5_channels = bottom_up.output_shape()["res5"].channels
    return FPN(
        bottom_up=bottom_up,
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=out_channels,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelP6P7(res5_channels, out_channels),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
@BACKBONE_REGISTRY.register()
def build_retinanet_resnet_fpngn_backbone(cfg, input_shape: ShapeSpec):
    """
    Args:
        cfg: a detectron2 CfgNode

    Returns:
        backbone (Backbone): RetinaNet-style ResNet + FPNGN whose extra P6/P7
        levels come from the group-norm LastLevelP6P7GN block.
    """
    bottom_up = build_resnet_backbone(cfg, input_shape)
    out_channels = cfg.MODEL.FPN.OUT_CHANNELS
    # P6/P7 convs consume the raw res5 feature map
    res5_channels = bottom_up.output_shape()["res5"].channels
    return FPNGN(
        bottom_up=bottom_up,
        in_features=cfg.MODEL.FPN.IN_FEATURES,
        out_channels=out_channels,
        norm=cfg.MODEL.FPN.NORM,
        top_block=LastLevelP6P7GN(res5_channels, out_channels),
        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,
    )
| 37.96462 | 120 | 0.563197 | 12,123 | 109,452 | 4.833292 | 0.033243 | 0.033416 | 0.019524 | 0.020787 | 0.952043 | 0.937007 | 0.928116 | 0.916391 | 0.912619 | 0.909598 | 0 | 0.008654 | 0.35389 | 109,452 | 2,882 | 121 | 37.977793 | 0.819904 | 0.202609 | 0 | 0.88623 | 0 | 0 | 0.024915 | 0.007631 | 0 | 0 | 0 | 0.001735 | 0.012695 | 1 | 0.032715 | false | 0.000977 | 0.007324 | 0.006348 | 0.072266 | 0.000488 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed5a7fc7bb9b2a2eaad858b7acdd1a0df82a63d4 | 24,549 | py | Python | batchgenerators/transforms/spatial_transforms.py | tor4z/batchgenerators | 08f9ab16d1cfca843500bae2eb54323c79526aa0 | [
"Apache-2.0"
] | null | null | null | batchgenerators/transforms/spatial_transforms.py | tor4z/batchgenerators | 08f9ab16d1cfca843500bae2eb54323c79526aa0 | [
"Apache-2.0"
] | null | null | null | batchgenerators/transforms/spatial_transforms.py | tor4z/batchgenerators | 08f9ab16d1cfca843500bae2eb54323c79526aa0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Division of Medical Image Computing, German Cancer Research Center (DKFZ)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from batchgenerators.transforms.abstract_transforms import AbstractTransform
from batchgenerators.augmentations.spatial_transformations import augment_spatial, augment_spatial_2, \
augment_channel_translation, \
augment_mirroring, augment_transpose_axes, augment_zoom, augment_resize, augment_rot90
import numpy as np
class Rot90Transform(AbstractTransform):
    def __init__(self, num_rot=(1, 2, 3), axes=(0, 1, 2), data_key="data", label_key="seg", p_per_sample=0.3):
        """
        Randomly rotates each sample by a multiple of 90 degrees.

        :param num_rot: candidate numbers of 90-degree rotations; one is drawn at random per sample
        :param axes: candidate spatial axes; the rotation plane is chosen randomly from these
        :param data_key: key of the image batch in the data dict
        :param label_key: key of the segmentation batch in the data dict
        :param p_per_sample: probability of rotating each individual sample
        """
        self.p_per_sample = p_per_sample
        self.label_key = label_key
        self.data_key = data_key
        self.axes = axes
        self.num_rot = num_rot

    def __call__(self, **data_dict):
        data = data_dict.get(self.data_key)
        seg = data_dict.get(self.label_key)

        for idx in range(data.shape[0]):
            # draw once per sample; skip if the coin flip fails
            if np.random.uniform() >= self.p_per_sample:
                continue
            sample_seg = seg[idx] if seg is not None else None
            rotated_data, rotated_seg = augment_rot90(data[idx], sample_seg, self.num_rot, self.axes)
            data[idx] = rotated_data
            if rotated_seg is not None:
                seg[idx] = rotated_seg

        data_dict[self.data_key] = data
        if seg is not None:
            data_dict[self.label_key] = seg
        return data_dict
class ZoomTransform(AbstractTransform):
    def __init__(self, zoom_factors=1, order=3, order_seg=1, cval_seg=0, concatenate_list=False, data_key="data",
                 label_key="seg"):
        """
        Zooms 'data' (and 'seg') by zoom_factors.

        :param zoom_factors: int or list/tuple of int
        :param order: interpolation order for data (see skimage.transform.resize)
        :param order_seg: interpolation order for seg (see skimage.transform.resize)
        :param cval_seg: cval for segmentation (see skimage.transform.resize)
        :param concatenate_list: if data/seg are given as list/tuple and this is True, the per-sample results are
        stacked into one large ndarray (b, c, x, y(, z)); otherwise a list of zoomed arrays is returned
        :param data_key: key of the image batch in the data dict
        :param label_key: key of the segmentation batch in the data dict
        """
        self.concatenate_list = concatenate_list
        self.cval_seg = cval_seg
        self.order_seg = order_seg
        self.data_key = data_key
        self.label_key = label_key
        self.order = order
        self.zoom_factors = zoom_factors

    def __call__(self, **data_dict):
        data = data_dict.get(self.data_key)
        seg = data_dict.get(self.label_key)

        # ndarray inputs must always be re-assembled into an ndarray;
        # list/tuple inputs are stacked only if requested
        concatenate = True if isinstance(data, np.ndarray) else self.concatenate_list
        if seg is not None:
            concatenate_seg = True if isinstance(seg, np.ndarray) else self.concatenate_list
        else:
            concatenate_seg = None

        results = []
        for b in range(len(data)):
            sample_seg = seg[b] if seg is not None else None
            res_data, res_seg = augment_zoom(data[b], sample_seg, self.zoom_factors, self.order, self.order_seg,
                                             self.cval_seg)
            results.append((res_data, res_seg))

        if concatenate:
            data = np.vstack([i[0][None] for i in results])
        else:
            # BUGFIX: previously, list/tuple input with concatenate_list=False
            # silently discarded the zoomed results and returned the original
            # data unchanged; return the zoomed samples as a list instead.
            data = [i[0] for i in results]
        if seg is not None:
            if concatenate_seg:
                seg = np.vstack([i[1][None] for i in results])
            else:
                seg = [i[1] for i in results]

        data_dict[self.data_key] = data
        if seg is not None:
            data_dict[self.label_key] = seg
        return data_dict
class ResizeTransform(AbstractTransform):
    def __init__(self, target_size, order=3, order_seg=1, cval_seg=0, concatenate_list=False, data_key="data",
                 label_key="seg"):
        """
        Reshapes 'data' (and 'seg') to target_size.

        :param target_size: int or list/tuple of int
        :param order: interpolation order for data (see skimage.transform.resize)
        :param order_seg: interpolation order for seg (see skimage.transform.resize)
        :param cval_seg: cval for segmentation (see skimage.transform.resize)
        :param concatenate_list: if data/seg are given as list/tuple and this is True, the per-sample results are
        stacked into one large ndarray (b, c, x, y(, z)); otherwise a list of resized arrays is returned
        :param data_key: key of the image batch in the data dict
        :param label_key: key of the segmentation batch in the data dict
        """
        self.concatenate_list = concatenate_list
        self.cval_seg = cval_seg
        self.order_seg = order_seg
        self.data_key = data_key
        self.label_key = label_key
        self.order = order
        self.target_size = target_size

    def __call__(self, **data_dict):
        data = data_dict.get(self.data_key)
        seg = data_dict.get(self.label_key)

        # ndarray inputs must always be re-assembled into an ndarray;
        # list/tuple inputs are stacked only if requested
        concatenate = True if isinstance(data, np.ndarray) else self.concatenate_list
        if seg is not None:
            concatenate_seg = True if isinstance(seg, np.ndarray) else self.concatenate_list
        else:
            concatenate_seg = None

        results = []
        for b in range(len(data)):
            sample_seg = seg[b] if seg is not None else None
            res_data, res_seg = augment_resize(data[b], sample_seg, self.target_size, self.order, self.order_seg,
                                               self.cval_seg)
            results.append((res_data, res_seg))

        if concatenate:
            data = np.vstack([i[0][None] for i in results])
        else:
            # BUGFIX: previously, list/tuple input with concatenate_list=False
            # silently discarded the resized results and returned the original
            # data unchanged; return the resized samples as a list instead.
            data = [i[0] for i in results]
        if seg is not None:
            if concatenate_seg:
                seg = np.vstack([i[1][None] for i in results])
            else:
                seg = [i[1] for i in results]

        data_dict[self.data_key] = data
        if seg is not None:
            data_dict[self.label_key] = seg
        return data_dict
class MirrorTransform(AbstractTransform):
    """Randomly mirrors data along specified spatial axes. Mirroring is evenly
    distributed: each axis is flipped with probability 0.5 (inside
    augment_mirroring).

    Args:
        axes (tuple of int): spatial axes along which to mirror (0=x, 1=y, 2=z)
    """

    def __init__(self, axes=(0, 1, 2), data_key="data", label_key="seg", p_per_sample=1):
        self.p_per_sample = p_per_sample
        self.data_key = data_key
        self.label_key = label_key
        self.axes = axes
        # guard against the old (pre-refactor) axis convention
        if max(axes) > 2:
            raise ValueError("MirrorTransform now takes the axes as the spatial dimensions. What previously was "
                             "axes=(2, 3, 4) to mirror along all spatial dimensions of a 5d tensor (b, c, x, y, z) "
                             "is now axes=(0, 1, 2). Please adapt your scripts accordingly.")

    def __call__(self, **data_dict):
        data = data_dict.get(self.data_key)
        seg = data_dict.get(self.label_key)

        for idx in range(len(data)):
            if np.random.uniform() >= self.p_per_sample:
                continue
            sample_seg = seg[idx] if seg is not None else None
            mirrored = augment_mirroring(data[idx], sample_seg, axes=self.axes)
            data[idx] = mirrored[0]
            if seg is not None:
                seg[idx] = mirrored[1]

        data_dict[self.data_key] = data
        if seg is not None:
            data_dict[self.label_key] = seg
        return data_dict
class ChannelTranslation(AbstractTransform):
    """Simulates badly aligned color channels/modalities by shifting them
    against each other.

    Args:
        const_channel: index of the channel that stays fixed; the others are shifted
        max_shifts (dict {'x':2, 'y':2, 'z':2}): maximum shift in pixels per spatial axis
    """

    def __init__(self, const_channel=0, max_shifts=None, data_key="data", label_key="seg"):
        self.data_key = data_key
        self.label_key = label_key
        self.max_shift = max_shifts
        self.const_channel = const_channel

    def __call__(self, **data_dict):
        shifted = augment_channel_translation(data=data_dict.get(self.data_key),
                                              const_channel=self.const_channel,
                                              max_shifts=self.max_shift)
        data_dict[self.data_key] = shifted[0]
        return data_dict
class SpatialTransform(AbstractTransform):
    """The ultimate spatial transform generator. Rotation, deformation, scaling, cropping: It has all you ever dreamed
    of. Computational time scales only with patch_size, not with input patch size or type of augmentations used.
    Internally, this transform will use a coordinate grid of shape patch_size to which the transformations are
    applied (very fast). Interpolation on the image data will only be done at the very end

    Args:
        patch_size (tuple/list/ndarray of int): Output patch size

        patch_center_dist_from_border (tuple/list/ndarray of int, or int): How far should the center pixel of the
        extracted patch be from the image border? Recommended to use patch_size//2.
        This only applies when random_crop=True

        do_elastic_deform (bool): Whether or not to apply elastic deformation

        alpha (tuple of float): magnitude of the elastic deformation; randomly sampled from interval

        sigma (tuple of float): scale of the elastic deformation (small = local, large = global); randomly sampled
        from interval

        do_rotation (bool): Whether or not to apply rotation

        angle_x, angle_y, angle_z (tuple of float): angle in rad; randomly sampled from interval. Always double check
        whether axes are correct!

        do_scale (bool): Whether or not to apply scaling

        scale (tuple of float): scale range ; scale is randomly sampled from interval

        border_mode_data: How to treat border pixels in data? see scipy.ndimage.map_coordinates

        border_cval_data: If border_mode_data=constant, what value to use?

        order_data: Order of interpolation for data. see scipy.ndimage.map_coordinates

        border_mode_seg: How to treat border pixels in seg? see scipy.ndimage.map_coordinates

        border_cval_seg: If border_mode_seg=constant, what value to use?

        order_seg: Order of interpolation for seg. see scipy.ndimage.map_coordinates. Strongly recommended to use 0!
        If !=0 then you will have to round to int and also beware of interpolation artifacts if you have more then
        labels 0 and 1. (for example if you have [0, 0, 0, 2, 2, 1, 0] the neighboring [0, 0, 2] bay result in [0, 1, 2])

        random_crop: True: do a random crop of size patch_size and minimal distance to border of
        patch_center_dist_from_border. False: do a center crop of size patch_size

        independent_scale_for_each_axis: If True, a scale factor will be chosen independently for each axis.
    """

    def __init__(self, patch_size, patch_center_dist_from_border=30,
                 do_elastic_deform=True, alpha=(0., 1000.), sigma=(10., 13.),
                 do_rotation=True, angle_x=(0, 2 * np.pi), angle_y=(0, 2 * np.pi), angle_z=(0, 2 * np.pi),
                 do_scale=True, scale=(0.75, 1.25), border_mode_data='nearest', border_cval_data=0, order_data=3,
                 border_mode_seg='constant', border_cval_seg=0, order_seg=0, random_crop=True, data_key="data",
                 label_key="seg", p_el_per_sample=1, p_scale_per_sample=1, p_rot_per_sample=1,
                 independent_scale_for_each_axis=False, p_rot_per_axis: float = 1,
                 p_independent_scale_per_axis: int = 1):
        # This class is a parameter container only: all arguments are stored
        # verbatim and forwarded to augment_spatial in __call__.
        self.independent_scale_for_each_axis = independent_scale_for_each_axis
        # per-sample probabilities for elastic deformation / scaling / rotation
        self.p_rot_per_sample = p_rot_per_sample
        self.p_scale_per_sample = p_scale_per_sample
        self.p_el_per_sample = p_el_per_sample
        self.data_key = data_key
        self.label_key = label_key
        self.patch_size = patch_size
        self.patch_center_dist_from_border = patch_center_dist_from_border
        self.do_elastic_deform = do_elastic_deform
        self.alpha = alpha
        self.sigma = sigma
        self.do_rotation = do_rotation
        self.angle_x = angle_x
        self.angle_y = angle_y
        self.angle_z = angle_z
        self.do_scale = do_scale
        self.scale = scale
        self.border_mode_data = border_mode_data
        self.border_cval_data = border_cval_data
        self.order_data = order_data
        self.border_mode_seg = border_mode_seg
        self.border_cval_seg = border_cval_seg
        self.order_seg = order_seg
        self.random_crop = random_crop
        self.p_rot_per_axis = p_rot_per_axis
        self.p_independent_scale_per_axis = p_independent_scale_per_axis

    def __call__(self, **data_dict):
        data = data_dict.get(self.data_key)
        seg = data_dict.get(self.label_key)

        # patch_size=None means "keep the spatial size of the incoming batch";
        # batch layout is (b, c, x, y) for 2D or (b, c, x, y, z) for 3D
        if self.patch_size is None:
            if len(data.shape) == 4:
                patch_size = (data.shape[2], data.shape[3])
            elif len(data.shape) == 5:
                patch_size = (data.shape[2], data.shape[3], data.shape[4])
            else:
                raise ValueError("only support 2D/3D batch data.")
        else:
            patch_size = self.patch_size

        # all heavy lifting (deform/rotate/scale/crop) happens here
        ret_val = augment_spatial(data, seg, patch_size=patch_size,
                                  patch_center_dist_from_border=self.patch_center_dist_from_border,
                                  do_elastic_deform=self.do_elastic_deform, alpha=self.alpha, sigma=self.sigma,
                                  do_rotation=self.do_rotation, angle_x=self.angle_x, angle_y=self.angle_y,
                                  angle_z=self.angle_z, do_scale=self.do_scale, scale=self.scale,
                                  border_mode_data=self.border_mode_data,
                                  border_cval_data=self.border_cval_data, order_data=self.order_data,
                                  border_mode_seg=self.border_mode_seg, border_cval_seg=self.border_cval_seg,
                                  order_seg=self.order_seg, random_crop=self.random_crop,
                                  p_el_per_sample=self.p_el_per_sample, p_scale_per_sample=self.p_scale_per_sample,
                                  p_rot_per_sample=self.p_rot_per_sample,
                                  independent_scale_for_each_axis=self.independent_scale_for_each_axis,
                                  p_rot_per_axis=self.p_rot_per_axis,
                                  p_independent_scale_per_axis=self.p_independent_scale_per_axis)
        data_dict[self.data_key] = ret_val[0]
        if seg is not None:
            data_dict[self.label_key] = ret_val[1]

        return data_dict
class SpatialTransform_2(AbstractTransform):
"""The ultimate spatial transform generator. Rotation, deformation, scaling, cropping: It has all you ever dreamed
of. Computational time scales only with patch_size, not with input patch size or type of augmentations used.
Internally, this transform will use a coordinate grid of shape patch_size to which the transformations are
applied (very fast). Interpolation on the image data will only be done at the very end
Args:
patch_size (tuple/list/ndarray of int): Output patch size
patch_center_dist_from_border (tuple/list/ndarray of int, or int): How far should the center pixel of the
extracted patch be from the image border? Recommended to use patch_size//2.
This only applies when random_crop=True
do_elastic_deform (bool): Whether or not to apply elastic deformation
alpha (tuple of float): magnitude of the elastic deformation; randomly sampled from interval
sigma (tuple of float): scale of the elastic deformation (small = local, large = global); randomly sampled
from interval
do_rotation (bool): Whether or not to apply rotation
angle_x, angle_y, angle_z (tuple of float): angle in rad; randomly sampled from interval. Always double check
whether axes are correct!
do_scale (bool): Whether or not to apply scaling
scale (tuple of float): scale range ; scale is randomly sampled from interval
border_mode_data: How to treat border pixels in data? see scipy.ndimage.map_coordinates
border_cval_data: If border_mode_data=constant, what value to use?
order_data: Order of interpolation for data. see scipy.ndimage.map_coordinates
border_mode_seg: How to treat border pixels in seg? see scipy.ndimage.map_coordinates
border_cval_seg: If border_mode_seg=constant, what value to use?
order_seg: Order of interpolation for seg. see scipy.ndimage.map_coordinates. Strongly recommended to use 0!
If !=0 then you will have to round to int and also beware of interpolation artifacts if you have more then
labels 0 and 1. (for example if you have [0, 0, 0, 2, 2, 1, 0] the neighboring [0, 0, 2] bay result in [0, 1, 2])
random_crop: True: do a random crop of size patch_size and minimal distance to border of
patch_center_dist_from_border. False: do a center crop of size patch_size
"""
def __init__(self, patch_size, patch_center_dist_from_border=30,
do_elastic_deform=True, deformation_scale=(0, 0.25),
do_rotation=True, angle_x=(0, 2 * np.pi), angle_y=(0, 2 * np.pi), angle_z=(0, 2 * np.pi),
do_scale=True, scale=(0.75, 1.25), border_mode_data='nearest', border_cval_data=0, order_data=3,
border_mode_seg='constant', border_cval_seg=0, order_seg=0, random_crop=True, data_key="data",
label_key="seg", p_el_per_sample=1, p_scale_per_sample=1, p_rot_per_sample=1,
independent_scale_for_each_axis=False, p_rot_per_axis:float=1, p_independent_scale_per_axis: int=1):
self.p_rot_per_sample = p_rot_per_sample
self.p_scale_per_sample = p_scale_per_sample
self.p_el_per_sample = p_el_per_sample
self.data_key = data_key
self.label_key = label_key
self.patch_size = patch_size
self.patch_center_dist_from_border = patch_center_dist_from_border
self.do_elastic_deform = do_elastic_deform
self.deformation_scale = deformation_scale
self.do_rotation = do_rotation
self.angle_x = angle_x
self.angle_y = angle_y
self.angle_z = angle_z
self.do_scale = do_scale
self.scale = scale
self.border_mode_data = border_mode_data
self.border_cval_data = border_cval_data
self.order_data = order_data
self.border_mode_seg = border_mode_seg
self.border_cval_seg = border_cval_seg
self.order_seg = order_seg
self.random_crop = random_crop
self.p_independent_scale_per_axis = p_independent_scale_per_axis
self.independent_scale_for_each_axis = independent_scale_for_each_axis
self.p_rot_per_axis = p_rot_per_axis
def __call__(self, **data_dict):
    """Run augment_spatial_2 on the batch and write the results back into data_dict."""
    data = data_dict.get(self.data_key)
    seg = data_dict.get(self.label_key)

    # If no patch size was configured, fall back to the full spatial extent
    # of the batch: (b, c, x, y) for 2D, (b, c, x, y, z) for 3D.
    if self.patch_size is not None:
        patch_size = self.patch_size
    elif len(data.shape) == 4:
        patch_size = (data.shape[2], data.shape[3])
    elif len(data.shape) == 5:
        patch_size = (data.shape[2], data.shape[3], data.shape[4])
    else:
        raise ValueError("only support 2D/3D batch data.")

    augmented = augment_spatial_2(data, seg, patch_size=patch_size,
                                  patch_center_dist_from_border=self.patch_center_dist_from_border,
                                  do_elastic_deform=self.do_elastic_deform, deformation_scale=self.deformation_scale,
                                  do_rotation=self.do_rotation, angle_x=self.angle_x, angle_y=self.angle_y,
                                  angle_z=self.angle_z, do_scale=self.do_scale, scale=self.scale,
                                  border_mode_data=self.border_mode_data,
                                  border_cval_data=self.border_cval_data, order_data=self.order_data,
                                  border_mode_seg=self.border_mode_seg, border_cval_seg=self.border_cval_seg,
                                  order_seg=self.order_seg, random_crop=self.random_crop,
                                  p_el_per_sample=self.p_el_per_sample, p_scale_per_sample=self.p_scale_per_sample,
                                  p_rot_per_sample=self.p_rot_per_sample,
                                  independent_scale_for_each_axis=self.independent_scale_for_each_axis,
                                  p_rot_per_axis=self.p_rot_per_axis,
                                  p_independent_scale_per_axis=self.p_independent_scale_per_axis)

    data_dict[self.data_key] = augmented[0]
    if seg is not None:
        data_dict[self.label_key] = augmented[1]
    return data_dict
class TransposeAxesTransform(AbstractTransform):
    def __init__(self, transpose_any_of_these=(0, 1, 2), data_key="data", label_key="seg", p_per_sample=1):
        '''
        This transform will randomly shuffle the axes of transpose_any_of_these.
        Requires your patch size to have the same dimension in all axes specified in transpose_any_of_these. So if
        transpose_any_of_these=(0, 1, 2) the shape must be (128x128x128) and cannot be, for example (128x128x96)
        (transpose_any_of_these=(0, 1) would be the correct one here)!
        :param transpose_any_of_these: spatial dimensions to transpose, 0=x, 1=y, 2=z. Must be a tuple/list of len>=2
        :param data_key:
        :param label_key:
        :param p_per_sample: probability of transposing each individual sample of the batch
        '''
        self.p_per_sample = p_per_sample
        self.data_key = data_key
        self.label_key = label_key
        self.transpose_any_of_these = transpose_any_of_these
        # Validate the container type and length BEFORE calling max() on it, so a
        # scalar or empty argument fails with the intended assertion message instead
        # of an opaque TypeError/ValueError raised by max().
        assert isinstance(transpose_any_of_these, (list, tuple)), "transpose_any_of_these must be either list or tuple"
        assert len(
            transpose_any_of_these) >= 2, "len(transpose_any_of_these) must be >=2 -> we need at least 2 axes we " \
                                          "can transpose"
        if max(transpose_any_of_these) > 2:
            raise ValueError("TransposeAxesTransform now takes the axes as the spatial dimensions. What previously was "
                             "axes=(2, 3, 4) to mirror along all spatial dimensions of a 5d tensor (b, c, x, y, z) "
                             "is now axes=(0, 1, 2). Please adapt your scripts accordingly.")

    def __call__(self, **data_dict):
        """Independently transpose each sample (with probability p_per_sample) in place."""
        data = data_dict.get(self.data_key)
        seg = data_dict.get(self.label_key)
        for b in range(len(data)):
            if np.random.uniform() < self.p_per_sample:
                if seg is not None:
                    s = seg[b]
                else:
                    s = None
                ret_val = augment_transpose_axes(data[b], s, self.transpose_any_of_these)
                data[b] = ret_val[0]
                if seg is not None:
                    seg[b] = ret_val[1]
        data_dict[self.data_key] = data
        if seg is not None:
            data_dict[self.label_key] = seg
        return data_dict
| 46.318868 | 121 | 0.641574 | 3,490 | 24,549 | 4.247278 | 0.100287 | 0.024826 | 0.01781 | 0.014572 | 0.806719 | 0.79154 | 0.781218 | 0.776766 | 0.771032 | 0.769075 | 0 | 0.012565 | 0.283515 | 24,549 | 529 | 122 | 46.406427 | 0.830178 | 0.32319 | 0 | 0.82623 | 0 | 0.006557 | 0.046499 | 0.004443 | 0 | 0 | 0 | 0 | 0.006557 | 1 | 0.052459 | false | 0 | 0.009836 | 0 | 0.114754 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed80b1b7246eaeb87bd037d39e5d5ab09376c4d5 | 93 | py | Python | models/__init__.py | cshyundev/LW-PSMNet | d80d3b12c55ba30c781a7578a4728a2cd6321866 | [
"MIT"
] | 1 | 2022-01-22T14:00:27.000Z | 2022-01-22T14:00:27.000Z | models/__init__.py | cshyundev/LW-PSMNet | d80d3b12c55ba30c781a7578a4728a2cd6321866 | [
"MIT"
] | null | null | null | models/__init__.py | cshyundev/LW-PSMNet | d80d3b12c55ba30c781a7578a4728a2cd6321866 | [
"MIT"
] | null | null | null | from .basic import PSMNet as basic
from .stackhourglass import PSMNet as stackhourglass_org
| 23.25 | 56 | 0.83871 | 13 | 93 | 5.923077 | 0.538462 | 0.311688 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139785 | 93 | 3 | 57 | 31 | 0.9625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9c13055bfb339e379e98c137f06d0df92b6ccc99 | 2,921 | py | Python | djstripe/migrations/0019_auto_20180110_0616.py | ComFreight/cmft-stripe-integration | 85a2e14dcd6fffd24e999b1f383dd7eb006606e0 | [
"MIT"
] | null | null | null | djstripe/migrations/0019_auto_20180110_0616.py | ComFreight/cmft-stripe-integration | 85a2e14dcd6fffd24e999b1f383dd7eb006606e0 | [
"MIT"
] | null | null | null | djstripe/migrations/0019_auto_20180110_0616.py | ComFreight/cmft-stripe-integration | 85a2e14dcd6fffd24e999b1f383dd7eb006606e0 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.1 on 2018-01-10 04:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0018_auto_20180110_0559'),
]
operations = [
migrations.AlterField(
model_name='account',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='card',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='charge',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='coupon',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='customer',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='dispute',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='event',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='invoice',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='invoiceitem',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='payout',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='plan',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='source',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='subscription',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='transfer',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID'),
),
]
| 34.77381 | 92 | 0.574803 | 281 | 2,921 | 5.814947 | 0.185053 | 0.102815 | 0.214198 | 0.24847 | 0.830477 | 0.830477 | 0.830477 | 0.830477 | 0.830477 | 0.830477 | 0 | 0.015189 | 0.301267 | 2,921 | 83 | 93 | 35.192771 | 0.785399 | 0.015406 | 0 | 0.727273 | 1 | 0 | 0.064022 | 0.008003 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.012987 | 0 | 0.051948 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
9c59b45a1740da770ea40241eb92ee622be1fb39 | 8,521 | py | Python | se34euca/lib/EucaUITestLib_Keypair.py | eucalyptus/se34euca | af5da36754fccca84b7f260ba7605b8fdc30fa55 | [
"BSD-2-Clause"
] | 8 | 2015-01-08T21:06:08.000Z | 2019-10-26T13:17:16.000Z | se34euca/lib/EucaUITestLib_Keypair.py | eucalyptus/se34euca | af5da36754fccca84b7f260ba7605b8fdc30fa55 | [
"BSD-2-Clause"
] | null | null | null | se34euca/lib/EucaUITestLib_Keypair.py | eucalyptus/se34euca | af5da36754fccca84b7f260ba7605b8fdc30fa55 | [
"BSD-2-Clause"
] | 7 | 2016-08-31T07:02:21.000Z | 2020-07-18T00:10:36.000Z | from se34euca.lib.EucaUITestLib_Base import *
class EucaUITestLib_Keypair(EucaUITestLib_Base):
    """Selenium UI test helpers for the Eucalyptus console key-pair workflows.

    Each test_ui_* method drives the browser through one key-pair operation
    (navigate, generate, import, verify, delete) using click/verify helpers
    inherited from EucaUITestLib_Base, prints progress markers to stdout, and
    returns 0 on completion.
    NOTE(review): this module uses Python 2 print statements.
    """

    def test_ui_gotopage_keypairs(self):
        # Navigate via the logo to the dashboard, then to the Key Pairs landing
        # page, and confirm the "create new key" button is present.
        print
        print "Started Test: GotoPage Keypairs"
        print
        self.click_element_by_id("euca-logo")
        print
        print "Test: Received the Page Title -> " + self.driver.title
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        self.verify_element_by_id("table-keys-new")
        print
        print "Finished Test: GotoPage Keypairs"
        print
        return 0

    def test_ui_generate_keypair(self):
        # Generate a key pair with a fixed name ("my-sel-gen-key-00").
        print
        print "Started Test: Generate Keypair"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        self.click_element_by_id("table-keys-new")
        print
        print "Test: Generate New Keypair"
        self.set_keys_by_id("key-add-name", "my-sel-gen-key-00")
        self.click_element_by_id("keys-add-btn")
        print
        print "Finished Test: Generate Keypair"
        print
        return 0

    def test_ui_generate_keypair_given_name(self, keypair_name):
        # Same as test_ui_generate_keypair but with a caller-supplied name.
        print
        print "Started Test: Generate Keypair Given Name"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        self.click_element_by_id("table-keys-new")
        print
        print "Test: Generate New Keypair"
        self.set_keys_by_id("key-add-name", keypair_name)
        self.click_element_by_id("keys-add-btn")
        print
        print "Finished Test: Generate Keypair Given Name"
        print
        return 0

    def test_ui_import_keypair(self):
        # Import a hard-coded public SSH key under the fixed name "import-key".
        print
        print "Started Test: Import Keypair"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        self.click_element_by_id("table-keys-extra")
        print
        print "Test: Import Keypair"
        self.set_keys_by_id(this_id="key-import-contents",
                            keys="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDI1x6tEjkBQCCP0ssF69vAgP2xg+N9ScoTrqRqyl5w4qEgsV/AppfHHYRKYr0N/tTyG4/z1XGNrB2SaslnRpgEOsvMZldlOnqsUujL2fgoEg+/gB92+1JhZgTjU8nL5j5BFkVTh93nSHtXHdzYl7SjlXrv26ZbyuDwJmI+s6bJQk5noJ4Q4g7N/0y9pHRvezyhgxkyX7PQoA9WJm8SqlakyhMYa0j/baMhb/ehSI0VvwNodmcaWaS6Z2F4rZS4C2DmCUDXYy/1+0tiRTjHjlPbqRKCVKam8ImWytlZD0zbdV/tpADxDpnhW2cPVpXcjy4sRzUCc8AZW+OE3LQxXild alicehubenko@Alices-MacBook-Pro.local")
        self.set_keys_by_id('key-import-name', "import-key")
        self.click_element_by_id("keys-add-btn")
        print
        print "Finished Test: Import Keypair"
        print
        return 0

    def test_ui_import_keypair_given_name(self, keypair_name):
        # Same as test_ui_import_keypair but with a caller-supplied name.
        print
        print "Started Test: Import Keypair Given Name"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        self.click_element_by_id("table-keys-extra")
        print
        print "Test: Import Keypair " + keypair_name
        self.set_keys_by_id(this_id="key-import-contents",
                            keys="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDI1x6tEjkBQCCP0ssF69vAgP2xg+N9ScoTrqRqyl5w4qEgsV/AppfHHYRKYr0N/tTyG4/z1XGNrB2SaslnRpgEOsvMZldlOnqsUujL2fgoEg+/gB92+1JhZgTjU8nL5j5BFkVTh93nSHtXHdzYl7SjlXrv26ZbyuDwJmI+s6bJQk5noJ4Q4g7N/0y9pHRvezyhgxkyX7PQoA9WJm8SqlakyhMYa0j/baMhb/ehSI0VvwNodmcaWaS6Z2F4rZS4C2DmCUDXYy/1+0tiRTjHjlPbqRKCVKam8ImWytlZD0zbdV/tpADxDpnhW2cPVpXcjy4sRzUCc8AZW+OE3LQxXild alicehubenko@Alices-MacBook-Pro.local")
        self.set_keys_by_id('key-import-name', keypair_name)
        self.click_element_by_id("keys-add-btn")
        print
        print "Finished Test: Import Keypair Given Name"
        print
        return 0

    #Verifying on Key Pairs landing page by key pair name that key pair is present
    def test_ui_verify_keypair_given_name(self, keypair_name):
        print
        print "Started Test: Verify Keypair Given Name"
        print
        self.click_element_by_link_text(link_text="Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print "Verifying that the Keypair " + keypair_name + " exists"
        self.verify_element_by_id(keypair_name)
        print
        print "Finished Test: Verify Keypair Given Name"
        print
        return 0

    def test_ui_verify_delete_keypair_given_name(self, keypair_name):
        # Confirm the named key pair is absent from the Key Pairs landing page.
        print
        print "Started Test: Verify Delete Keypair Given Name"
        print
        self.click_element_by_link_text(link_text="Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print "Verifying that the Keypair " + keypair_name + " is deleted"
        self.verify_element_not_present("ID", keypair_name)
        print
        print "Finished Test: Verify Delete Keypair Given Name"
        print
        return 0

    def test_ui_check_keypair_count(self, keys_count):
        # keys_count is compared as displayed text, so it is expected to be a
        # string — TODO confirm against callers.
        print
        print "Started Test: Check Keypair Count"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        print "Verifying that Keypair Count on Dashboard is " + keys_count
        self.verify_text_displayed_by_css("#dashboard-netsec-keypair > span", keys_count)
        print
        print "Finished Test: Check Keypair Count"
        print
        return 0

    def test_ui_delete_keypair(self):
        '''
        Goes to Key Pairs LP and deletes the first key pair from top.
        '''
        print
        print "Started Test: Delete Keypair"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        # Select the first row of the keys table, then delete via "More actions".
        self.click_element_by_xpath("//table[@id='keys']/tbody/tr/td[2]")
        self.click_element_by_id("more-actions-keys")
        print
        print "Test: Delete Keypair"
        self.click_element_by_link_text("Delete")
        self.click_element_by_id("btn-keys-delete-delete")
        print
        print "Finished Test: Delete Keypair"
        print
        return 0

    def test_ui_delete_keypair_given_name(self, keypair_name):
        # Delete a specific key pair selected by its element id (the key name).
        print
        print "Started Test: Delete Keypair Given Name"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        self.click_element_by_id(keypair_name)
        self.click_element_by_id("more-actions-keys")
        print
        print "Test: Delete Keypair"
        self.click_element_by_link_text("Delete")
        self.click_element_by_id("btn-keys-delete-delete")
        print
        print "Finished Test: Delete Keypair"
        print
        return 0

    def test_ui_delete_keypair_all(self):
        # Select every key pair via the "check all" box and delete them in bulk.
        print
        print "Started Test: Delete Keypair All"
        print
        self.click_element_by_link_text("Dashboard")
        self.verify_element_by_link_text("Launch new instance")
        self.click_element_by_id("dashboard-netsec-keypair")
        print
        print "Test: Clicked the GoToPage Button"
        self.click_element_by_id("keys-check-all")
        self.click_element_by_id("more-actions-keys")
        print
        print "Test: Delete Keypair All"
        self.click_element_by_link_text("Delete")
        self.click_element_by_id("btn-keys-delete-delete")
        print
        print
        print "Finished Test: Delete Keypair All"
        print
        return 0
# Run the test cases defined above when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| 39.632558 | 454 | 0.683018 | 1,027 | 8,521 | 5.390458 | 0.110029 | 0.086163 | 0.118497 | 0.133309 | 0.881864 | 0.855853 | 0.820809 | 0.817738 | 0.771857 | 0.753071 | 0 | 0.018859 | 0.240817 | 8,521 | 214 | 455 | 39.817757 | 0.836915 | 0.009037 | 0 | 0.717277 | 0 | 0.010471 | 0.379892 | 0.141592 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.068063 | null | null | 0.534031 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
13408936d5f202f8b52becfabf9752a13df36e74 | 10,258 | py | Python | tests/integration/verify/v2/service/rate_limit/test_bucket.py | fefi95/twilio-python | b9bfea293b6133fe84d4d8d3ac4e2a75381c3881 | [
"MIT"
] | 1 | 2019-12-30T21:46:55.000Z | 2019-12-30T21:46:55.000Z | tests/integration/verify/v2/service/rate_limit/test_bucket.py | fefi95/twilio-python | b9bfea293b6133fe84d4d8d3ac4e2a75381c3881 | [
"MIT"
] | null | null | null | tests/integration/verify/v2/service/rate_limit/test_bucket.py | fefi95/twilio-python | b9bfea293b6133fe84d4d8d3ac4e2a75381c3881 | [
"MIT"
] | null | null | null | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class BucketTestCase(IntegrationTestCase):
    # Integration tests for the Verify v2 rate-limit Bucket REST resource.
    # Pattern (generated twilio-python test style):
    #   *_request tests  — mock a 500 so the client raises TwilioException, then
    #                      assert the exact HTTP request Holodeck captured.
    #   *_response tests — mock a canned API payload and assert the client
    #                      returns a parsed, non-None result.

    def test_create_request(self):
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .buckets.create(max=1, interval=1)

        # Form-encoded body expected on the wire (note the capitalized keys).
        values = {'Max': 1, 'Interval': 1, }

        self.holodeck.assert_has_request(Request(
            'post',
            'https://verify.twilio.com/v2/Services/VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/RateLimits/RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Buckets',
            data=values,
        ))

    def test_create_bucket_response(self):
        self.holodeck.mock(Response(
            201,
            '''
            {
                "sid": "BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "rate_limit_sid": "RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "service_sid": "VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "max": 5,
                "interval": 60,
                "date_created": "2015-07-30T20:00:00Z",
                "date_updated": "2015-07-30T20:00:00Z",
                "url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets/BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
            }
            '''
        ))

        actual = self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .buckets.create(max=1, interval=1)

        self.assertIsNotNone(actual)

    def test_update_request(self):
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .buckets(sid="BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()

        self.holodeck.assert_has_request(Request(
            'post',
            'https://verify.twilio.com/v2/Services/VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/RateLimits/RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Buckets/BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
        ))

    def test_update_bucket_response(self):
        self.holodeck.mock(Response(
            200,
            '''
            {
                "sid": "BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "rate_limit_sid": "RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "service_sid": "VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "max": 5,
                "interval": 60,
                "date_created": "2015-07-30T20:00:00Z",
                "date_updated": "2015-07-30T20:00:00Z",
                "url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets/BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
            }
            '''
        ))

        actual = self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .buckets(sid="BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()

        self.assertIsNotNone(actual)

    def test_fetch_request(self):
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .buckets(sid="BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()

        self.holodeck.assert_has_request(Request(
            'get',
            'https://verify.twilio.com/v2/Services/VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/RateLimits/RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Buckets/BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
        ))

    def test_fetch_bucket_response(self):
        self.holodeck.mock(Response(
            200,
            '''
            {
                "sid": "BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "rate_limit_sid": "RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "service_sid": "VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "max": 5,
                "interval": 60,
                "date_created": "2015-07-30T20:00:00Z",
                "date_updated": "2015-07-30T20:00:00Z",
                "url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets/BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
            }
            '''
        ))

        actual = self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .buckets(sid="BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()

        self.assertIsNotNone(actual)

    def test_list_request(self):
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .buckets.list()

        self.holodeck.assert_has_request(Request(
            'get',
            'https://verify.twilio.com/v2/Services/VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/RateLimits/RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Buckets',
        ))

    def test_read_empty_response(self):
        self.holodeck.mock(Response(
            200,
            '''
            {
                "buckets": [],
                "meta": {
                    "page": 0,
                    "page_size": 50,
                    "first_page_url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets?PageSize=50&Page=0",
                    "previous_page_url": null,
                    "url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets?PageSize=50&Page=0",
                    "next_page_url": null,
                    "key": "buckets"
                }
            }
            '''
        ))

        actual = self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .buckets.list()

        self.assertIsNotNone(actual)

    def test_read_full_response(self):
        self.holodeck.mock(Response(
            200,
            '''
            {
                "buckets": [
                    {
                        "sid": "BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "rate_limit_sid": "RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "service_sid": "VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "max": 5,
                        "interval": 60,
                        "date_created": "2015-07-30T20:00:00Z",
                        "date_updated": "2015-07-30T20:00:00Z",
                        "url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets/BLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
                    }
                ],
                "meta": {
                    "page": 0,
                    "page_size": 50,
                    "first_page_url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets?PageSize=50&Page=0",
                    "previous_page_url": null,
                    "url": "https://verify.twilio.com/v2/Services/VAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/RateLimits/RKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Buckets?PageSize=50&Page=0",
                    "next_page_url": null,
                    "key": "buckets"
                }
            }
            '''
        ))

        actual = self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .buckets.list()

        self.assertIsNotNone(actual)

    def test_delete_request(self):
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .buckets(sid="BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()

        self.holodeck.assert_has_request(Request(
            'delete',
            'https://verify.twilio.com/v2/Services/VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/RateLimits/RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Buckets/BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
        ))

    def test_delete_response(self):
        # 204 No Content: delete() returns a truthy success flag.
        self.holodeck.mock(Response(
            204,
            None,
        ))

        actual = self.client.verify.v2.services(sid="VAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .rate_limits(sid="RKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .buckets(sid="BLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()

        self.assertTrue(actual)
| 44.6 | 194 | 0.585787 | 720 | 10,258 | 8.213889 | 0.1375 | 0.040582 | 0.037369 | 0.043963 | 0.939804 | 0.922726 | 0.911397 | 0.904295 | 0.888739 | 0.888739 | 0 | 0.029532 | 0.310099 | 10,258 | 229 | 195 | 44.79476 | 0.806133 | 0.010626 | 0 | 0.775701 | 1 | 0.028037 | 0.286572 | 0.158799 | 0 | 0 | 0 | 0 | 0.149533 | 1 | 0.102804 | false | 0 | 0.037383 | 0 | 0.149533 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
13496264ea5aaeb52e1ff140c5e6868b575bb092 | 22,404 | py | Python | DynamixelWorkingMotionControl/fh_task2.py | tummalag/Controlled-Flight-of-High-DOF-Humanoid-Robot | 383f41c504b5a08ffd6d0b4c8f6013e4070d8458 | [
"MIT"
] | null | null | null | DynamixelWorkingMotionControl/fh_task2.py | tummalag/Controlled-Flight-of-High-DOF-Humanoid-Robot | 383f41c504b5a08ffd6d0b4c8f6013e4070d8458 | [
"MIT"
] | null | null | null | DynamixelWorkingMotionControl/fh_task2.py | tummalag/Controlled-Flight-of-High-DOF-Humanoid-Robot | 383f41c504b5a08ffd6d0b4c8f6013e4070d8458 | [
"MIT"
] | null | null | null | # Task 2 is checking all dynamixels
from __future__ import print_function
import os
import ctypes
import time
# Cross-platform single-keypress console helpers (standard Dynamixel SDK
# example boilerplate): Windows uses msvcrt; POSIX temporarily switches the
# terminal out of canonical/echo mode via termios.
if os.name == 'nt':
    import msvcrt

    def getch():
        # Block until one key is pressed and return it as a str.
        return msvcrt.getch().decode()
else:
    import termios, fcntl, sys, os
    from select import select
    fd = sys.stdin.fileno()
    # old_term keeps the pristine terminal settings so they can be restored;
    # new_term is mutated below to disable canonical mode and echo.
    old_term = termios.tcgetattr(fd)
    new_term = termios.tcgetattr(fd)

    def getch():
        # Disable line buffering and echo, read exactly one character, then
        # always restore the original terminal settings.
        new_term[3] = (new_term[3] & ~termios.ICANON & ~termios.ECHO)
        termios.tcsetattr(fd, termios.TCSANOW, new_term)
        try:
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_term)
        return ch

    def kbhit():
        # Non-blocking poll: return 1 if a key is waiting on stdin, else 0.
        new_term[3] = (new_term[3] & ~(termios.ICANON | termios.ECHO))
        termios.tcsetattr(fd, termios.TCSANOW, new_term)
        try:
            dr,dw,de = select([sys.stdin], [], [], 0)
            if dr != []:
                return 1
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_term)
            sys.stdout.flush()
        return 0
from dynamixel_sdk import * # Uses Dynamixel SDK library
# Control table addresses (Protocol 2.0 layout; addresses differ per model —
# presumably an X-series table, TODO confirm against the servo datasheet).
ADDR_OPERATING_MODE = 11 # Control table address is different in Dynamixel model
ADDR_TORQUE_ENABLE = 64
ADDR_GOAL_POSITION = 116
ADDR_PRESENT_POSITION = 132
# Protocol version
PROTOCOL_VERSION = 2.0 # See which protocol version is used in the Dynamixel
# Default setting: the six servo bus IDs addressed by this script.
DXL1_ID = 91 # Dynamixel ID : 1
DXL2_ID = 92
DXL3_ID = 93
DXL4_ID = 94
DXL5_ID = 95
DXL6_ID = 96
BAUDRATE = 1000000 # Dynamixel default baudrate : 57600
DEVICENAME = '/dev/ttyUSB0' # Check which port is being used on your controller
# ex) Windows: "COM1" Linux: "/dev/ttyUSB0" Mac: "/dev/tty.usbserial-*"
TORQUE_ENABLE = 1 # Value for enabling the torque
TORQUE_DISABLE = 0 # Value for disabling the torque
DXL_MOVING_STATUS_THRESHOLD = 5 # Dynamixel will rotate between this value
# Key codes used by the interactive loop below.
ESC_ASCII_VALUE = 0x1b
SPACE_ASCII_VALUE = 0x20
# Index of the current posture in the goal-position lists below.
index = 0
# One goal-position list per servo; column k is posture k across all servos.
# Values are raw position ticks (presumably 0-4095 for one turn — confirm
# against the servo model's resolution).
dxl1_goal_position = [2048,2048,2048,2048,2048,3072] # Goal position
dxl2_goal_position = [2048,2048,2048,2048,2048,1024] # Goal position
dxl3_goal_position = [2048,2048,2048,2048,1024,2048] # Goal position
dxl4_goal_position = [2048,2048,2048,2048,3072,2048] # Goal position
dxl5_goal_position = [2048,1000,3000,2048,2048,2048] # Goal position
dxl6_goal_position = [2048,1000,3000,2048,2048,2048] # Goal position
# Initialize PortHandler instance
# Set the port path
# Get methods and members of PortHandlerLinux or PortHandlerWindows
portHandler = PortHandler(DEVICENAME)
# Initialize PacketHandler instance
# Set the protocol version
# Get methods and members of Protocol1PacketHandler or Protocol2PacketHandler
packetHandler = PacketHandler(PROTOCOL_VERSION)
# Open the serial port; on failure wait for a keypress and exit.
if not portHandler.openPort():
    print("Failed to open the port")
    print("Press any key to terminate...")
    getch()
    quit()
print("Succeeded to open the port")

# Configure the port baudrate; on failure wait for a keypress and exit.
if not portHandler.setBaudRate(BAUDRATE):
    print("Failed to change the baudrate")
    print("Press any key to terminate...")
    getch()
    quit()
print("Succeeded to change the baudrate")
# Enable torque on every Dynamixel so the servos hold and track goal positions.
# Replaces six copy-pasted enable stanzas with a single loop; printed output is
# byte-identical to the original per-servo stanzas.
for servo_number, servo_id in enumerate(
        (DXL1_ID, DXL2_ID, DXL3_ID, DXL4_ID, DXL5_ID, DXL6_ID), start=1):
    dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(
        portHandler, servo_id, ADDR_TORQUE_ENABLE, TORQUE_ENABLE)
    if dxl_comm_result != COMM_SUCCESS:
        # Bus-level communication failure (timeout, CRC, ...).
        print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
    elif dxl_error != 0:
        # Servo answered but reported a packet-level error.
        print("%s" % packetHandler.getRxPacketError(dxl_error))
    else:
        print("Dynamixel %d has been successfully connected" % servo_number)
# Servo IDs paired with their goal-position tables, in one fixed order so the
# helpers below can iterate instead of repeating a stanza per servo.
all_dxl_ids = (DXL1_ID, DXL2_ID, DXL3_ID, DXL4_ID, DXL5_ID, DXL6_ID)
all_goal_tables = (dxl1_goal_position, dxl2_goal_position, dxl3_goal_position,
                   dxl4_goal_position, dxl5_goal_position, dxl6_goal_position)


def write_goal_positions(id_goal_pairs):
    """Write one goal position per servo, printing any comm/packet error."""
    for dxl_id, goal in id_goal_pairs:
        comm_result, error = packetHandler.write4ByteTxRx(
            portHandler, dxl_id, ADDR_GOAL_POSITION, goal)
        if comm_result != COMM_SUCCESS:
            print("%s" % packetHandler.getTxRxResult(comm_result))
        elif error != 0:
            print("%s" % packetHandler.getRxPacketError(error))


def read_present_positions():
    """Read every servo's present position; returns them in all_dxl_ids order."""
    positions = []
    for dxl_id in all_dxl_ids:
        position, comm_result, error = packetHandler.read4ByteTxRx(
            portHandler, dxl_id, ADDR_PRESENT_POSITION)
        if comm_result != COMM_SUCCESS:
            print("%s" % packetHandler.getTxRxResult(comm_result))
        elif error != 0:
            print("%s" % packetHandler.getRxPacketError(error))
        positions.append(position)
    return positions


def print_present_positions(positions):
    """Print all servo positions on one line, matching the original format."""
    print(" " + ", ".join("[ID:%03d] : %03d" % (dxl_id, position)
                          for dxl_id, position in zip(all_dxl_ids, positions)))


# Interactive posture cycle: each keypress commands posture `index` on all six
# servos, then polls until they arrive (or SPACE/ESC interrupts the motion).
while 1:
    print("\nPress any key to continue! (or press ESC to quit!)")
    if getch() == chr(ESC_ASCII_VALUE):
        break
    print(" Press SPACE key to clear multi-turn information! (or press ESC to stop!)")
    # Command the current posture on every servo.
    write_goal_positions([(dxl_id, table[index])
                          for dxl_id, table in zip(all_dxl_ids, all_goal_tables)])
    while 1:
        present_positions = read_present_positions()
        print_present_positions(present_positions)
        if kbhit():
            c = getch()
            if c == chr(SPACE_ASCII_VALUE):
                print("\n Stop & Clear Multi-Turn Information! ")
                # Freeze motion by re-targeting each servo at its current spot.
                write_goal_positions(list(zip(all_dxl_ids, present_positions)))
                time.sleep(0.3)
                present_positions = read_present_positions()
                print_present_positions(present_positions)
                break
            elif c == chr(ESC_ASCII_VALUE):
                print("\n Stopped!!")
                # Freeze motion by re-targeting each servo at its current spot.
                write_goal_positions(list(zip(all_dxl_ids, present_positions)))
                break
        # Posture reached: every servo is within the moving-status threshold.
        if all(abs(table[index] - position) < DXL_MOVING_STATUS_THRESHOLD
               for table, position in zip(all_goal_tables, present_positions)):
            break
    time.sleep(1)
    # Advance to the next posture, wrapping after the last one.
    # BUG FIX: the original used `if index > 5: index = 0 else: index += 1`,
    # which let index reach 6 and raised IndexError on the 6-element goal lists.
    index = (index + 1) % len(dxl1_goal_position)
# Disable torque on every Dynamixel so the joints move freely again.
# (The original repeated this stanza six times with copy-pasted comments that
# mislabelled DXL3-DXL6 as "DXL1"/"DXL2"; a loop removes both problems.)
for servo_id in (DXL1_ID, DXL2_ID, DXL3_ID, DXL4_ID, DXL5_ID, DXL6_ID):
    dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(
        portHandler, servo_id, ADDR_TORQUE_ENABLE, TORQUE_DISABLE)
    if dxl_comm_result != COMM_SUCCESS:
        print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
    elif dxl_error != 0:
        print("%s" % packetHandler.getRxPacketError(dxl_error))

# Close port
portHandler.closePort()
| 49.676275 | 549 | 0.680191 | 2,634 | 22,404 | 5.498861 | 0.077069 | 0.060895 | 0.11309 | 0.046396 | 0.859224 | 0.850387 | 0.843828 | 0.822425 | 0.801919 | 0.795153 | 0 | 0.029764 | 0.232191 | 22,404 | 450 | 550 | 49.786667 | 0.812231 | 0.11373 | 0 | 0.708075 | 0 | 0.006211 | 0.050751 | 0 | 0 | 0 | 0.000404 | 0 | 0 | 0 | null | null | 0 | 0.024845 | null | null | 0.319876 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1358d3cf7995fd288d3ba04a436d1a68b38c77bc | 7,518 | py | Python | experiments/comparison.py | HaolingZHANG/FasterHammingCalculator | 5966b0a46b126d2f3c95d64d5495d948c6e5ed84 | [
"MIT"
] | 1 | 2022-03-14T03:37:31.000Z | 2022-03-14T03:37:31.000Z | experiments/comparison.py | HaolingZHANG/FasterHammingCalculator | 5966b0a46b126d2f3c95d64d5495d948c6e5ed84 | [
"MIT"
] | null | null | null | experiments/comparison.py | HaolingZHANG/FasterHammingCalculator | 5966b0a46b126d2f3c95d64d5495d948c6e5ed84 | [
"MIT"
] | null | null | null | from matplotlib import pyplot, patches
from numpy import array, random, sum, min, max, median, log10
from time import process_time
from calculator import hamming_group, hamming_matrix
# Violin colors per timing series: [edge, face] for "customized" vs "numpy.sum".
used_colors = [["#FE817D", "#FCBBAE"], ["#81B8DF", "#B1CCDF"]]
# Legend labels for the two timing strategies being compared.
labels = ["customized", "numpy.sum"]
# Horizontal offsets so the two violins sit side by side at each x position.
bias = [-0.25, 0.25]
# noinspection PyTypeChecker
def calculate_1(random_seed, sample_numbers, variable_number, repeats):
    """Benchmark hamming_group against a per-row numpy.sum loop and plot timings.

    For each sample count, times both strategies `repeats` times on random
    binary data, then draws side-by-side violins of log10(seconds) and saves
    the figure to "result.1.svg".

    :param random_seed: seed for numpy's RNG, for reproducible benchmarks.
    :param sample_numbers: iterable of group sizes to benchmark (the plot's
        x tick labels assume four entries of 10^4..10^7 — confirm at call site).
    :param variable_number: number of binary variables per sample.
    :param repeats: timing repetitions per sample count.
    """
    random.seed(random_seed)
    results = []
    for sample_number in sample_numbers:
        our_record, sum_record = [], []
        for _ in range(repeats):
            sample = random.randint(0, 2, size=(variable_number,))
            sample_group = random.randint(0, 2, size=(int(sample_number), variable_number))
            # Time the customized calculator.
            start = process_time()
            hamming_group(observed_sample=sample, sample_group=sample_group, threshold=None)
            our_record.append(process_time() - start)
            # Time the naive per-row numpy.sum equivalent.
            start = process_time()
            for position in range(len(sample_group)):
                sum(sample != sample_group[position])
            sum_record.append(process_time() - start)
        results.append([our_record, sum_record])
    results = array(results)
    pyplot.figure(figsize=(10, 5), tight_layout=True)
    # BUG FIX: iterate the actual number of benchmarked sample counts instead
    # of a hardcoded range(4), which raised IndexError for shorter inputs.
    for data_index in range(len(results)):
        for bias_index in range(2):
            used_data = results[data_index, bias_index]
            if min(used_data) > 1e-10:
                used_data = log10(used_data)
                if max(used_data) - min(used_data) < 0.02:
                    # Spread too small for a violin: draw a bar + marker.
                    result = median(used_data)
                    pyplot.hlines(result, data_index + bias[bias_index] - 0.08, data_index + bias[bias_index] + 0.08,
                                  linewidths=1, edgecolors=used_colors[bias_index][0], zorder=3)
                    pyplot.scatter([data_index + bias[bias_index]], result,
                                   color="white", edgecolor=used_colors[bias_index][0], linewidth=1, s=20, zorder=4)
                else:
                    violin = pyplot.violinplot(dataset=used_data, positions=[data_index + bias[bias_index]],
                                               bw_method=0.5, showextrema=False, widths=0.3)
                    for patch in violin["bodies"]:
                        patch.set_edgecolor(used_colors[bias_index][0])
                        patch.set_facecolor(used_colors[bias_index][1])
                        patch.set_linewidth(1)
                        patch.set_alpha(1)
                    pyplot.scatter([data_index + bias[bias_index]], median(used_data),
                                   color="white", edgecolor=used_colors[bias_index][0], linewidth=1, s=40, zorder=4)
            else:
                # Timing below resolution: mark with an "x" at the axis floor.
                pyplot.scatter([data_index + bias[bias_index]], [-2],
                               color=used_colors[bias_index][0], marker="x", s=60)
        if data_index % 2 != 0:
            # Shade alternating columns to separate the sample-count groups.
            pyplot.fill_between([data_index - 0.5, data_index + 0.5], [-2.2, -2.2], [3.2, 3.2],
                                color="#F1F1F1", zorder=0)
    legends = [patches.Patch(facecolor=used_colors[index][1], edgecolor=used_colors[index][0],
                             linewidth=1, label=labels[index]) for index in range(2)]
    pyplot.legend(handles=legends, loc="upper left", fontsize=12)
    pyplot.xlabel("different number of samples with 100 variables", fontsize=12)
    pyplot.xlim(-0.5, 3.5)
    pyplot.xticks([0, 1, 2, 3], ["$10^4$", "$10^5$", "$10^6$", "$10^7$"], fontsize=12)
    pyplot.ylabel("seconds spent", fontsize=12)
    pyplot.ylim(-2.2, 3.2)
    pyplot.yticks([-2, -1, 0, 1, 2, 3],
                  ["$10^{-2}$", "$10^{-1}$", "$10^0$", "$10^1$", "$10^2$", "$10^3$"], fontsize=12)
    pyplot.savefig("result.1.svg", format="svg", bbox_inches="tight", dpi=600)
    pyplot.close()
# noinspection PyTypeChecker
def calculate_2(random_seed, sample_numbers, variable_number, repeats):
    """Benchmark hamming_matrix against a naive pairwise sum loop and plot timings.

    For each sample count in ``sample_numbers``, times ``repeats`` runs of
    (a) ``hamming_matrix`` and (b) an explicit O(n^2) pairwise Hamming loop,
    then draws log10-scaled violin/median plots and saves them to
    ``result.2.svg``.

    :param random_seed: seed for numpy's global random state (reproducibility).
    :param sample_numbers: iterable of sample counts to benchmark (plot assumes 4).
    :param variable_number: number of binary variables per sample.
    :param repeats: timing repetitions per sample count.
    """
    random.seed(random_seed)
    results = []
    for sample_number in sample_numbers:
        our_record, sum_record = [], []
        for repeat in range(repeats):
            print(sample_number, repeat + 1, repeats)
            # Random binary samples: values in {0, 1}.
            samples = random.randint(0, 2, size=(int(sample_number), variable_number))
            start = process_time()
            hamming_matrix(samples=samples, threshold=None)
            our_record.append(process_time() - start)
            start = process_time()
            # Baseline: naive pairwise Hamming distance over all sample pairs.
            for position_1 in range(len(samples) - 1):
                for position_2 in range(position_1 + 1, len(samples)):
                    sum(samples[position_1] != samples[position_2])
            sum_record.append(process_time() - start)
        results.append([our_record, sum_record])
    results = array(results)
    # Bug fix: calculate_1 opens its own sized figure (figsize=(10, 5));
    # without this, calculate_2 drew on an implicit default-size figure and
    # result.2.svg was inconsistent with result.1.svg.
    pyplot.figure(figsize=(10, 5), tight_layout=True)
    # Generalized from the hard-coded range(4): one group per sample count.
    for data_index in range(len(sample_numbers)):
        for bias_index in range(2):
            used_data = results[data_index, bias_index]
            if min(used_data) > 1e-10:
                used_data = log10(used_data)
                if max(used_data) - min(used_data) < 0.02:
                    # Nearly constant timings: draw a short median bar instead
                    # of a degenerate violin.
                    result = median(used_data)
                    pyplot.hlines(result, data_index + bias[bias_index] - 0.08, data_index + bias[bias_index] + 0.08,
                                  linewidths=1, edgecolors=used_colors[bias_index][0], zorder=3)
                    pyplot.scatter([data_index + bias[bias_index]], result,
                                   color="white", edgecolor=used_colors[bias_index][0], linewidth=1, s=20, zorder=4)
                else:
                    violin = pyplot.violinplot(dataset=used_data, positions=[data_index + bias[bias_index]],
                                               bw_method=0.5, showextrema=False, widths=0.3)
                    for patch in violin["bodies"]:
                        patch.set_edgecolor(used_colors[bias_index][0])
                        patch.set_facecolor(used_colors[bias_index][1])
                        patch.set_linewidth(1)
                        patch.set_alpha(1)
                    pyplot.scatter([data_index + bias[bias_index]], median(used_data),
                                   color="white", edgecolor=used_colors[bias_index][0], linewidth=1, s=40, zorder=4)
            else:
                # Timings too small to trust (effectively zero): mark with an x
                # at the bottom of the axis.
                pyplot.scatter([data_index + bias[bias_index]], [-2],
                               color=used_colors[bias_index][0], marker="x", s=60)
        if data_index % 2 != 0:
            # Alternate light-grey background stripes for readability.
            pyplot.fill_between([data_index - 0.5, data_index + 0.5], [-2.2, -2.2], [3.2, 3.2],
                                color="#F1F1F1", zorder=0)
    legends = [patches.Patch(facecolor=used_colors[index][1], edgecolor=used_colors[index][0],
                             linewidth=1, label=labels[index]) for index in range(2)]
    pyplot.legend(handles=legends, loc="upper left", fontsize=12)
    pyplot.xlabel("different number of samples with 20 variables", fontsize=12)
    pyplot.xlim(-0.5, 3.5)
    pyplot.xticks([0, 1, 2, 3], [r"$500$", r"$1000$", r"$5000$", r"$10000$"], fontsize=12)
    pyplot.ylabel("seconds spent", fontsize=12)
    pyplot.ylim(-2.2, 3.2)
    pyplot.yticks([-2, -1, 0, 1, 2, 3],
                  ["$10^{-2}$", "$10^{-1}$", "$10^0$", "$10^1$", "$10^2$", "$10^3$"], fontsize=12)
    pyplot.savefig("result.2.svg", format="svg", bbox_inches="tight", dpi=600)
    pyplot.close()
if __name__ == "__main__":
    # Both benchmarks share the same seed, variable count, and repeat count;
    # only the benchmarked sample sizes differ.
    shared = dict(random_seed=2022, variable_number=20, repeats=20)
    calculate_1(sample_numbers=[1e4, 1e5, 1e6, 1e7], **shared)
    calculate_2(sample_numbers=[5e2, 1e3, 5e3, 1e4], **shared)
| 47.88535 | 117 | 0.571163 | 948 | 7,518 | 4.348101 | 0.165612 | 0.061135 | 0.044153 | 0.049491 | 0.830422 | 0.800097 | 0.800097 | 0.800097 | 0.800097 | 0.800097 | 0 | 0.061224 | 0.289572 | 7,518 | 156 | 118 | 48.192308 | 0.710541 | 0.00705 | 0 | 0.758065 | 0 | 0 | 0.05522 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016129 | false | 0 | 0.032258 | 0 | 0.048387 | 0.008065 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
13c83413c76d996a65cf865f4c4bcc45b28c45d6 | 46,636 | py | Python | script/urls.py | JonasFrey96/ml-hypersim-pytorch | 926192c08689d01bf010976d8d194493f04d2585 | [
"Apache-2.0"
] | null | null | null | script/urls.py | JonasFrey96/ml-hypersim-pytorch | 926192c08689d01bf010976d8d194493f04d2585 | [
"Apache-2.0"
] | null | null | null | script/urls.py | JonasFrey96/ml-hypersim-pytorch | 926192c08689d01bf010976d8d194493f04d2585 | [
"Apache-2.0"
] | 1 | 2021-02-24T04:57:21.000Z | 2021-02-24T04:57:21.000Z | urls_to_download = [
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_001_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_002_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_003_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_004_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_005_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_006_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_007_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_008_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_009_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_010_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_011_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_012_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_012_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_012_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_012_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_012_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_012_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_013_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_013_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_013_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_013_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_013_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_013_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_013_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_014_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_014_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_014_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_015_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_016_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_017_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_018_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_019_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_021_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_021_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_021_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_021_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_021_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_021_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_021_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_022_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_023_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_011.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_012.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_013.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_014.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_015.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_016.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_017.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_018.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_024_019.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_011.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_012.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_013.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_014.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_015.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_016.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_017.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_018.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_019.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_026_020.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_012.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_013.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_014.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_016.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_017.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_018.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_019.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_020.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_010.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_001.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_002.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_003.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_004.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_005.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_006.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_007.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_008.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_009.zip",
"https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_010.zip",
] | 101.603486 | 101 | 0.774488 | 7,315 | 46,636 | 4.81244 | 0.009433 | 0.116837 | 0.194728 | 0.311564 | 0.999432 | 0.999432 | 0.999432 | 0.999432 | 0.999432 | 0.999432 | 0 | 0.072134 | 0.049061 | 46,636 | 459 | 102 | 101.603486 | 0.721656 | 0 | 0 | 0 | 0 | 0.995643 | 0.921114 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
13f1c7a04ac34fbbfe35654c1515b3e7176f1912 | 22,120 | py | Python | Model/Mixmatch_Wide_Resnet.py | maple-research-lab/EnAET | 45872fc8704b723395e8c5a489a3157a771fc6df | [
"MIT"
] | 87 | 2019-11-22T01:53:50.000Z | 2022-03-09T02:07:33.000Z | Model/Mixmatch_Wide_Resnet.py | maple-research-lab/EnAET | 45872fc8704b723395e8c5a489a3157a771fc6df | [
"MIT"
] | 11 | 2019-11-21T15:18:08.000Z | 2022-03-12T00:05:18.000Z | Model/Mixmatch_Wide_Resnet.py | wang3702/EnAET | fd0f5f94afea0860da8671753351b09fa058ac42 | [
"MIT"
] | 9 | 2019-11-22T01:52:49.000Z | 2021-03-28T08:47:05.000Z | # /*******************************************************************************
# * Author : Xiao Wang
# * Email : wang3702@purdue.edu xiaowang20140001@gmail.com
# *******************************************************************************/
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
class BasicBlock(nn.Module):
    """Pre-activation residual block (BN -> LeakyReLU -> Conv, twice).

    When the channel counts differ a 1x1 strided convolution projects the
    shortcut. With ``activate_before_residual`` set on a channel-changing
    block, the input itself is pre-activated first so the shortcut also
    sees the activated tensor (standard WRN group-entry behavior).
    """
    def __init__(self, in_planes, out_planes, stride, dropRate=0.0, activate_before_residual=False):
        super(BasicBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes, momentum=0.001)
        self.relu1 = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_planes, momentum=0.001)
        self.relu2 = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        self.conv2 = nn.Conv2d(out_planes, out_planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.droprate = dropRate
        self.equalInOut = in_planes == out_planes
        # 1x1 projection shortcut is only needed when channels change.
        if self.equalInOut:
            self.convShortcut = None
        else:
            self.convShortcut = nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride,
                                          padding=0, bias=False)
        self.activate_before_residual = activate_before_residual

    def forward(self, x):
        if self.equalInOut or not self.activate_before_residual:
            out = self.relu1(self.bn1(x))
        else:
            # Group-entry block: pre-activate the input itself, so both the
            # residual branch and the projection shortcut see it activated.
            x = self.relu1(self.bn1(x))
        # Residual branch: conv1 -> BN -> LeakyReLU (-> dropout) -> conv2.
        # When channels change, conv1 reads `x` (mirrors the reference
        # MixMatch WRN implementation exactly).
        branch = self.conv1(out) if self.equalInOut else self.conv1(x)
        branch = self.relu2(self.bn2(branch))
        if self.droprate > 0:
            branch = F.dropout(branch, p=self.droprate, training=self.training)
        branch = self.conv2(branch)
        shortcut = x if self.equalInOut else self.convShortcut(x)
        return torch.add(shortcut, branch)
class NetworkBlock(nn.Module):
    """One stage of ``nb_layers`` stacked residual blocks.

    Only the first block may change the channel count (in_planes ->
    out_planes) or apply the requested stride; every subsequent block maps
    out_planes -> out_planes with stride 1.
    """
    def __init__(self, nb_layers, in_planes, out_planes, block, stride, dropRate=0.0, activate_before_residual=False):
        super(NetworkBlock, self).__init__()
        self.layer = self._make_layer(block, in_planes, out_planes, nb_layers, stride, dropRate, activate_before_residual)

    def _make_layer(self, block, in_planes, out_planes, nb_layers, stride, dropRate, activate_before_residual):
        stacked = []
        for idx in range(int(nb_layers)):
            first = idx == 0
            stacked.append(block(in_planes if first else out_planes,
                                 out_planes,
                                 stride if first else 1,
                                 dropRate,
                                 activate_before_residual))
        return nn.Sequential(*stacked)

    def forward(self, x):
        return self.layer(x)
class NetworkBlock_Same(nn.Module):
    """A stage of identically-configured blocks.

    Every repetition receives the same (in_planes, out_planes, stride)
    arguments -- there is no special-casing of the first block.
    """
    def __init__(self, nb_layers, in_planes, out_planes, block, stride, dropRate=0.0, activate_before_residual=False):
        super(NetworkBlock_Same, self).__init__()
        self.layer = self._make_layer(block, in_planes, out_planes, nb_layers, stride, dropRate, activate_before_residual)

    def _make_layer(self, block, in_planes, out_planes, nb_layers, stride, dropRate, activate_before_residual):
        repeated = [
            block(in_planes, out_planes, stride, dropRate, activate_before_residual)
            for _ in range(int(nb_layers))
        ]
        return nn.Sequential(*repeated)

    def forward(self, x):
        return self.layer(x)
from Model.Attention import Self_Attn
class WX_WideResNet(nn.Module):
    """Wide ResNet (WRN-depth-widen_factor) with optional self-attention.

    The network is partitioned into sequential "stages" stored in
    ``self._feature_blocks`` so intermediate features can be requested by
    name (see ``all_feat_names``) from :meth:`forward`.

    run_type wiring (as built below):
      * 0 -- plain WRN: conv1 -> block1 -> block2 -> block3 -> classifier
      * 1 -- a Self_Attn stage is inserted after the 2nd residual group
      * 2 -- a Self_Attn stage is appended after the 3rd residual group
    """
    def __init__(self, num_classes, depth=28, widen_factor=2, dropRate=0.0,run_type=0,num_stages=4):
        super(WX_WideResNet, self).__init__()
        # Channel widths for the stem and the three residual groups.
        nChannels = [16, 16*widen_factor, 32*widen_factor, 64*widen_factor]
        # Standard WRN constraint: depth = 6*n + 4.
        assert((depth - 4) % 6 == 0)
        n = (depth - 4) / 6
        block = BasicBlock
        self.num_stages = num_stages
        # One extra stage container is needed when an attention stage exists.
        if run_type == 0:
            blocks = [nn.Sequential() for i in range(self.num_stages)]
        else:
            blocks = [nn.Sequential() for i in range(self.num_stages + 1)]
        # 1st conv before any network block
        self.conv1 = nn.Conv2d(3, nChannels[0], kernel_size=3, stride=1,
                               padding=1, bias=False)
        count_stage=0
        blocks[count_stage].add_module('Block1', self.conv1)
        count_stage += 1
        # 1st residual group (stride 1, pre-activates its input).
        self.block1 = NetworkBlock(n, nChannels[0], nChannels[1], block, 1, dropRate, activate_before_residual=True)
        blocks[count_stage].add_module('Block2', self.block1)
        count_stage += 1
        # 2nd residual group (stride 2).
        self.block2 = NetworkBlock(n, nChannels[1], nChannels[2], block, 2, dropRate)
        blocks[count_stage].add_module('Block3', self.block2)
        count_stage += 1
        # run_type 1: attention between the 2nd and 3rd residual groups.
        if run_type==1:
            self.attention = Self_Attn(nChannels[2], 'relu')
            blocks[count_stage].add_module('Attention', self.attention)
            count_stage += 1
        # 3rd residual group (stride 2).
        self.block3 = NetworkBlock(n, nChannels[2], nChannels[3], block, 2, dropRate)
        blocks[count_stage].add_module('Block4', self.block3)
        count_stage += 1
        # run_type 2: attention after the final residual group.
        if run_type==2:
            self.attention = Self_Attn(nChannels[3], 'relu')
            blocks[count_stage].add_module('Attention', self.attention)
            count_stage += 1
        self._feature_blocks = nn.ModuleList(blocks)
        # global average pooling and classifier
        self.bn1 = nn.BatchNorm2d(nChannels[3], momentum=0.001)
        self.relu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        self.fc = nn.Linear(nChannels[3], num_classes)
        self.nChannels = nChannels[3]
        self.run_type=run_type
        # He-style init for convs, unit/zero for BN, Xavier for the FC head.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                nn.init.xavier_normal_(m.weight.data)
                m.bias.data.zero_()
        # Feature names matching the order of self._feature_blocks, plus the
        # final 'classifier' pseudo-stage handled inline in forward().
        if run_type==0:
            self.all_feat_names = ['block' + str(s + 1) for s in
                                   range(self.num_stages)] + ['classifier', ]
        elif run_type==1:
            self.all_feat_names = ['block' + str(s + 1) for s in
                                   range(3)]+ ['Attention']+['block' + str(s + 1) for s in
                                   range(3,self.num_stages)] + ['classifier', ]
            self.num_stages+=1
        elif run_type==2:
            self.all_feat_names = ['block' + str(s + 1) for s in
                                   range(self.num_stages)]+ ['Attention'] + ['classifier', ]
    def _parse_out_keys_arg(self, out_feat_keys):
        """Validate ``out_feat_keys`` and locate the deepest requested stage.

        :param out_feat_keys: list of names from ``all_feat_names``, or
            None to request only the final classifier output.
        :return: (validated key list, index of the last stage to execute).
        :raises ValueError: on an empty list, an unknown key, or a duplicate.
        """
        # By default return the features of the last layer / module.
        out_feat_keys = [self.all_feat_names[-1], ] if out_feat_keys is None else out_feat_keys
        if len(out_feat_keys) == 0:
            raise ValueError('Empty list of output feature keys.')
        for f, key in enumerate(out_feat_keys):
            if key not in self.all_feat_names:
                raise ValueError(
                    'Feature with name {0} does not exist. Existing features: {1}.'.format(key, self.all_feat_names))
            elif key in out_feat_keys[:f]:
                raise ValueError('Duplicate output feature key: {0}.'.format(key))
        # Find the highest output feature in `out_feat_keys`.
        max_out_feat = max([self.all_feat_names.index(key) for key in out_feat_keys])
        return out_feat_keys, max_out_feat
    def forward(self, x,out_feat_keys=None):
        """Run the network up to the deepest requested feature.

        :param x: input batch with 3 channels (conv1 expects 3).
            # assumes a 32x32 input so the classifier pools an 8x8 map -- TODO confirm
        :param out_feat_keys: optional list of feature names to return.
        :return: just the feature(s) when ``out_feat_keys`` is None;
            otherwise ``(features, attention)`` where ``attention`` is None
            unless an Attention stage was executed.
        """
        go_direct_flag=False
        if out_feat_keys==None:
            go_direct_flag=True
        elif self.run_type==0 and 'Attention' in out_feat_keys:
            # NOTE(review): mutates the caller's list in place -- a plain WRN
            # has no Attention stage, so the key is remapped to 'block3'.
            out_feat_keys[out_feat_keys.index('Attention')] ='block3'
        out_feat_keys, max_out_feat = self._parse_out_keys_arg(out_feat_keys)
        out_feats = [None] * len(out_feat_keys)
        go_attention_flag = False
        feat = x
        for f in range(max_out_feat + 1):
            key = self.all_feat_names[f]
            if key == 'Attention':
                go_attention_flag = True
                # Self_Attn stages return a (features, attention map) pair.
                feat, attention = self._feature_blocks[f](feat)
            elif key == 'classifier':
                # Head: BN + LeakyReLU, 8x8 average pool, flatten, FC.
                feat = self.relu(self.bn1(feat))
                feat = F.avg_pool2d(feat, 8)
                feat=feat.view(-1, self.nChannels)
                feat=self.fc(feat)
            else:
                feat = self._feature_blocks[f](feat)
            if key in out_feat_keys:
                out_feats[out_feat_keys.index(key)] = feat
        # Unwrap a single requested feature from its list.
        out_feats = out_feats[0] if len(out_feats) == 1 else out_feats
        if go_direct_flag:
            return out_feats
        # here is an important output link to the classifier
        if go_attention_flag:
            return out_feats, attention
        else:
            return out_feats, None
class WX_WideResNet_STL(nn.Module):
    """Wide ResNet variant with a 4th residual group for larger inputs.

    Same stage/feature-name machinery as WX_WideResNet, but with an extra
    residual group (Block5) and a 12x12 average pool in the classifier
    head. Only run_type 0 (plain) and 1 (Self_Attn inserted between the
    3rd and 4th residual groups) are wired up here.
    """
    def __init__(self, num_classes, depth=28, widen_factor=2, dropRate=0.0,run_type=0,num_stages=5):
        super(WX_WideResNet_STL, self).__init__()
        # Channel widths for the stem and the four residual groups.
        nChannels = [16, 16*widen_factor, 32*widen_factor, 64*widen_factor,128*widen_factor]
        # Standard WRN constraint: depth = 6*n + 4.
        assert((depth - 4) % 6 == 0)
        n = (depth - 4) / 6
        block = BasicBlock
        self.num_stages = num_stages
        # One extra stage container is needed when an attention stage exists.
        if run_type == 0:
            blocks = [nn.Sequential() for i in range(self.num_stages)]
        else:
            blocks = [nn.Sequential() for i in range(self.num_stages + 1)]
        # 1st conv before any network block
        self.conv1 = nn.Conv2d(3, nChannels[0], kernel_size=3, stride=1,
                               padding=1, bias=False)
        count_stage=0
        blocks[count_stage].add_module('Block1', self.conv1)
        count_stage += 1
        # 1st residual group (stride 1, pre-activates its input).
        self.block1 = NetworkBlock(n, nChannels[0], nChannels[1], block, 1, dropRate, activate_before_residual=True)
        blocks[count_stage].add_module('Block2', self.block1)
        count_stage += 1
        # 2nd residual group (stride 2).
        self.block2 = NetworkBlock(n, nChannels[1], nChannels[2], block, 2, dropRate)
        blocks[count_stage].add_module('Block3', self.block2)
        count_stage += 1
        # 3rd residual group (stride 2).
        self.block3 = NetworkBlock(n, nChannels[2], nChannels[3], block, 2, dropRate)
        blocks[count_stage].add_module('Block4', self.block3)
        count_stage += 1
        # run_type 1: attention between the 3rd and 4th residual groups.
        if run_type==1:
            self.attention = Self_Attn(nChannels[3], 'relu')
            blocks[count_stage].add_module('Attention', self.attention)
            count_stage += 1
        # 4th residual group (stride 2), specific to this STL variant.
        self.block4 = NetworkBlock(n, nChannels[3], nChannels[4], block, 2, dropRate)
        blocks[count_stage].add_module('Block5', self.block4)
        count_stage += 1
        self._feature_blocks = nn.ModuleList(blocks)
        # global average pooling and classifier
        self.bn1 = nn.BatchNorm2d(nChannels[4], momentum=0.001)
        self.relu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        self.fc = nn.Linear(nChannels[4], num_classes)
        self.nChannels = nChannels[4]
        self.run_type=run_type
        # He-style init for convs, unit/zero for BN, Xavier for the FC head.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                nn.init.xavier_normal_(m.weight.data)
                m.bias.data.zero_()
        # Feature names matching the order of self._feature_blocks, plus the
        # final 'classifier' pseudo-stage handled inline in forward().
        if run_type==0:
            self.all_feat_names = ['block' + str(s + 1) for s in
                                   range(self.num_stages)] + ['classifier', ]
        elif run_type==1:
            self.all_feat_names = ['block' + str(s + 1) for s in
                                   range(4)]+ ['Attention']+['block' + str(s + 1) for s in
                                   range(4,self.num_stages)] + ['classifier', ]
            self.num_stages+=1
    def _parse_out_keys_arg(self, out_feat_keys):
        """Validate ``out_feat_keys`` and locate the deepest requested stage.

        :param out_feat_keys: list of names from ``all_feat_names``, or
            None to request only the final classifier output.
        :return: (validated key list, index of the last stage to execute).
        :raises ValueError: on an empty list, an unknown key, or a duplicate.
        """
        # By default return the features of the last layer / module.
        out_feat_keys = [self.all_feat_names[-1], ] if out_feat_keys is None else out_feat_keys
        if len(out_feat_keys) == 0:
            raise ValueError('Empty list of output feature keys.')
        for f, key in enumerate(out_feat_keys):
            if key not in self.all_feat_names:
                raise ValueError(
                    'Feature with name {0} does not exist. Existing features: {1}.'.format(key, self.all_feat_names))
            elif key in out_feat_keys[:f]:
                raise ValueError('Duplicate output feature key: {0}.'.format(key))
        # Find the highest output feature in `out_feat_keys`.
        max_out_feat = max([self.all_feat_names.index(key) for key in out_feat_keys])
        return out_feat_keys, max_out_feat
    def forward(self, x,out_feat_keys=None):
        """Run the network up to the deepest requested feature.

        :param x: input batch with 3 channels (conv1 expects 3).
            # assumes a 96x96 (STL-style) input so the head pools a 12x12 map -- TODO confirm
        :param out_feat_keys: optional list of feature names to return.
        :return: just the feature(s) when ``out_feat_keys`` is None;
            otherwise ``(features, attention)`` where ``attention`` is None
            unless an Attention stage was executed.
        """
        go_direct_flag=False
        if out_feat_keys==None:
            go_direct_flag=True
        elif self.run_type==0 and 'Attention' in out_feat_keys:
            # NOTE(review): mutates the caller's list in place -- a plain WRN
            # has no Attention stage, so the key is remapped to 'block4'.
            out_feat_keys[out_feat_keys.index('Attention')] ='block4'
        out_feat_keys, max_out_feat = self._parse_out_keys_arg(out_feat_keys)
        out_feats = [None] * len(out_feat_keys)
        go_attention_flag = False
        feat = x
        for f in range(max_out_feat + 1):
            key = self.all_feat_names[f]
            if key == 'Attention':
                go_attention_flag = True
                # Self_Attn stages return a (features, attention map) pair.
                feat, attention = self._feature_blocks[f](feat)
            elif key == 'classifier':
                # Head: BN + LeakyReLU, 12x12 average pool, flatten, FC.
                feat = self.relu(self.bn1(feat))
                feat = F.avg_pool2d(feat, 12)#reduce_mean
                feat=feat.view(-1, self.nChannels)
                feat=self.fc(feat)
            else:
                feat = self._feature_blocks[f](feat)
            if key in out_feat_keys:
                out_feats[out_feat_keys.index(key)] = feat
        # Unwrap a single requested feature from its list.
        out_feats = out_feats[0] if len(out_feats) == 1 else out_feats
        if go_direct_flag:
            return out_feats
        # here is an important output link to the classifier
        if go_attention_flag:
            return out_feats, attention
        else:
            return out_feats, None
# NOTE: backbone widened again to match a Google baseline configuration;
# the original author doubted this change was useful.
class WX_LargeWideResNet(nn.Module):
def __init__(self, num_classes, depth=28, widen_factor=2, dropRate=0.0,run_type=0,num_stages=4):
super(WX_LargeWideResNet, self).__init__()
nChannels = [16, 135, 135*widen_factor, 270*widen_factor]
assert((depth - 4) % 6 == 0)
n = (depth - 4) / 6
block = BasicBlock
self.num_stages = num_stages
if run_type == 0:
blocks = [nn.Sequential() for i in range(self.num_stages)]
else:
blocks = [nn.Sequential() for i in range(self.num_stages + 1)]
# 1st conv before any network block
self.conv1 = nn.Conv2d(3, nChannels[0], kernel_size=3, stride=1,
padding=1, bias=False)
count_stage=0
blocks[count_stage].add_module('Block1', self.conv1)
count_stage += 1
# 1st block
self.block1 = NetworkBlock(n, nChannels[0], nChannels[1], block, 1, dropRate, activate_before_residual=True)
blocks[count_stage].add_module('Block2', self.block1)
count_stage += 1
# 2nd block
self.block2 = NetworkBlock(n, nChannels[1], nChannels[2], block, 2, dropRate)
blocks[count_stage].add_module('Block3', self.block2)
count_stage += 1
if run_type==1:
self.attention = Self_Attn(nChannels[2], 'relu')
blocks[count_stage].add_module('Attention', self.attention)
count_stage += 1
# 3rd block
self.block3 = NetworkBlock(n, nChannels[2], nChannels[3], block, 2, dropRate)
blocks[count_stage].add_module('Block4', self.block3)
count_stage += 1
#add attention in the final layer
if run_type==2:
self.attention = Self_Attn(nChannels[3], 'relu')
blocks[count_stage].add_module('Attention', self.attention)
count_stage += 1
self._feature_blocks = nn.ModuleList(blocks)
# global average pooling and classifier
self.bn1 = nn.BatchNorm2d(nChannels[3], momentum=0.001)
self.relu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
self.fc = nn.Linear(nChannels[3], num_classes)
self.nChannels = nChannels[3]
self.run_type=run_type
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
nn.init.xavier_normal_(m.weight.data)
m.bias.data.zero_()
if run_type==0:
self.all_feat_names = ['block' + str(s + 1) for s in
range(self.num_stages)] + ['classifier', ]
elif run_type==1:
self.all_feat_names = ['block' + str(s + 1) for s in
range(3)]+ ['Attention']+['block' + str(s + 1) for s in
range(3,self.num_stages)] + ['classifier', ]
self.num_stages+=1
elif run_type==2:
self.all_feat_names = ['block' + str(s + 1) for s in
range(self.num_stages)]+ ['Attention'] + ['classifier', ]
def _parse_out_keys_arg(self, out_feat_keys):
"""
:param out_feat_keys:
:return:
the lasy layer index from out_feat_keys
"""
# By default return the features of the last layer / module.
out_feat_keys = [self.all_feat_names[-1], ] if out_feat_keys is None else out_feat_keys
if len(out_feat_keys) == 0:
raise ValueError('Empty list of output feature keys.')
for f, key in enumerate(out_feat_keys):
if key not in self.all_feat_names:
raise ValueError(
'Feature with name {0} does not exist. Existing features: {1}.'.format(key, self.all_feat_names))
elif key in out_feat_keys[:f]:
raise ValueError('Duplicate output feature key: {0}.'.format(key))
# Find the highest output feature in `out_feat_keys
max_out_feat = max([self.all_feat_names.index(key) for key in out_feat_keys])
return out_feat_keys, max_out_feat
def forward(self, x,out_feat_keys=None):
# out = self.conv1(x)
# out = self.block1(out)
# out = self.block2(out)
# out = self.block3(out)
# out = self.relu(self.bn1(out))
# out = F.avg_pool2d(out, 8)
# out = out.view(-1, self.nChannels)
# return self.fc(out)
go_direct_flag=False
if out_feat_keys==None:
go_direct_flag=True
elif self.run_type==0 and 'Attention' in out_feat_keys:
#out_feat_keys.append('block3')
#out_feat_keys.remove('Attention')
out_feat_keys[out_feat_keys.index('Attention')] ='block3'
out_feat_keys, max_out_feat = self._parse_out_keys_arg(out_feat_keys)
out_feats = [None] * len(out_feat_keys)
go_attention_flag = False
feat = x
for f in range(max_out_feat + 1):
key = self.all_feat_names[f]
if key == 'Attention':
go_attention_flag = True
feat, attention = self._feature_blocks[f](feat)
elif key == 'classifier':
feat = self.relu(self.bn1(feat))
feat = F.avg_pool2d(feat, 8)
#feat = feat.view(feat.size(0), -1)
feat=feat.view(-1, self.nChannels)
# feat = self.linear(feat)
feat=self.fc(feat)
else:
feat = self._feature_blocks[f](feat)
if key in out_feat_keys:
out_feats[out_feat_keys.index(key)] = feat
out_feats = out_feats[0] if len(out_feats) == 1 else out_feats
if go_direct_flag:
return out_feats
# here is an important output link to the classifier
if go_attention_flag:
return out_feats, attention
else:
return out_feats, None
| 45.142857 | 140 | 0.578662 | 2,889 | 22,120 | 4.211838 | 0.07442 | 0.048323 | 0.065089 | 0.030243 | 0.908941 | 0.896203 | 0.892916 | 0.891108 | 0.879109 | 0.862837 | 0 | 0.02734 | 0.305515 | 22,120 | 489 | 141 | 45.235174 | 0.764744 | 0.11094 | 0 | 0.855956 | 0 | 0 | 0.043079 | 0 | 0 | 0 | 0 | 0 | 0.00831 | 1 | 0.047091 | false | 0 | 0.01385 | 0.00554 | 0.124654 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b91f99cb3ceb55a09bb85f675a7f9bf02a95027c | 5,313 | py | Python | dl/layers/pooling.py | nuka137/DeepLearningFramework | 613881e46b48c2206b9424a49106455cb2336d2e | [
"MIT"
] | 10 | 2020-06-28T05:50:41.000Z | 2022-01-30T01:31:43.000Z | dl/layers/pooling.py | nuka137/DeepLearningFramework | 613881e46b48c2206b9424a49106455cb2336d2e | [
"MIT"
] | null | null | null | dl/layers/pooling.py | nuka137/DeepLearningFramework | 613881e46b48c2206b9424a49106455cb2336d2e | [
"MIT"
] | 1 | 2020-07-26T12:36:32.000Z | 2020-07-26T12:36:32.000Z | import numpy as np
from .layer_base import LayerBase
class MaxPooling2DLayer(LayerBase):
def __init__(self, kernel_size, stride=1, padding=0):
super().__init__()
self.cache = {}
self.hparams = {
"kernel_size": kernel_size,
"stride": stride,
"padding": padding,
}
def id(self):
return "MaxPooling2D"
def forward(self, x):
stride = self.hparams["stride"]
padding = self.hparams["padding"]
f = self.hparams["kernel_size"]
m, c, hi, wi = x.shape
ho = int((hi - f + 2 * padding) / stride) + 1
wo = int((wi - f + 2 * padding) / stride) + 1
y = np.zeros((m, c, ho, wo))
x_pad = np.pad(x, ((0, 0), (0, 0), (padding, padding), (padding, padding)),
mode="constant", constant_values=(0, 0))
for i in range(m):
xm = x_pad[i]
for h in range(ho):
h_start = h * stride
h_end = h * stride + f
for w in range(wo):
w_start = w * stride
w_end = w * stride + f
xm_sliced = xm[:, h_start:h_end, w_start:w_end]
xm_sliced_max = np.max(xm_sliced, axis=(1, 2))
y[i, :, h, w] = xm_sliced_max
self.cache["x"] = x
return y
def backward(self, dy):
x = self.cache["x"]
stride = self.hparams["stride"]
padding = self.hparams["padding"]
f = self.hparams["kernel_size"]
m, ci, hi, wi = x.shape
m, co, ho, wo = dy.shape
assert ci == co, "Channel does not match between mask and dy"
dx = np.zeros((m, ci, hi, wi))
x_pad = np.pad(x, ((0, 0), (0, 0), (padding, padding), (padding, padding)),
mode="constant", constant_values=(0, 0))
dx_pad = np.pad(dx, ((0, 0), (0, 0), (padding, padding), (padding, padding)),
mode="constant", constant_values=(0, 0))
for i in range(m):
xm_pad = x_pad[i]
dxm = dx_pad[i]
for h in range(ho):
for w in range(wo):
for c in range(co):
h_start = h * stride
h_end = h * stride + f
w_start = w * stride
w_end = w * stride + f
xm_sliced = xm_pad[c, h_start:h_end, w_start:w_end]
xm_sliced_max = np.max(xm_sliced)
mask = xm_sliced == xm_sliced_max
dxm[c, h_start:h_end, w_start:w_end] += mask * dy[i, c, h, w]
if padding != 0:
dx[i] = dxm[:, padding:-padding, padding:-padding]
else:
dx[i] = dxm
return dx
class AveragePooling2DLayer(LayerBase):
def __init__(self, kernel_size, stride=1, padding=0):
super().__init__()
self.cache = {}
self.hparams = {
"kernel_size": kernel_size,
"stride": stride,
"padding": padding,
}
def id(self):
return "AveragePooling2D"
def forward(self, x):
stride = self.hparams["stride"]
padding = self.hparams["padding"]
f = self.hparams["kernel_size"]
m, c, hi, wi = x.shape
ho = int((hi - f + 2 * padding) / stride) + 1
wo = int((wi - f + 2 * padding) / stride) + 1
y = np.zeros((m, c, ho, wo))
x_pad = np.pad(x, ((0, 0), (0, 0), (padding, padding), (padding, padding)),
mode="constant", constant_values=(0, 0))
for i in range(m):
xm = x_pad[i]
for h in range(ho):
h_start = h * stride
h_end = h * stride + f
for w in range(wo):
w_start = w * stride
w_end = w * stride + f
xm_sliced = xm[:, h_start:h_end, w_start:w_end]
xm_sliced_mean = np.mean(xm_sliced, axis=(1, 2))
y[i, :, h, w] = xm_sliced_mean
self.cache["x"] = x
return y
def backward(self, dy):
x = self.cache["x"]
stride = self.hparams["stride"]
padding = self.hparams["padding"]
f = self.hparams["kernel_size"]
m, ci, hi, wi = x.shape
m, co, ho, wo = dy.shape
assert ci == co, "Channel does not match between mask and dy"
dx = np.zeros((m, ci, hi, wi))
dx_pad = np.pad(dx, ((0, 0), (0, 0), (padding, padding), (padding, padding)),
mode="constant", constant_values=(0, 0))
for i in range(m):
dxm = dx_pad[i]
for h in range(ho):
for w in range(wo):
for c in range(co):
h_start = h * stride
h_end = h * stride + f
w_start = w * stride
w_end = w * stride + f
dxm[c, h_start:h_end, w_start:w_end] += dy[i, c, h, w] / (f * f)
if padding != 0:
dx[i] = dxm[:, padding:-padding, padding:-padding]
else:
dx[i] = dxm
return dx
| 31.252941 | 88 | 0.453228 | 682 | 5,313 | 3.385631 | 0.108504 | 0.139454 | 0.127328 | 0.084885 | 0.923777 | 0.918579 | 0.918579 | 0.918579 | 0.918579 | 0.918146 | 0 | 0.016763 | 0.416149 | 5,313 | 169 | 89 | 31.43787 | 0.727595 | 0 | 0 | 0.859375 | 0 | 0 | 0.056476 | 0 | 0 | 0 | 0 | 0 | 0.015625 | 1 | 0.0625 | false | 0 | 0.015625 | 0.015625 | 0.140625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b93d92233fff27afb7def16040ff978b27366c7a | 154 | py | Python | encript.py | the7s/swy_novel_server | a0ee3685df0ee6e0b260adb9a9d5b7fab93fdd2e | [
"Apache-2.0"
] | null | null | null | encript.py | the7s/swy_novel_server | a0ee3685df0ee6e0b260adb9a9d5b7fab93fdd2e | [
"Apache-2.0"
] | null | null | null | encript.py | the7s/swy_novel_server | a0ee3685df0ee6e0b260adb9a9d5b7fab93fdd2e | [
"Apache-2.0"
] | null | null | null |
def encrypt(string):
return string.replace('/', '^').replace('.', '*')
def decrypt(string):
return string.replace('^', '/').replace('*', '.')
| 17.111111 | 53 | 0.545455 | 14 | 154 | 6 | 0.428571 | 0.285714 | 0.428571 | 0.595238 | 0.761905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.155844 | 154 | 8 | 54 | 19.25 | 0.646154 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
b96cfac0e994eb49833766d9bb40c01a9f97b031 | 190 | py | Python | tavern/util/loader/__init__.py | BangWork/tavern | 050308841461894a28b07bd2ece85a9b48ff2df4 | [
"MIT"
] | null | null | null | tavern/util/loader/__init__.py | BangWork/tavern | 050308841461894a28b07bd2ece85a9b48ff2df4 | [
"MIT"
] | null | null | null | tavern/util/loader/__init__.py | BangWork/tavern | 050308841461894a28b07bd2ece85a9b48ff2df4 | [
"MIT"
] | null | null | null | from .convert_token import *
from .load_case import *
from .load_json_schema import *
from .type_sentinel import *
from .other import *
from .path_loader import *
from .yaml_loader import *
| 23.75 | 31 | 0.778947 | 28 | 190 | 5.035714 | 0.5 | 0.425532 | 0.198582 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147368 | 190 | 7 | 32 | 27.142857 | 0.87037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b9bd33e69d7d6e7abb55104440225753d7f57031 | 1,611 | py | Python | samplesheets/migrations/0006_update_uuid.py | bihealth/sodar-server | 0c6a03c274ab34cd8987280fe97dc8989551d4bd | [
"MIT"
] | null | null | null | samplesheets/migrations/0006_update_uuid.py | bihealth/sodar-server | 0c6a03c274ab34cd8987280fe97dc8989551d4bd | [
"MIT"
] | 1 | 2021-05-28T10:59:49.000Z | 2021-06-03T12:30:23.000Z | samplesheets/migrations/0006_update_uuid.py | bihealth/sodar-server | 0c6a03c274ab34cd8987280fe97dc8989551d4bd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-23 10:53
from __future__ import unicode_literals
import uuid
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samplesheets', '0005_rename_uuid'),
]
operations = [
migrations.AlterField(
model_name='assay',
name='sodar_uuid',
field=models.UUIDField(default=uuid.uuid4, help_text='SODAR UUID for the object', unique=True),
),
migrations.AlterField(
model_name='genericmaterial',
name='sodar_uuid',
field=models.UUIDField(default=uuid.uuid4, help_text='SODAR UUID for the object', unique=True),
),
migrations.AlterField(
model_name='investigation',
name='sodar_uuid',
field=models.UUIDField(default=uuid.uuid4, help_text='SODAR UUID for the object', unique=True),
),
migrations.AlterField(
model_name='process',
name='sodar_uuid',
field=models.UUIDField(default=uuid.uuid4, help_text='SODAR UUID for the object', unique=True),
),
migrations.AlterField(
model_name='protocol',
name='sodar_uuid',
field=models.UUIDField(default=uuid.uuid4, help_text='SODAR UUID for the object', unique=True),
),
migrations.AlterField(
model_name='study',
name='sodar_uuid',
field=models.UUIDField(default=uuid.uuid4, help_text='SODAR UUID for the object', unique=True),
),
]
| 33.5625 | 107 | 0.609559 | 176 | 1,611 | 5.4375 | 0.289773 | 0.112853 | 0.15674 | 0.181818 | 0.703239 | 0.703239 | 0.703239 | 0.703239 | 0.703239 | 0.703239 | 0 | 0.023993 | 0.275605 | 1,611 | 47 | 108 | 34.276596 | 0.796058 | 0.042831 | 0 | 0.615385 | 1 | 0 | 0.189084 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b9c16b2ab6e07f920a8ed23687143e86af6d643f | 19,109 | py | Python | docs/custom_iwdee_table_data.py | xanthics/infinity_pickpocket_list | a08a9f202f039aef6b79d1478eaf8c447f3d63d8 | [
"MIT"
] | 1 | 2021-05-29T07:06:35.000Z | 2021-05-29T07:06:35.000Z | docs/custom_iwdee_table_data.py | xanthics/infinity_pickpocket_list | a08a9f202f039aef6b79d1478eaf8c447f3d63d8 | [
"MIT"
] | null | null | null | docs/custom_iwdee_table_data.py | xanthics/infinity_pickpocket_list | a08a9f202f039aef6b79d1478eaf8c447f3d63d8 | [
"MIT"
] | null | null | null | data = [
["Area", "NPC", "XP", "Gold Carried", "Pickpocket Skill", "Item Price (base)", "Item Type", "Item"],
["animtest (Spawned)", "Arundel", 3000, 0, 10, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["animtest (Spawned)", "Krilag", 120000, 5, 10, 3500, "Ring", "Ring of Reckless Action", "Ring_Ring of Reckless Action"],
["animtest (Spawned)", "Krilag", 120000, 5, 10, 0, "Gold pieces", "Gold Piece (10000)", "Gold pieces_Gold Piece"],
["animtest (Spawned)", "Krilag", 120000, 5, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["animtest (Spawned)", "Krilag", 120000, 5, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["animtest (Spawned)", "Krilag", 120000, 5, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["animtest (Spawned)", "Orc", 650, 0, 10, 30, "Arrows", "Arrow of Fire +1 (3)", "Arrows_Arrow of Fire +1"],
["ar1001 - Easthaven (prologue) - Temple of Tempus", "Everard", 5000, 21, 10, 200, "Potion", "Potion of Healing", "Potion_Potion of Healing"],
["ar1001 - Easthaven (prologue) - Temple of Tempus", "Everard", 5000, 21, 10, 200, "Potion", "Potion of Healing", "Potion_Potion of Healing"],
["ar1008 - Easthaven (prologue) - Snowdrift Inn", "Erevain Blacksheaf", 6000, 87, 10, 75, "Armor", "Chain Mail", "Armor_Chain Mail"],
["ar1100 - Easthaven (finale)", "Everard", 5000, 21, 10, 200, "Potion", "Potion of Healing", "Potion_Potion of Healing"],
["ar1100 - Easthaven (finale)", "Everard", 5000, 21, 10, 200, "Potion", "Potion of Healing", "Potion_Potion of Healing"],
["ar1101 - Easthaven (finale) - Temple of Tempus/ice tower first floor", "Everard", 5000, 21, 10, 200, "Potion", "Potion of Healing", "Potion_Potion of Healing"],
["ar1101 - Easthaven (finale) - Temple of Tempus/ice tower first floor", "Everard", 5000, 21, 10, 200, "Potion", "Potion of Healing", "Potion_Potion of Healing"],
["ar2003 - Kuldahar Pass - Gherg's tower", "Ghereg the Greater Ogre", 12000, 0, 10, 4000, "Potion", "Potion of Life Transference", "Potion_Potion of Life Transference"],
["ar2004 - Kuldahar Pass - Mill - entrance", "Orc", 650, 0, 10, 30, "Arrows", "Arrow of Fire +1 (3)", "Arrows_Arrow of Fire +1"],
["ar2004 - Kuldahar Pass - Mill - entrance", "Uligar", 2000, 55, 10, 500, "Ring", "Ring of the Warrior", "Ring_Ring of the Warrior"],
["ar2004 - Kuldahar Pass - Mill - entrance", "Uligar", 2000, 55, 10, 200, "Scroll", "Blur", "Scroll_Blur"],
["ar2100 (Spawned) - Kuldahar", "Arundel", 3000, 0, 10, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar2102 - Kuldahar - Orrick the Grey's tower - study", "Orrick the Gray", 4000, 47, 10, 1800, "Amulet", "Amulet of Metaspell Influence", "Amulet_Amulet of Metaspell Influence"],
["ar2108 - Kuldahar - Airship of Oswald Fiddlebender", "Oswald Fiddlebender", 975, 23, 10, 10000, "Amulet", "Necklace of Missiles", "Amulet_Necklace of Missiles"],
["ar2108 - Kuldahar - Airship of Oswald Fiddlebender", "Oswald Fiddlebender", 975, 23, 10, 9000, "Ring", "Ring of Protection +2", "Ring_Ring of Protection +2"],
["ar2108 - Kuldahar - Airship of Oswald Fiddlebender", "Oswald Fiddlebender", 975, 23, 10, 500, "Potion", "Oil of Fiery Burning", "Potion_Oil of Fiery Burning"],
["ar2108 - Kuldahar - Airship of Oswald Fiddlebender", "Oswald Fiddlebender", 975, 23, 10, 500, "Potion", "Oil of Fiery Burning", "Potion_Oil of Fiery Burning"],
["ar2108 - Kuldahar - Airship of Oswald Fiddlebender", "Oswald Fiddlebender", 975, 23, 10, 250, "Potion", "Potion of Firebreath", "Potion_Potion of Firebreath"],
["ar2112 - Kuldahar - Home of Arundel - first floor", "Arundel", 3000, 0, 10, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar3301 - Vale of Shadows - Temple of Myrkul", "Therik", 5000, 0, 10, 0, "Books & misc", "rndtre50.itm", "Books & misc_rndtre50.itm"],
["ar3501 - Vale of Shadows - Tomb of Kresselack - first level", "Mytos", 6000, 0, 10, 8000, "Potion", "Potion of Holy Transference", "Potion_Potion of Holy Transference"],
["ar4003 - Dragon's Eye - third dungeon level (Presio)", "Undead Lieutenant", 7000, 0, 10, 25, "Large sword", "Bastard Sword ", "Large sword_Bastard Sword "],
["ar4003 - Dragon's Eye - third dungeon level (Presio)", "Undead Lieutenant", 7000, 0, 10, 25, "Large sword", "Bastard Sword ", "Large sword_Bastard Sword "],
["ar4004 - Dragon's Eye - fourth dungeon level (Eldathyn)", "Albion", 9000, 0, 10, 150, "Darts", "Asp's Nest +1 (5)", "Darts_Asp's Nest +1"],
["ar4004 - Dragon's Eye - fourth dungeon level (Eldathyn)", "Eldathyn", 2000, 0, 10, 150, "Darts", "Asp's Nest +1 (3)", "Darts_Asp's Nest +1"],
["ar4004 - Dragon's Eye - fourth dungeon level (Eldathyn)", "Eldathyn", 2000, 0, 10, 150, "Darts", "Asp's Nest +1 (3)", "Darts_Asp's Nest +1"],
["ar4004 - Dragon's Eye - fourth dungeon level (Eldathyn)", "Geelo the Librarian", 7500, 0, 10, 150, "Darts", "Asp's Nest +1 (3)", "Darts_Asp's Nest +1"],
["ar4004 - Dragon's Eye - fourth dungeon level (Eldathyn)", "The High Ritualist", 10000, 0, 10, 5000, "Books & misc", "Tome of Understanding", "Books & misc_Tome of Understanding"],
["ar4004 - Dragon's Eye - fourth dungeon level (Eldathyn)", "The High Summoner", 7500, 0, 10, 16305, "Quarterstaff", "The Summoner's Staff +3", "Quarterstaff_The Summoner's Staff +3"],
["ar4005 - Dragon's Eye - fifth dungeon level (Yxunomei)", "The High Torturer", 8000, 0, 10, 800, "Ring", "Ring of Pain Amplification", "Ring_Ring of Pain Amplification"],
["ar4005 - Dragon's Eye - fifth dungeon level (Yxunomei)", "The High Torturer", 8000, 0, 10, 500, "Potion", "Oil of Speed", "Potion_Oil of Speed"],
["ar5401 - Severed Hand - Tower of Sheverash - first floor, Kaylessa", "Kaylessa", 40000, 0, 10, 6000, "Amulet", "Symbol of Solonor Thelandira", "Amulet_Symbol of Solonor Thelandira"],
["ar5401 - Severed Hand - Tower of Sheverash - first floor, Kaylessa", "Kaylessa", 40000, 0, 10, 100, "Arrows", "Arrow of the Hand +8 (40)", "Arrows_Arrow of the Hand +8"],
["ar5404 - Severed Hand - Tower of Sheverash - fourth floor", "Ancient Wyvern", 80000, 7669, 10, 5000, "Potion", "Potion of Vitality", "Potion_Potion of Vitality"],
["ar5404 - Severed Hand - Tower of Sheverash - fourth floor", "Ancient Wyvern", 80000, 7669, 10, 1500, "Gem", "Emerald (3)", "Gem_Emerald"],
["ar5404 - Severed Hand - Tower of Sheverash - fourth floor", "Ancient Wyvern", 80000, 7669, 10, 375, "Gem", "Moonbar Gem (3)", "Gem_Moonbar Gem"],
["ar5404 - Severed Hand - Tower of Sheverash - fourth floor", "Ancient Wyvern", 80000, 7669, 10, 50, "Gem", "Moonstone Gem (5)", "Gem_Moonstone Gem"],
["ar5404 - Severed Hand - Tower of Sheverash - fourth floor", "Ancient Wyvern", 80000, 7669, 10, 0, "Books & misc", "mernmanu.itm (TLK missing name)", "Books & misc_mernmanu.itm (TLK missing name)"],
["ar5404 - Severed Hand - Tower of Sheverash - fourth floor", "Ancient Wyvern", 80000, 7669, 10, 0, "Books & misc", "merntome.itm (TLK missing name)", "Books & misc_merntome.itm (TLK missing name)"],
["ar6003 - Dorn's Deep - orog cave, Saablic, Krilag", "Krilag", 120000, 5, 10, 3500, "Ring", "Ring of Reckless Action", "Ring_Ring of Reckless Action"],
["ar6003 - Dorn's Deep - orog cave, Saablic, Krilag", "Krilag", 120000, 5, 10, 0, "Gold pieces", "Gold Piece (10000)", "Gold pieces_Gold Piece"],
["ar6003 - Dorn's Deep - orog cave, Saablic, Krilag", "Krilag", 120000, 5, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar6003 - Dorn's Deep - orog cave, Saablic, Krilag", "Krilag", 120000, 5, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar6003 - Dorn's Deep - orog cave, Saablic, Krilag", "Krilag", 120000, 5, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar6014 - Dorn's Deep - Bandoth's cave", "Bandoth", 30000, 0, 10, 3000, "Ring", "Ring of Aura Transfusion", "Ring_Ring of Aura Transfusion"],
["ar7001 - Wyrm's Tooth Glacier - aquarium interior, ice salamander lair", "Vera", 1400, 0, 25, 6000, "Amulet", "Amulet of Protection", "Amulet_Amulet of Protection"],
["ar7004 - Wyrm's Tooth Glacier - frost giant cave", "Joril", 150000, 0, 10, 5000, "Books & misc", "Manual of Gainful Exercise", "Books & misc_Manual of Gainful Exercise"],
["ar7004 - Wyrm's Tooth Glacier - frost giant cave", "Kontik", 60000, 0, 10, 35000, "Ring", "Kontik's Ring of Wizardry", "Ring_Kontik's Ring of Wizardry"],
["ar7004 - Wyrm's Tooth Glacier - frost giant cave", "Kontik", 60000, 0, 10, 9000, "Ring", "Ring of Protection +2", "Ring_Ring of Protection +2"],
["ar7004 - Wyrm's Tooth Glacier - frost giant cave", "Kontik", 60000, 0, 10, 5000, "Books & misc", "Tome of Clear Thought", "Books & misc_Tome of Clear Thought"],
["ar8005 - Lower Dorn's Deep - Order of the Kraken garde", "Callard", 300, 0, 10, 7, "Scroll", "Portrait of Marketh", "Scroll_Portrait of Marketh"],
["ar8007 (Spawned) - Lower Dorn's Deep - Order of the Kraken manor - second floor, Mekrath", "Kraken Society Mage", 30000, 50, 10, 0, "Books & misc", "merntome.itm (TLK missing name)", "Books & misc_merntome.itm (TLK missing name)"],
["ar8010 (Spawned) - Lower Dorn's Deep - Malavon's lair", "Malavon", 200000, 0, 10, 50, "Ring", "Onyx Ring", "Ring_Onyx Ring"],
["ar8010 - Lower Dorn's Deep - Malavon's lair", "Blind Mycohulk", 12000, 0, 10, 40, "Books & misc", "Umber Hulk Hide", "Books & misc_Umber Hulk Hide"],
["ar8011 - Lower Dorn's Deep - forge, Ilmadia", "Maiden Ilmadia", 200000, 85, 10, 18000, "Amulet", "Symbol of Corellon Larethian", "Amulet_Symbol of Corellon Larethian"],
["ar8011 - Lower Dorn's Deep - forge, Ilmadia", "Maiden Ilmadia", 200000, 85, 10, 100, "Arrows", "Arrow of the Hand +8 (80)", "Arrows_Arrow of the Hand +8"],
["ar8011 - Lower Dorn's Deep - forge, Ilmadia", "Maiden Ilmadia", 200000, 85, 10, 0, "Books & misc", "Maiden Ilmadia's Badge", "Books & misc_Maiden Ilmadia's Badge"],
["ar9100 (Spawned) - Lonelywood", "Alpheus", 60000, 69, 10, 28000, "Amulet", "Kossuth's Blood", "Amulet_Kossuth's Blood"],
["ar9100 (Spawned) - Lonelywood", "Alpheus", 60000, 69, 10, 10000, "Scroll", "Dragon's Breath", "Scroll_Dragon's Breath"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 500, "Potion", "Oil of Speed", "Potion_Oil of Speed"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 500, "Potion", "Oil of Speed", "Potion_Oil of Speed"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["ar9100 (Spawned) - Lonelywood", "Purvis", 90000, 87, 15, 500, "Potion", "Oil of Speed", "Potion_Oil of Speed"],
["ar9100 (Spawned) - Lonelywood", "Vaarglan", 60000, 113, 10, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9100 (Spawned) - Lonelywood", "Vaarglan", 60000, 113, 10, 10000, "Potion", "Potion of Fast Casting", "Potion_Potion of Fast Casting"],
["ar9100 (Spawned) - Lonelywood", "Vaarglan", 60000, 113, 10, 9000, "Ring", "Ring of Protection +2", "Ring_Ring of Protection +2"],
["ar9100 (Spawned) - Lonelywood", "Vaarglan", 60000, 113, 10, 1800, "Amulet", "Amulet of Metaspell Influence", "Amulet_Amulet of Metaspell Influence"],
["ar9100 (Spawned) - Lonelywood", "Vaarglan", 60000, 113, 10, 600, "Scroll", "Emotion, Hope", "Scroll_Emotion, Hope"],
["ar9101 - Lonelywood - Whistling Gallows - first floor", "Hobart Stubbletoes", 40000, 89, 13, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar9101 - Lonelywood - Whistling Gallows - first floor", "Kieran Nye", 4000, 54, 10, 5000, "Ring", "Ring of Protection +4", "Ring_Ring of Protection +4"],
["ar9106 (Spawned) - Lonelywood - Thurlow home - first floor", "Purvis", 90000, 87, 15, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9106 (Spawned) - Lonelywood - Thurlow home - first floor", "Purvis", 90000, 87, 15, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["ar9106 (Spawned) - Lonelywood - Thurlow home - first floor", "Purvis", 90000, 87, 15, 500, "Potion", "Oil of Speed", "Potion_Oil of Speed"],
["ar9107 (Spawned) - Lonelywood - Thurlow home - second floor", "Purvis", 90000, 87, 15, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9107 (Spawned) - Lonelywood - Thurlow home - second floor", "Purvis", 90000, 87, 15, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["ar9107 (Spawned) - Lonelywood - Thurlow home - second floor", "Purvis", 90000, 87, 15, 500, "Potion", "Oil of Speed", "Potion_Oil of Speed"],
["ar9110 - Lonelywood - Home of Purvis", "Purvis", 90000, 87, 15, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9110 - Lonelywood - Home of Purvis", "Purvis", 90000, 87, 15, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["ar9501 - Gloomfrost interior - first level, Tiernon", "Tiernon", 975, 0, 10, 1750, "Amulet", "Tiernon's Hearthstone", "Amulet_Tiernon's Hearthstone"],
["ar9700 - Anauroch Castle - outer courtyard (TotL start area)", "Ancient Timewyvern", 100000, 12544, 10, 40000, "Potion", "Haste Potion", "Potion_Haste Potion"],
["ar9700 - Anauroch Castle - outer courtyard (TotL start area)", "Ancient Timewyvern", 100000, 12544, 10, 5000, "Gem", "Rogue Stone", "Gem_Rogue Stone"],
["ar9700 - Anauroch Castle - outer courtyard (TotL start area)", "Ancient Timewyvern", 100000, 12544, 10, 0, "Books & misc", "mernmanu.itm (TLK missing name)", "Books & misc_mernmanu.itm (TLK missing name)"],
["ar9700 - Anauroch Castle - outer courtyard (TotL start area)", "Ancient Timewyvern", 100000, 12544, 10, 0, "Books & misc", "merntome.itm (TLK missing name)", "Books & misc_merntome.itm (TLK missing name)"],
["ar9700 - Anauroch Castle - outer courtyard (TotL start area)", "Hobart Stubbletoes", 40000, 89, 13, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar9704 - Anauroch Castle - west Tower upstairs - Harald", "Harald", 20000, 91, 10, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9704 - Anauroch Castle - west Tower upstairs - Harald", "Harald", 20000, 91, 10, 5000, "Books & misc", "Tome of Leadership and Influence", "Books & misc_Tome of Leadership and Influence"],
["ar9706 - Anauroch Castle - north Tower upstairs - harpy queen", "Harpy Queen", 60000, 0, 10, 5000, "Ring", "Ring of Protection +4", "Ring_Ring of Protection +4"],
["ar9706 - Anauroch Castle - north Tower upstairs - harpy queen", "Harpy Queen", 60000, 0, 10, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["ar9706 - Anauroch Castle - north Tower upstairs - harpy queen", "Harpy Queen", 60000, 0, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar9706 - Anauroch Castle - north Tower upstairs - harpy queen", "Harpy Queen", 60000, 0, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar9706 - Anauroch Castle - north Tower upstairs - harpy queen", "Harpy Queen", 60000, 0, 10, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Adran Runeshadow", 90000, 1230, 10, 10000, "Scroll", "Wail of the Banshee", "Scroll_Wail of the Banshee"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Adran Runeshadow", 90000, 1230, 10, 10000, "Scroll", "Comet", "Scroll_Comet"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Adran Runeshadow", 90000, 1230, 10, 9000, "Ring", "Ring of Protection +2", "Ring_Ring of Protection +2"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Adran Runeshadow", 90000, 1230, 10, 5000, "Books & misc", "Tome of Clear Thought", "Books & misc_Tome of Clear Thought"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Adran Runeshadow", 90000, 1230, 10, 3000, "Ring", "Ring of Aura Transfusion", "Ring_Ring of Aura Transfusion"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Criek of Bane", 90000, 231, 10, 5000, "Ring", "Ring of Protection +4", "Ring_Ring of Protection +4"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Shelhai", 90000, 800, 10, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["ar9708 - Anauroch Castle - east Tower upstairs - Banites", "Shelhai", 90000, 800, 10, 1000, "Ring", "Greater Ring of the Warrior", "Ring_Greater Ring of the Warrior"],
["ar9715 - Anauroch Castle - hideout of Hobart", "Hobart Stubbletoes", 40000, 89, 13, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["unknown", "Adran Runeshadow (animtest)", 6000, 1230, 10, 9000, "Ring", "Ring of Protection +2", "Ring_Ring of Protection +2"],
["unknown", "Adran Runeshadow (animtest)", 6000, 1230, 10, 1500, "Ring", "Ring of Fire Resistance", "Ring_Ring of Fire Resistance"],
["unknown", "Baern (hparty02)", 0, 0, 10, 3500, "Ring", "Ring of Protection +3", "Ring_Ring of Protection +3"],
["unknown", "Gorris (hparty01)", 0, 0, 10, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["unknown", "Gorris (hparty01)", 0, 0, 10, 8000, "Amulet", "Great Black Wolf Talisman", "Amulet_Great Black Wolf Talisman"],
["unknown", "Hobart Stubbletoes (hobarth)", 40000, 89, 13, 0, "Books & misc", "mernbook.itm (TLK missing name)", "Books & misc_mernbook.itm (TLK missing name)"],
["unknown", "Ilauna (hparty05)", 0, 0, 10, 1800, "Amulet", "Amulet of Metaspell Influence", "Amulet_Amulet of Metaspell Influence"],
["unknown", "Kirika (hparty04)", 0, 0, 10, 13500, "Ring", "Ring of Shadows", "Ring_Ring of Shadows"],
["unknown", "Neo-Orog Marauder (neoorogs)", 8000, 0, 10, 0, "Gold pieces", "Gold Piece (250)", "Gold pieces_Gold Piece"],
["unknown", "Purvis (purvish)", 90000, 87, 15, 10000, "Ring", "Ring of Free Action", "Ring_Ring of Free Action"],
["unknown", "Purvis (purvish)", 90000, 87, 15, 500, "Potion", "Potion of Extra Healing", "Potion_Potion of Extra Healing"],
["unknown", "Purvis (purvish)", 90000, 87, 15, 500, "Potion", "Oil of Speed", "Potion_Oil of Speed"],
]
| 149.289063 | 234 | 0.677115 | 2,677 | 19,109 | 4.787075 | 0.140456 | 0.030901 | 0.048381 | 0.047757 | 0.860632 | 0.803902 | 0.765197 | 0.727273 | 0.705501 | 0.691533 | 0 | 0.120527 | 0.150296 | 19,109 | 127 | 235 | 150.464567 | 0.66872 | 0 | 0 | 0.23622 | 0 | 0.007874 | 0.719975 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.031496 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b9c34490eee2ea8aea613ba9ec752a9503fd7e4f | 28,736 | py | Python | zerver/webhooks/github/tests.py | adehnert/zulip | 58adac56638e1a7f3097aaaee9a3accd67a4fd24 | [
"Apache-2.0"
] | 1 | 2016-12-08T13:14:30.000Z | 2016-12-08T13:14:30.000Z | zerver/webhooks/github/tests.py | adehnert/zulip | 58adac56638e1a7f3097aaaee9a3accd67a4fd24 | [
"Apache-2.0"
] | null | null | null | zerver/webhooks/github/tests.py | adehnert/zulip | 58adac56638e1a7f3097aaaee9a3accd67a4fd24 | [
"Apache-2.0"
] | 1 | 2017-10-22T16:36:33.000Z | 2017-10-22T16:36:33.000Z | from mock import MagicMock, patch
from zerver.lib.test_classes import WebhookTestCase
from zerver.lib.webhooks.git import COMMITS_LIMIT
class GithubWebhookTest(WebhookTestCase):
    """End-to-end tests for the GitHub webhook integration.

    Each test posts a recorded GitHub payload (a fixture named after the
    event, e.g. ``push__50_commits``) to the webhook URL and asserts the
    exact Zulip topic and message the integration renders for it.  The
    ``*_ignore`` tests at the bottom assert that certain events/actions are
    deliberately dropped without sending any message.
    """
    STREAM_NAME = 'github'
    URL_TEMPLATE = "/api/v1/external/github?stream={stream}&api_key={api_key}"
    FIXTURE_DIR_NAME = 'github'
    # Expected topics shared by many tests below; each corresponds to the
    # repo/issue/PR/etc. baked into the recorded fixtures.
    EXPECTED_TOPIC_REPO_EVENTS = u"public-repo"
    EXPECTED_TOPIC_ISSUE_EVENTS = u"public-repo / Issue #2 Spelling error in the README file"
    EXPECTED_TOPIC_PR_EVENTS = u"public-repo / PR #1 Update the README with new information"
    EXPECTED_TOPIC_DEPLOYMENT_EVENTS = u"public-repo / Deployment on production"
    EXPECTED_TOPIC_ORGANIZATION_EVENTS = u"baxterandthehackers organization"
    EXPECTED_TOPIC_BRANCH_EVENTS = u"public-repo / changes"
    EXPECTED_TOPIC_WIKI_EVENTS = u"public-repo / Wiki Pages"
    def test_ping_event(self) -> None:
        expected_message = u"GitHub webhook has been successfully configured by TomaszKolek."
        self.send_and_test_stream_message('ping', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_star_event(self) -> None:
        expected_message = u"Codertocat starred the repository."
        expected_topic = u"Hello-World"
        self.send_and_test_stream_message('star', expected_topic, expected_message)
    def test_ping_organization_event(self) -> None:
        expected_message = u"GitHub webhook has been successfully configured by eeshangarg."
        self.send_and_test_stream_message('ping__organization', 'zulip-test-org', expected_message)
    def test_push_delete_branch(self) -> None:
        expected_message = u"eeshangarg [deleted](https://github.com/eeshangarg/public-repo/compare/2e8cf535fb38...000000000000) the branch feature."
        self.send_and_test_stream_message('push__delete_branch', u"public-repo / feature", expected_message)
    def test_push_local_branch_without_commits(self) -> None:
        expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/feature) the branch feature."
        self.send_and_test_stream_message('push__local_branch_without_commits', u"public-repo / feature", expected_message)
    def test_push_1_commit(self) -> None:
        expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))"
        self.send_and_test_stream_message('push__1_commit', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_1_commit_without_username(self) -> None:
        expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/0383613da871...2e8cf535fb38) 1 commit to branch changes. Commits by John Snow (1).\n\n* Update the README ([2e8cf53](https://github.com/eeshangarg/public-repo/commit/2e8cf535fb38a3dab2476cdf856efda904ad4c94))"
        self.send_and_test_stream_message('push__1_commit_without_username', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_1_commit_filtered_by_branches(self) -> None:
        # Branch filter includes 'changes' (the fixture's branch), so the
        # message must still be delivered.
        self.url = self.build_webhook_url('master,changes')
        expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))"
        self.send_and_test_stream_message('push__1_commit', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_multiple_comitters(self) -> None:
        commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
        expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5)
        self.send_and_test_stream_message('push__multiple_committers', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_multiple_comitters_with_others(self) -> None:
        commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
        expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9)
        self.send_and_test_stream_message('push__multiple_committers_with_others', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_multiple_comitters_filtered_by_branches(self) -> None:
        self.url = self.build_webhook_url('master,changes')
        commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
        expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5)
        self.send_and_test_stream_message('push__multiple_committers', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_multiple_comitters_with_others_filtered_by_branches(self) -> None:
        self.url = self.build_webhook_url('master,changes')
        commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
        expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9)
        self.send_and_test_stream_message('push__multiple_committers_with_others', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_50_commits(self) -> None:
        # Only COMMITS_LIMIT commits are rendered; the rest are summarized
        # by the "[and N more commit(s)]" trailer.
        commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n"
        expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format(
            commit_info * COMMITS_LIMIT
        )
        self.send_and_test_stream_message('push__50_commits', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_push_50_commits_filtered_by_branches(self) -> None:
        self.url = self.build_webhook_url(branches='master,changes')
        commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n"
        expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format(
            commit_info * COMMITS_LIMIT
        )
        self.send_and_test_stream_message('push__50_commits', self.EXPECTED_TOPIC_BRANCH_EVENTS, expected_message)
    def test_commit_comment_msg(self) -> None:
        expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b#commitcomment-11056394) on [9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b):\n~~~ quote\nThis is a really good change! :+1:\n~~~"
        self.send_and_test_stream_message('commit_comment', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_create_msg(self) -> None:
        expected_message = u"baxterthehacker created tag 0.0.1."
        self.send_and_test_stream_message('create', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_delete_msg(self) -> None:
        expected_message = u"baxterthehacker deleted tag simple-tag."
        self.send_and_test_stream_message('delete', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_deployment_msg(self) -> None:
        expected_message = u"baxterthehacker created new deployment."
        self.send_and_test_stream_message('deployment', self.EXPECTED_TOPIC_DEPLOYMENT_EVENTS, expected_message)
    def test_deployment_status_msg(self) -> None:
        expected_message = u"Deployment changed status to success."
        self.send_and_test_stream_message('deployment_status', self.EXPECTED_TOPIC_DEPLOYMENT_EVENTS, expected_message)
    def test_fork_msg(self) -> None:
        expected_message = u"baxterandthehackers forked [public-repo](https://github.com/baxterandthehackers/public-repo)."
        self.send_and_test_stream_message('fork', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_issue_comment_msg(self) -> None:
        expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/issues/2#issuecomment-99262140) on [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nYou are totally right! I'll get this fixed right away.\n~~~"
        self.send_and_test_stream_message('issue_comment', self.EXPECTED_TOPIC_ISSUE_EVENTS, expected_message)
    def test_issue_comment_deleted_msg(self) -> None:
        expected_topic = u"Scheduler / Issue #5 This is a new issue"
        expected_message = u"eeshangarg deleted a [comment](https://github.com/eeshangarg/Scheduler/issues/5#issuecomment-425164194) on [Issue #5](https://github.com/eeshangarg/Scheduler/issues/5):\n\n~~~ quote\nThis is a comment on this new issue.\n~~~"
        self.send_and_test_stream_message('issue_comment__deleted', expected_topic, expected_message)
    def test_issue_comment_msg_with_custom_topic_in_url(self) -> None:
        # With ?topic= in the URL the message gains the issue title, since
        # the topic no longer carries it.
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/issues/2#issuecomment-99262140) on [Issue #2 Spelling error in the README file](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nYou are totally right! I'll get this fixed right away.\n~~~"
        self.send_and_test_stream_message('issue_comment', expected_topic, expected_message)
    def test_issue_msg(self) -> None:
        expected_message = u"baxterthehacker opened [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nIt looks like you accidently spelled 'commit' with two 't's.\n~~~"
        self.send_and_test_stream_message('issues', self.EXPECTED_TOPIC_ISSUE_EVENTS, expected_message)
    def test_issue_msg_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"baxterthehacker opened [Issue #2 Spelling error in the README file](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nIt looks like you accidently spelled 'commit' with two 't's.\n~~~"
        self.send_and_test_stream_message('issues', expected_topic, expected_message)
    def test_membership_msg(self) -> None:
        expected_message = u"baxterthehacker added [kdaigle](https://github.com/kdaigle) to the Contractors team."
        self.send_and_test_stream_message('membership', self.EXPECTED_TOPIC_ORGANIZATION_EVENTS, expected_message)
    def test_membership_removal_msg(self) -> None:
        expected_message = u"baxterthehacker removed [kdaigle](https://github.com/kdaigle) from the Contractors team."
        self.send_and_test_stream_message('membership__removal', self.EXPECTED_TOPIC_ORGANIZATION_EVENTS, expected_message)
    def test_member_msg(self) -> None:
        expected_message = u"baxterthehacker added [octocat](https://github.com/octocat) to [public-repo](https://github.com/baxterthehacker/public-repo)."
        self.send_and_test_stream_message('member', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_pull_request_opened_msg(self) -> None:
        expected_message = u"baxterthehacker opened [PR #1](https://github.com/baxterthehacker/public-repo/pull/1) from `changes` to `master`:\n\n~~~ quote\nThis is a pretty simple change that we need to pull into master.\n~~~"
        self.send_and_test_stream_message('pull_request__opened', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_pull_request_opened_with_preassigned_assignee_msg(self) -> None:
        expected_topic = u"Scheduler / PR #4 Improve README"
        expected_message = u"eeshangarg opened [PR #4](https://github.com/eeshangarg/Scheduler/pull/4) (assigned to eeshangarg) from `improve-readme-2` to `master`."
        self.send_and_test_stream_message('pull_request__opened_with_preassigned_assignee', expected_topic, expected_message)
    def test_pull_request_opened_msg_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"baxterthehacker opened [PR #1 Update the README with new information](https://github.com/baxterthehacker/public-repo/pull/1) from `changes` to `master`:\n\n~~~ quote\nThis is a pretty simple change that we need to pull into master.\n~~~"
        self.send_and_test_stream_message('pull_request__opened', expected_topic, expected_message)
    def test_pull_request_synchronized_msg(self) -> None:
        expected_message = u"baxterthehacker updated [PR #1](https://github.com/baxterthehacker/public-repo/pull/1) from `changes` to `master`."
        self.send_and_test_stream_message('pull_request__synchronized', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_pull_request_closed_msg(self) -> None:
        expected_message = u"baxterthehacker closed without merge [PR #1](https://github.com/baxterthehacker/public-repo/pull/1)."
        self.send_and_test_stream_message('pull_request__closed', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_pull_request_closed_msg_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"baxterthehacker closed without merge [PR #1 Update the README with new information](https://github.com/baxterthehacker/public-repo/pull/1)."
        self.send_and_test_stream_message('pull_request__closed', expected_topic, expected_message)
    def test_pull_request_merged_msg(self) -> None:
        expected_message = u"baxterthehacker merged [PR #1](https://github.com/baxterthehacker/public-repo/pull/1)."
        self.send_and_test_stream_message('pull_request__merged', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_public_msg(self) -> None:
        expected_message = u"baxterthehacker made [the repository](https://github.com/baxterthehacker/public-repo) public."
        self.send_and_test_stream_message('public', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_wiki_pages_msg(self) -> None:
        expected_message = u"jasonrudolph:\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)"
        self.send_and_test_stream_message('gollum__wiki_pages', self.EXPECTED_TOPIC_WIKI_EVENTS, expected_message)
    def test_watch_msg(self) -> None:
        expected_message = u"baxterthehacker starred [the repository](https://github.com/baxterthehacker/public-repo)."
        self.send_and_test_stream_message('watch__repository', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_repository_msg(self) -> None:
        expected_message = u"baxterthehacker created [the repository](https://github.com/baxterandthehackers/public-repo)."
        self.send_and_test_stream_message('repository', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_team_add_msg(self) -> None:
        expected_message = u"[The repository](https://github.com/baxterandthehackers/public-repo) was added to team github."
        self.send_and_test_stream_message('team_add', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_release_msg(self) -> None:
        expected_message = u"baxterthehacker published [the release](https://github.com/baxterthehacker/public-repo/releases/tag/0.0.1)."
        self.send_and_test_stream_message('release', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_page_build_msg(self) -> None:
        expected_message = u"Github Pages build, trigerred by baxterthehacker, has finished building."
        self.send_and_test_stream_message('page_build', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_status_msg(self) -> None:
        expected_message = u"[9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b) changed its status to success."
        self.send_and_test_stream_message('status', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_status_with_target_url_msg(self) -> None:
        expected_message = u"[9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b) changed its status to [success](https://example.com/build/status)."
        self.send_and_test_stream_message('status__with_target_url', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_pull_request_review_msg(self) -> None:
        expected_message = u"baxterthehacker submitted [PR Review](https://github.com/baxterthehacker/public-repo/pull/1#pullrequestreview-2626884)."
        self.send_and_test_stream_message('pull_request_review', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_pull_request_review_msg_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"baxterthehacker submitted [PR Review for #1 Update the README with new information](https://github.com/baxterthehacker/public-repo/pull/1#pullrequestreview-2626884)."
        self.send_and_test_stream_message('pull_request_review', expected_topic, expected_message)
    def test_pull_request_review_comment_msg(self) -> None:
        expected_message = u"baxterthehacker created [PR Review Comment](https://github.com/baxterthehacker/public-repo/pull/1#discussion_r29724692):\n\n~~~ quote\nMaybe you should use more emojji on this line.\n~~~"
        self.send_and_test_stream_message('pull_request_review_comment', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_pull_request_review_comment_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"baxterthehacker created [PR Review Comment on #1 Update the README with new information](https://github.com/baxterthehacker/public-repo/pull/1#discussion_r29724692):\n\n~~~ quote\nMaybe you should use more emojji on this line.\n~~~"
        self.send_and_test_stream_message('pull_request_review_comment', expected_topic, expected_message)
    def test_push_tag_msg(self) -> None:
        expected_message = u"baxterthehacker pushed tag abc."
        self.send_and_test_stream_message('push__tag', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)
    def test_pull_request_edited_msg(self) -> None:
        expected_message = u"baxterthehacker edited [PR #1](https://github.com/baxterthehacker/public-repo/pull/1) from `changes` to `master`."
        self.send_and_test_stream_message('pull_request__edited', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_pull_request_assigned_msg(self) -> None:
        expected_message = u"baxterthehacker assigned [PR #1](https://github.com/baxterthehacker/public-repo/pull/1) to baxterthehacker."
        self.send_and_test_stream_message('pull_request__assigned', self.EXPECTED_TOPIC_PR_EVENTS, expected_message)
    def test_pull_request_assigned_msg_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"baxterthehacker assigned [PR #1 Update the README with new information](https://github.com/baxterthehacker/public-repo/pull/1) to baxterthehacker."
        self.send_and_test_stream_message('pull_request__assigned', expected_topic, expected_message)
    def test_pull_request_unassigned_msg(self) -> None:
        expected_message = u"eeshangarg unassigned [PR #1](https://github.com/zulip-test-org/helloworld/pull/1)."
        self.send_and_test_stream_message('pull_request__unassigned', 'helloworld / PR #1 Mention that Zulip rocks!', expected_message)
    def test_pull_request_review_requested_msg(self) -> None:
        expected_message = u"**eeshangarg** requested [showell](https://github.com/showell) for a review on [PR #1](https://github.com/eeshangarg/Scheduler/pull/1)."
        self.send_and_test_stream_message('pull_request__review_requested', 'Scheduler / PR #1 This is just a test commit', expected_message)
    def test_pull_request_review_requested_singular_key_msg(self) -> None:
        expected_message = u"**eeshangarg** requested [rishig](https://github.com/rishig) for a review on [PR #6](https://github.com/eeshangarg/Scheduler/pull/6)."
        self.send_and_test_stream_message('pull_request__review_requested_singular_key',
                                          'Scheduler / PR #6 Mention how awesome this project is in ...',
                                          expected_message)
    def test_pull_request_review_requested_multiple_reviwers_msg(self) -> None:
        expected_message = u"**eeshangarg** requested [showell](https://github.com/showell), and [timabbott](https://github.com/timabbott) for a review on [PR #1](https://github.com/eeshangarg/Scheduler/pull/1)."
        self.send_and_test_stream_message('pull_request__review_requested_multiple_reviewers',
                                          'Scheduler / PR #1 This is just a test commit',
                                          expected_message)
    def test_pull_request_review_requested_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
        expected_message = u"**eeshangarg** requested [showell](https://github.com/showell) for a review on [PR #1 This is just a test commit](https://github.com/eeshangarg/Scheduler/pull/1)."
        self.send_and_test_stream_message('pull_request__review_requested', expected_topic, expected_message)
    def test_check_run(self) -> None:
        expected_topic = u"hello-world / checks"
        expected_message = u"""
Check [randscape](http://github.com/github/hello-world/runs/4) completed (success). ([d6fde92](http://github.com/github/hello-world/commit/d6fde92930d4715a2b49857d24b940956b26d2d3))
""".strip()
        self.send_and_test_stream_message('check_run__completed', expected_topic, expected_message)
    # The tests below verify events/actions the integration deliberately
    # ignores: the webhook must return success WITHOUT sending a message,
    # so check_send_webhook_message is patched and asserted not-called.
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_check_run_in_progress_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        payload = self.get_body('check_run__in_progress')
        result = self.client_post(self.url, payload,
                                  HTTP_X_GITHUB_EVENT='check_run',
                                  content_type="application/json")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_pull_request_labeled_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        payload = self.get_body('pull_request__labeled')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_pull_request_unlabeled_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        payload = self.get_body('pull_request__unlabeled')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_pull_request_request_review_remove_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        payload = self.get_body('pull_request__request_review_removed')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_push_1_commit_filtered_by_branches_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        # Branch filter excludes 'changes' (the fixture's branch), so the
        # push must be dropped silently.
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push__1_commit')
        result = self.client_post(self.url, payload, content_type="application/json", HTTP_X_GITHUB_EVENT="push")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_push_50_commits_filtered_by_branches_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push__50_commits')
        result = self.client_post(self.url, payload, content_type="application/json", HTTP_X_GITHUB_EVENT="push")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_push_multiple_comitters_filtered_by_branches_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push__multiple_committers')
        result = self.client_post(self.url, payload, content_type="application/json", HTTP_X_GITHUB_EVENT="push")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_push_multiple_comitters_with_others_filtered_by_branches_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push__multiple_committers_with_others')
        result = self.client_post(self.url, payload, content_type="application/json", HTTP_X_GITHUB_EVENT="push")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github.view.check_send_webhook_message')
    def test_repository_vulnerability_alert_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        self.url = self.build_webhook_url()
        payload = self.get_body('repository_vulnerability_alert')
        result = self.client_post(self.url, payload,
                                  HTTP_X_GITHUB_EVENT='repository_vulnerability_alert',
                                  content_type="application/json")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
| 76.834225 | 387 | 0.759396 | 3,721 | 28,736 | 5.538296 | 0.076055 | 0.084433 | 0.048913 | 0.042217 | 0.891062 | 0.870778 | 0.831182 | 0.775815 | 0.722244 | 0.668575 | 0 | 0.032536 | 0.135788 | 28,736 | 373 | 388 | 77.040214 | 0.797294 | 0 | 0 | 0.32 | 0 | 0.163333 | 0.438753 | 0.046875 | 0 | 0 | 0 | 0 | 0.06 | 1 | 0.223333 | false | 0 | 0.01 | 0 | 0.27 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b9f07ac0ba7546334a98e78153898180a93ff16d | 1,642 | py | Python | tests/y_2020/test_2020_day6.py | Stegallo/adventofcode | 74cd3725318150a4fee397a9388326d56ca49333 | [
"MIT"
] | 4 | 2020-11-28T15:46:43.000Z | 2021-12-02T22:37:27.000Z | tests/y_2020/test_2020_day6.py | Stegallo/adventofcode | 74cd3725318150a4fee397a9388326d56ca49333 | [
"MIT"
] | 80 | 2020-07-02T19:01:20.000Z | 2021-12-06T03:48:46.000Z | tests/y_2020/test_2020_day6.py | Stegallo/adventofcode | 74cd3725318150a4fee397a9388326d56ca49333 | [
"MIT"
] | 2 | 2021-04-04T06:22:56.000Z | 2021-05-03T20:23:51.000Z | from unittest.mock import mock_open, patch
from y_2020.day6 import Counter, Day, UserGroup
# Build a single Day instance shared by all tests in this module, stubbing
# builtins.open so the constructor does not read the real puzzle input file.
with patch("builtins.open", mock_open(read_data=":")):
    day = Day()
def test__preprocess_input():
    """Blank lines in the raw input must split answers into per-group UserGroups."""
    day._input_data = [
        "abc", "",
        "a", "b", "c", "",
        "ab", "ac", "",
        "a", "a", "a", "a", "",
        "b",
    ]
    day._preprocess_input()
    expected_groups = [
        UserGroup(answers=Counter({"a": 1, "b": 1, "c": 1}), size=1),
        UserGroup(answers=Counter({"a": 1, "b": 1, "c": 1}), size=3),
        UserGroup(answers=Counter({"a": 2, "b": 1, "c": 1}), size=2),
        UserGroup(answers=Counter({"a": 4}), size=4),
        UserGroup(answers=Counter({"b": 1}), size=1),
    ]
    assert day._Day__user_groups == expected_groups
def test_calculate_1():
    """Part 1: sum of distinct answers per group is 3 + 3 + 3 + 1 + 1 == 11."""
    fixtures = [
        (Counter({"a": 1, "b": 1, "c": 1}), 1),
        (Counter({"a": 1, "b": 1, "c": 1}), 3),
        (Counter({"a": 2, "b": 1, "c": 1}), 2),
        (Counter({"a": 4}), 4),
        (Counter({"b": 1}), 1),
    ]
    day._Day__user_groups = [
        UserGroup(answers=answers, size=size) for answers, size in fixtures
    ]
    assert day._calculate_1() == 11
def test_calculate_2():
    """Part 2: sum of answers given by every member of each group is 6."""
    fixtures = [
        (Counter({"a": 1, "b": 1, "c": 1}), 1),
        (Counter({"a": 1, "b": 1, "c": 1}), 3),
        (Counter({"a": 2, "b": 1, "c": 1}), 2),
        (Counter({"a": 4}), 4),
        (Counter({"b": 1}), 1),
    ]
    day._Day__user_groups = [
        UserGroup(answers=answers, size=size) for answers, size in fixtures
    ]
    assert day._calculate_2() == 6
| 28.807018 | 69 | 0.504872 | 218 | 1,642 | 3.66055 | 0.174312 | 0.300752 | 0.432331 | 0.360902 | 0.714286 | 0.714286 | 0.714286 | 0.714286 | 0.714286 | 0.714286 | 0 | 0.049958 | 0.268575 | 1,642 | 56 | 70 | 29.321429 | 0.614488 | 0 | 0 | 0.583333 | 0 | 0 | 0.037759 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 1 | 0.0625 | false | 0 | 0.041667 | 0 | 0.104167 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6a2c2310ae618108fd1c182012e0253485fa6bc1 | 8,931 | py | Python | tests/test_haystack_his_read.py | sgrah-oss/haystackapi | dc6000120e5ef97b174bb1440460ce170f22026e | [
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null | tests/test_haystack_his_read.py | sgrah-oss/haystackapi | dc6000120e5ef97b174bb1440460ce170f22026e | [
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null | tests/test_haystack_his_read.py | sgrah-oss/haystackapi | dc6000120e5ef97b174bb1440460ce170f22026e | [
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null | from datetime import datetime, date, timedelta
import pytz
from mock import patch
from tzlocal.unix import get_localzone
import haystackapi
from haystackapi import Ref
from haystackapi.ops import HaystackHttpRequest, DEFAULT_MIME_TYPE
from haystackapi.providers import ping
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch('haystackapi.providers.haystack_interface.no_cache')
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_zinc(mock, no_cache) -> None:
    """A zinc-encoded hisRead body must reach the provider with the widest UTC range."""
    # GIVEN a ping provider returning the canned grid, with caching disabled
    mock.return_value = ping.PingGrid
    no_cache.return_value = True
    mime = haystackapi.MODE_ZINC
    req = HaystackHttpRequest()
    req.headers["Content-Type"] = mime
    req.headers["Accept"] = mime
    entity_grid = haystackapi.Grid(columns={'id': {}})
    entity_grid.append({"id": Ref("1234")})
    req.body = haystackapi.dump(entity_grid, mode=mime)
    # WHEN the hisRead operation is invoked
    resp = haystackapi.his_read(req, "dev")
    # THEN the provider received the id plus the full [min, max] UTC interval
    mock.assert_called_once_with(
        Ref("1234"),
        (datetime.min.replace(tzinfo=pytz.UTC), datetime.max.replace(tzinfo=pytz.UTC)),
        None)
    assert resp.status_code == 200
    assert resp.headers["Content-Type"].startswith(mime)
    assert haystackapi.parse(resp.body, mime) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_args(mock) -> None:
    """An id passed as a query argument must be forwarded to the provider."""
    # GIVEN a request carrying the entity id as a URL argument only
    mock.return_value = ping.PingGrid
    mime = DEFAULT_MIME_TYPE
    req = HaystackHttpRequest()
    req.args['id'] = str(Ref("1234"))
    # WHEN the hisRead operation is invoked
    resp = haystackapi.his_read(req, "dev")
    # THEN the provider received the id plus the full [min, max] UTC interval
    mock.assert_called_once_with(
        Ref("1234"),
        (datetime.min.replace(tzinfo=pytz.UTC), datetime.max.replace(tzinfo=pytz.UTC)),
        None)
    assert resp.status_code == 200
    assert resp.headers["Content-Type"].startswith(mime)
    assert haystackapi.parse(resp.body, mime) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_range_today(mock) -> None:
    """range='today' must expand to [local midnight, local midnight + 1 day - 1ms]."""
    # GIVEN a zinc request whose grid carries range="today"
    mock.return_value = ping.PingGrid
    mime = haystackapi.MODE_ZINC
    req = HaystackHttpRequest()
    req.headers["Content-Type"] = mime
    req.headers["Accept"] = mime
    range_grid = haystackapi.Grid(columns=['id', 'range'])
    range_grid.append({"id": Ref("1234"), "range": "today"})
    req.body = haystackapi.dump(range_grid, mode=mime)
    # WHEN the hisRead operation is invoked
    resp = haystackapi.his_read(req, "dev")
    # THEN the provider saw today's local-timezone day as the range
    day_start = datetime.combine(date.today(), datetime.min.time()).replace(tzinfo=get_localzone())
    day_end = day_start + timedelta(days=1, milliseconds=-1)
    mock.assert_called_once_with(Ref("1234"), (day_start, day_end), None)
    assert resp.status_code == 200
    assert resp.headers["Content-Type"].startswith(mime)
    assert haystackapi.parse(resp.body, mime) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_range_yesterday(mock) -> None:
    """`range: "yesterday"` must expand to yesterday's local-midnight bounds."""
    # GIVEN
    mock.return_value = ping.PingGrid
    mime_type = haystackapi.MODE_ZINC
    request = HaystackHttpRequest()
    grid = haystackapi.Grid(columns=['id', 'range'])
    grid.append({"id": Ref("1234"), "range": "yesterday"})
    request.headers["Content-Type"] = mime_type
    request.headers["Accept"] = mime_type
    request.body = haystackapi.dump(grid, mode=mime_type)
    # WHEN
    response = haystackapi.his_read(request, "dev")
    # THEN: [yesterday 00:00 local, yesterday 00:00 + 1 day - 1 ms]
    yesterday = datetime.combine(date.today() - timedelta(days=1), datetime.min.time()) \
        .replace(tzinfo=get_localzone())
    mock.assert_called_once_with(Ref("1234"),
                                 (yesterday, yesterday + timedelta(days=1, milliseconds=-1)), None)
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith(mime_type)
    assert haystackapi.parse(response.body, mime_type) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_range_two_datetime(mock) -> None:
    """A `range` of two comma-separated ISO datetimes is forwarded verbatim."""
    # GIVEN a zinc body whose range joins two ISO datetime strings
    mock.return_value = ping.PingGrid
    zinc = haystackapi.MODE_ZINC
    start = datetime(2020, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
    end = datetime(2020, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
    body_grid = haystackapi.Grid(columns=['id', 'range'])
    body_grid.append({"id": Ref("1234"), "range": start.isoformat() + ',' + end.isoformat()})
    http_request = HaystackHttpRequest()
    http_request.headers["Content-Type"] = zinc
    http_request.headers["Accept"] = zinc
    http_request.body = haystackapi.dump(body_grid, mode=zinc)
    # WHEN
    result = haystackapi.his_read(http_request, "dev")
    # THEN both bounds are passed through unchanged
    mock.assert_called_once_with(Ref("1234"), (start, end), None)
    assert result.status_code == 200
    assert result.headers["Content-Type"].startswith(zinc)
    assert haystackapi.parse(result.body, zinc) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_range_one_datetime(mock) -> None:
    """A single ISO datetime in `range` selects that entire day in UTC."""
    # GIVEN
    mock.return_value = ping.PingGrid
    mime_type = haystackapi.MODE_ZINC
    request = HaystackHttpRequest()
    grid = haystackapi.Grid(columns=['id', 'range'])
    datetime_1 = datetime(2020, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
    grid.append({"id": Ref("1234"), "range": datetime_1.isoformat()})
    request.headers["Content-Type"] = mime_type
    request.headers["Accept"] = mime_type
    request.body = haystackapi.dump(grid, mode=mime_type)
    # WHEN
    response = haystackapi.his_read(request, "dev")
    # THEN: combine() keeps only the date part of datetime_1, so the window is
    # [2020-01-01 00:00 UTC, 2020-01-01 00:00 UTC + 1 day - 1 ms]
    cur_datetime = datetime.combine(datetime_1, datetime.min.time()).replace(tzinfo=pytz.UTC)
    mock.assert_called_once_with(Ref("1234"),
                                 (cur_datetime,
                                  cur_datetime + timedelta(days=1, milliseconds=-1)
                                  ), None)
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith(mime_type)
    assert haystackapi.parse(response.body, mime_type) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_range_two_date(mock) -> None:
    """Two comma-separated dates become midnight datetimes in the local zone."""
    # GIVEN
    mock.return_value = ping.PingGrid
    mime_type = haystackapi.MODE_ZINC
    request = HaystackHttpRequest()
    grid = haystackapi.Grid(columns=['id', 'range'])
    date_1 = date(2020, 1, 1)
    date_2 = date(2020, 1, 2)
    grid.append({"id": Ref("1234"), "range": date_1.isoformat() + ',' + date_2.isoformat()})
    request.headers["Content-Type"] = mime_type
    request.headers["Accept"] = mime_type
    request.body = haystackapi.dump(grid, mode=mime_type)
    # WHEN
    response = haystackapi.his_read(request, "dev")
    # THEN: each bare date is interpreted as 00:00 local time
    mock.assert_called_once_with(Ref("1234"),
                                 (datetime(2020, 1, 1).replace(tzinfo=get_localzone()),
                                  datetime(2020, 1, 2).replace(tzinfo=get_localzone())
                                  ), None)
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith(mime_type)
    assert haystackapi.parse(response.body, mime_type) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch.object(ping.Provider, 'his_read')
def test_his_read_with_range_one_date(mock) -> None:
    """A single date in `range` selects that whole day in the local zone."""
    # GIVEN
    mock.return_value = ping.PingGrid
    mime_type = haystackapi.MODE_ZINC
    request = HaystackHttpRequest()
    grid = haystackapi.Grid(columns=['id', 'range'])
    date_1 = date(2020, 1, 1)
    grid.append({"id": Ref("1234"), "range": date_1.isoformat()})
    request.headers["Content-Type"] = mime_type
    request.headers["Accept"] = mime_type
    request.body = haystackapi.dump(grid, mode=mime_type)
    # WHEN
    response = haystackapi.his_read(request, "dev")
    # THEN: [2020-01-01 00:00 local, + 1 day - 1 ms]
    cur_datetime = datetime.combine(date_1, datetime.min.time()).replace(tzinfo=get_localzone())
    mock.assert_called_once_with(Ref("1234"),
                                 (cur_datetime,
                                  cur_datetime + timedelta(days=1, milliseconds=-1)
                                  ), None)
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith(mime_type)
    assert haystackapi.parse(response.body, mime_type) is not None
| 40.049327 | 100 | 0.665211 | 1,085 | 8,931 | 5.295853 | 0.082949 | 0.065437 | 0.046989 | 0.025061 | 0.887052 | 0.883745 | 0.878698 | 0.872433 | 0.872433 | 0.860947 | 0 | 0.023813 | 0.200649 | 8,931 | 222 | 101 | 40.22973 | 0.781062 | 0.01422 | 0 | 0.730061 | 0 | 0 | 0.108555 | 0.029274 | 0 | 0 | 0 | 0 | 0.196319 | 1 | 0.04908 | false | 0 | 0.04908 | 0 | 0.09816 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a3009a265d4b8e068d0a3e86e7608ad6f7020ac | 5,453 | py | Python | panphon/test/test_distance.py | trenslow/panphon | f66df90e179179696e21991993bf06b8e9cebfba | [
"MIT"
] | null | null | null | panphon/test/test_distance.py | trenslow/panphon | f66df90e179179696e21991993bf06b8e9cebfba | [
"MIT"
] | null | null | null | panphon/test/test_distance.py | trenslow/panphon | f66df90e179179696e21991993bf06b8e9cebfba | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division, absolute_import
import unittest
import panphon
from panphon import distance
# Feature model passed to every Distance instance in this module.
feature_model = 'segment'
# Number of phonological features per segment; per-feature distances scale by
# 1/dim, so tests multiply by `dim` to recover whole-feature edit counts.
dim = 24
class TestLevenshtein(unittest.TestCase):
    """Plain Levenshtein (edit) distance between IPA transcriptions."""

    def setUp(self):
        # fresh Distance object per test so no state leaks between cases
        self.metric = distance.Distance(feature_model=feature_model)

    def test_trivial1(self):
        # 'p' vs 'pʰ' is a single segment substitution
        self.assertEqual(1, self.metric.levenshtein_distance('pop', 'pʰop'))

    def test_trivial2(self):
        # two segments differ between 'pop' and 'pʰom'
        self.assertEqual(2, self.metric.levenshtein_distance('pop', 'pʰom'))
class TestDolgoPrime(unittest.TestCase):
    """Dolgopolsky-prime distance: sounds in the same sound class cost 0."""

    def setUp(self):
        self.metric = distance.Distance(feature_model=feature_model)

    def test_trivial1(self):
        # 'pop' vs 'bob': every segment maps to the same Dolgo class
        self.assertEqual(0, self.metric.dolgo_prime_distance('pop', 'bob'))

    def test_trivial2(self):
        self.assertEqual(0, self.metric.dolgo_prime_distance('pop', 'bab'))
class TestUnweightedFeatureEditDist(unittest.TestCase):
    """Unweighted feature-level edit distance, in IPA and X-SAMPA notation."""

    def setUp(self):
        self.dist = distance.Distance(feature_model=feature_model)

    def test_unweighted_substitution_cost(self):
        # one of three feature slots differs -> cost 1/3
        self.assertEqual(self.dist.unweighted_substitution_cost([0, 1, -1], [0, 1, 1]) * 3, 1)

    def test_unweighted_deletion_cost(self):
        self.assertEqual(self.dist.unweighted_deletion_cost([1, -1, 1, 0]) * 4, 3.5)

    def test_trivial1(self):
        # 'bim' vs 'pym': three single-feature edits in total
        self.assertEqual(self.dist.feature_edit_distance('bim', 'pym') * dim, 3)

    def test_trivial2(self):
        self.assertEqual(self.dist.feature_edit_distance('ti', 'tʰi') * dim, 1)

    def test_xsampa(self):
        # same aspiration contrast expressed in X-SAMPA ('t_h' == 'tʰ')
        self.assertEqual(self.dist.feature_edit_distance('t i', 't_h i', xsampa=True) * dim, 1)

    def test_xsampa2(self):
        self.assertEqual(self.dist.feature_edit_distance('p u n', 'p y n', xsampa=True) * dim, 1)

    def test_xsampa3(self):
        # IPA and X-SAMPA spellings of the same pair must give equal distances
        ipa = self.dist.jt_feature_edit_distance_div_maxlen('kʰin', 'pʰin')
        xs = self.dist.jt_feature_edit_distance_div_maxlen('k_h i n', 'p_h i n', xsampa=True)
        self.assertEqual(ipa, xs)
class TestWeightedFeatureEditDist(unittest.TestCase):
    """Weighted feature edit distance: more salient changes must cost more."""

    def setUp(self):
        self.metric = distance.Distance(feature_model=feature_model)

    def test_trivial1(self):
        # changing the vowel as well ('u') should cost more than aspiration alone
        far = self.metric.weighted_feature_edit_distance('ti', 'tʰu')
        near = self.metric.weighted_feature_edit_distance('ti', 'tʰi')
        self.assertGreater(far, near)

    def test_trivial2(self):
        far = self.metric.weighted_feature_edit_distance('ti', 'te')
        near = self.metric.weighted_feature_edit_distance('ti', 'tḭ')
        self.assertGreater(far, near)
class TestHammingFeatureEditDistanceDivMaxlen(unittest.TestCase):
    """Hamming-style feature edit distance normalised by the longer string."""

    def setUp(self):
        self.dist = distance.Distance(feature_model=feature_model)

    def test_hamming_substitution_cost(self):
        # one of three feature values differs -> cost 1/3
        self.assertEqual(self.dist.hamming_substitution_cost(['+', '-', '0'], ['0', '-', '0']) * 3, 1)

    def test_trivial1(self):
        # single-feature difference over 2 segments: (1/dim)/2
        self.assertEqual(self.dist.hamming_feature_edit_distance_div_maxlen('pa', 'ba') * dim * 2, 1)

    def test_trivial2(self):
        # one whole-segment insertion against a 2-segment string: 1/2
        self.assertEqual(self.dist.hamming_feature_edit_distance_div_maxlen('i', 'pi') * 2, 1)

    def test_trivial3(self):
        # one full edit plus two single-feature edits, divided by maxlen 4
        self.assertEqual(self.dist.hamming_feature_edit_distance_div_maxlen('sɛks', 'ɛɡz'), (1 + (1 / dim) + (1 / dim)) / 4)

    def test_trivial4(self):
        self.assertEqual(self.dist.hamming_feature_edit_distance_div_maxlen('k', 'ɡ'), 1 / dim)
class TestMany(unittest.TestCase):
    """Smoke tests: every public distance flavour on the minimal pair p/b."""

    def setUp(self):
        self.dist = distance.Distance(feature_model=feature_model)

    def test_fast_levenshtein_distance(self):
        self.assertEqual(self.dist.fast_levenshtein_distance('p', 'b'), 1)

    def test_fast_levenshtein_distance_div_maxlen(self):
        self.assertEqual(self.dist.fast_levenshtein_distance_div_maxlen('p', 'b'), 1)

    def test_dolgo_prime_distance(self):
        # p/b are identical under the Dolgo-prime mapping, so distance is 0
        self.assertEqual(self.dist.dolgo_prime_distance('p', 'b'), 0)

    def test_dolgo_prime_div_maxlen(self):
        self.assertEqual(self.dist.dolgo_prime_distance_div_maxlen('p', 'b'), 0)

    def test_feature_edit_distance(self):
        # all feature-based variants agree: p/b differ in one of `dim` features
        self.assertEqual(self.dist.feature_edit_distance('p', 'b'), 1 / dim)

    def test_jt_feature_edit_distance(self):
        self.assertEqual(self.dist.jt_feature_edit_distance('p', 'b'), 1 / dim)

    def test_feature_edit_distance_div_maxlen(self):
        self.assertEqual(self.dist.feature_edit_distance_div_maxlen('p', 'b'), 1 / dim)

    def test_jt_feature_edit_distance_div_maxlen(self):
        self.assertEqual(self.dist.jt_feature_edit_distance_div_maxlen('p', 'b'), 1 / dim)

    def test_hamming_feature_edit_distance(self):
        self.assertEqual(self.dist.hamming_feature_edit_distance('p', 'b'), 1 / dim)

    def test_jt_hamming_feature_edit_distance(self):
        self.assertEqual(self.dist.jt_hamming_feature_edit_distance('p', 'b'), 1 / dim)

    def test_hamming_feature_edit_distance_div_maxlen(self):
        self.assertEqual(self.dist.hamming_feature_edit_distance_div_maxlen('p', 'b'), 1 / dim)

    def test_jt_hamming_feature_edit_distance_div_maxlen(self):
        self.assertEqual(self.dist.jt_hamming_feature_edit_distance_div_maxlen('p', 'b'), 1 / dim)
class TestXSampa(unittest.TestCase):
    """X-SAMPA input must behave like the equivalent IPA input."""

    def setUp(self):
        self.metric = distance.Distance(feature_model=feature_model)
        self.table = panphon.FeatureTable()

    def test_feature_edit_distance(self):
        # aspiration ('p_h' vs 'p') is a single feature flip out of `dim`
        self.assertEqual(1 / dim, self.metric.feature_edit_distance("p_h", "p", xsampa=True))
| 38.401408 | 124 | 0.706767 | 737 | 5,453 | 4.936228 | 0.131615 | 0.090159 | 0.167125 | 0.17702 | 0.810335 | 0.785047 | 0.763332 | 0.702859 | 0.558549 | 0.486256 | 0 | 0.014703 | 0.164313 | 5,453 | 141 | 125 | 38.673759 | 0.78363 | 0.003851 | 0 | 0.276596 | 0 | 0 | 0.028177 | 0 | 0 | 0 | 0 | 0 | 0.329787 | 1 | 0.404255 | false | 0 | 0.042553 | 0 | 0.521277 | 0.010638 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
dbe3ebccc368b54a3e011ce59cab57bbe444dbf1 | 192 | py | Python | stable_baselines3/her/__init__.py | Practical-Formal-Methods/mod_stable_baselines3 | 08bdb0a529c8ab446ac7973f2a02f832c0c3f454 | [
"MIT"
] | null | null | null | stable_baselines3/her/__init__.py | Practical-Formal-Methods/mod_stable_baselines3 | 08bdb0a529c8ab446ac7973f2a02f832c0c3f454 | [
"MIT"
] | null | null | null | stable_baselines3/her/__init__.py | Practical-Formal-Methods/mod_stable_baselines3 | 08bdb0a529c8ab446ac7973f2a02f832c0c3f454 | [
"MIT"
] | null | null | null | from mod_stable_baselines3.stable_baselines3.her.goal_selection_strategy import GoalSelectionStrategy
from mod_stable_baselines3.stable_baselines3.her.her_replay_buffer import HerReplayBuffer
| 64 | 101 | 0.927083 | 24 | 192 | 7 | 0.541667 | 0.380952 | 0.154762 | 0.27381 | 0.5 | 0.5 | 0.5 | 0 | 0 | 0 | 0 | 0.021739 | 0.041667 | 192 | 2 | 102 | 96 | 0.891304 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
dbf41fa018e365e433bb6e8d69248cba4b6ad817 | 229 | py | Python | python-student/MakeProto.py | Tallic/grpc-example | 6b3a65259ab44d64a7e3749e30dc7e0f36990d37 | [
"MIT"
] | null | null | null | python-student/MakeProto.py | Tallic/grpc-example | 6b3a65259ab44d64a7e3749e30dc7e0f36990d37 | [
"MIT"
] | 1 | 2017-07-12T12:38:25.000Z | 2017-07-12T12:38:25.000Z | python-student/MakeProto.py | Tallic/grpc-example | 6b3a65259ab44d64a7e3749e30dc7e0f36990d37 | [
"MIT"
] | null | null | null | import os
os.system('python -m grpc_tools.protoc '
'-I ../kotlin-professor/src/main/proto/ '
'--python_out=. '
'--grpc_python_out=. '
'../kotlin-professor/src/main/proto/Classroom.proto')
| 28.625 | 63 | 0.580786 | 27 | 229 | 4.777778 | 0.592593 | 0.232558 | 0.27907 | 0.341085 | 0.418605 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.240175 | 229 | 7 | 64 | 32.714286 | 0.741379 | 0 | 0 | 0 | 0 | 0 | 0.663755 | 0.371179 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.166667 | 0 | 0.166667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0201b431a0e2f5a4ad5fb8355affc037211788f | 5,095 | py | Python | tests/tensorflow/hooks/test_simple_include.py | NRauschmayr/sagemaker-debugger | 24ed65631143fcc0457fb325a102500ebd69adfc | [
"Apache-2.0"
] | 133 | 2019-12-03T18:56:27.000Z | 2022-03-18T19:54:49.000Z | tests/tensorflow/hooks/test_simple_include.py | NRauschmayr/sagemaker-debugger | 24ed65631143fcc0457fb325a102500ebd69adfc | [
"Apache-2.0"
] | 384 | 2019-12-04T03:04:14.000Z | 2022-03-31T20:42:48.000Z | tests/tensorflow/hooks/test_simple_include.py | NRauschmayr/sagemaker-debugger | 24ed65631143fcc0457fb325a102500ebd69adfc | [
"Apache-2.0"
] | 64 | 2019-12-05T20:39:51.000Z | 2022-03-25T13:30:54.000Z | # Standard Library
import glob
# First Party
from smdebug.core.config_constants import DEFAULT_COLLECTIONS_FILE_NAME
from smdebug.core.json_config import CONFIG_FILE_PATH_ENV_STR
from smdebug.core.reader import FileReader
from smdebug.core.utils import get_path_to_collections
from smdebug.tensorflow import SaveConfig, SessionHook
from smdebug.tensorflow.collection import CollectionManager
# Local
from .utils import get_dirs_files, join, os, pre_test_clean_up, simple_model
def helper_test_simple_include(trial_dir, hook):
    """Run simple_model under `hook` and check only `loss:0` was saved.

    Adds `loss:0` to the "default" collection, runs 10 steps, and asserts:
    the collection tracked exactly one tensor, 5 steps were written
    (save_interval=2), and each step's event files hold a single 4-byte
    (i.e. one scalar float32) tensor.
    """
    hook.get_collection("default").include("loss:0")
    simple_model(hook, steps=10)
    _, files = get_dirs_files(trial_dir)
    steps, _ = get_dirs_files(os.path.join(trial_dir, "events"))
    cm = CollectionManager.load(
        join(get_path_to_collections(trial_dir), DEFAULT_COLLECTIONS_FILE_NAME)
    )
    assert len(cm.collections["default"].tensor_names) == 1
    assert len(steps) == 5
    for step in steps:
        num_tensors = 0
        total_bytes = 0
        fs = glob.glob(join(trial_dir, "events", step, "**", "*.tfevents"), recursive=True)
        for f in fs:
            fr = FileReader(f)
            for record in fr.read_tensors():
                # unpack into `tensor_step`, not `step`, to avoid clobbering
                # the outer loop variable (the step directory name)
                tensor_name, tensor_step, tensor_data, mode, mode_step = record
                num_tensors += 1
                total_bytes += tensor_data.nbytes if tensor_data is not None else 0
        assert num_tensors == 1
        assert total_bytes == 4
def test_simple_include(out_dir):
    """End-to-end include() check with an explicitly constructed SessionHook."""
    pre_test_clean_up()
    save_cfg = SaveConfig(save_interval=2)
    hook = SessionHook(
        out_dir=out_dir,
        include_collections=["default", "losses"],
        save_config=save_cfg,
    )
    helper_test_simple_include(out_dir, hook)
def test_simple_include_json(out_dir, monkeypatch):
    """Same scenario as test_simple_include, hook configured from a JSON file."""
    pre_test_clean_up()
    config_path = "tests/tensorflow/hooks/test_json_configs/test_simple_include.json"
    monkeypatch.setenv(CONFIG_FILE_PATH_ENV_STR, config_path)
    hook = SessionHook.create_from_json_file()
    helper_test_simple_include(out_dir, hook)
def helper_test_simple_include_regex(trial_dir, hook):
    """Run simple_model under a regex-configured hook and verify the output.

    Expects exactly one tensor in the "default" collection, 5 saved steps
    (10 steps at save_interval=2), and one 4-byte tensor per step.
    """
    simple_model(hook, steps=10)
    _, files = get_dirs_files(trial_dir)
    steps, _ = get_dirs_files(os.path.join(trial_dir, "events"))
    cm = CollectionManager.load(
        join(get_path_to_collections(trial_dir), DEFAULT_COLLECTIONS_FILE_NAME)
    )
    assert len(cm.collections["default"].tensor_names) == 1
    assert len(steps) == 5
    for step in steps:
        num_tensors = 0
        total_bytes = 0
        fs = glob.glob(join(trial_dir, "events", step, "**", "*.tfevents"), recursive=True)
        for f in fs:
            fr = FileReader(f)
            for record in fr.read_tensors():
                # unpack into `tensor_step`, not `step`, to avoid clobbering
                # the outer loop variable (the step directory name)
                tensor_name, tensor_step, tensor_data, mode, mode_step = record
                num_tensors += 1
                total_bytes += tensor_data.nbytes if tensor_data is not None else 0
        assert num_tensors == 1
        assert total_bytes == 4
def test_simple_include_regex(out_dir):
    """include_regex alone (empty include_collections) must capture loss:0."""
    pre_test_clean_up()
    loss_pattern = ["loss:0"]
    hook = SessionHook(
        out_dir=out_dir,
        save_config=SaveConfig(save_interval=2),
        include_regex=loss_pattern,
        include_collections=[],
    )
    helper_test_simple_include_regex(out_dir, hook)
def test_simple_include_regex_json(out_dir, monkeypatch):
    """Regex-include scenario, hook configured from a JSON file."""
    pre_test_clean_up()
    config_path = "tests/tensorflow/hooks/test_json_configs/test_simple_include_regex.json"
    monkeypatch.setenv(CONFIG_FILE_PATH_ENV_STR, config_path)
    hook = SessionHook.create_from_json_file()
    helper_test_simple_include_regex(out_dir, hook)
def helper_test_multi_collection_match(trial_dir, hook):
    """Verify a tensor matched by two collections is still saved only once.

    Both the "default" and "trial" collections must track exactly one tensor,
    5 steps must be written (10 steps at save_interval=2), and each step's
    event files must hold a single 4-byte tensor.
    """
    simple_model(hook, steps=10)
    _, files = get_dirs_files(trial_dir)
    steps, _ = get_dirs_files(os.path.join(trial_dir, "events"))
    cm = CollectionManager.load(
        join(get_path_to_collections(trial_dir), DEFAULT_COLLECTIONS_FILE_NAME)
    )
    assert len(cm.collections["default"].tensor_names) == 1
    assert len(cm.collections["trial"].tensor_names) == 1
    assert len(steps) == 5
    for step in steps:
        num_tensors = 0
        total_bytes = 0
        fs = glob.glob(join(trial_dir, "events", step, "**", "*.tfevents"), recursive=True)
        for f in fs:
            fr = FileReader(f)
            for record in fr.read_tensors():
                # unpack into `tensor_step`, not `step`, to avoid clobbering
                # the outer loop variable (the step directory name)
                tensor_name, tensor_step, tensor_data, mode, mode_step = record
                num_tensors += 1
                total_bytes += tensor_data.nbytes if tensor_data is not None else 0
        assert num_tensors == 1
        assert total_bytes == 4
def test_multi_collection_match(out_dir):
    """A tensor matched by a regex and by two collections is stored once."""
    pre_test_clean_up()
    loss_pattern = ["loss:0"]
    hook = SessionHook(
        out_dir=out_dir,
        save_config=SaveConfig(save_interval=2),
        include_regex=loss_pattern,
        include_collections=["default", "trial"],
    )
    hook.get_collection("trial").include("loss:0")
    helper_test_multi_collection_match(out_dir, hook)
def test_multi_collection_match_json(out_dir, monkeypatch):
    """Multi-collection scenario, hook configured from a JSON file."""
    pre_test_clean_up()
    config_path = "tests/tensorflow/hooks/test_json_configs/test_multi_collection_match.json"
    monkeypatch.setenv(CONFIG_FILE_PATH_ENV_STR, config_path)
    hook = SessionHook.create_from_json_file()
    hook.get_collection("trial").include("loss:0")
    helper_test_multi_collection_match(out_dir, hook)
| 33.084416 | 91 | 0.674975 | 689 | 5,095 | 4.656023 | 0.139332 | 0.033666 | 0.063591 | 0.030549 | 0.848815 | 0.804863 | 0.759975 | 0.737843 | 0.722569 | 0.704177 | 0 | 0.009883 | 0.225515 | 5,095 | 153 | 92 | 33.300654 | 0.803092 | 0.006673 | 0 | 0.728 | 0 | 0 | 0.074946 | 0.041329 | 0 | 0 | 0 | 0 | 0.104 | 1 | 0.072 | false | 0 | 0.064 | 0 | 0.136 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e02ee836f47ae1fc4f29375fbbf59ed5fd7a675c | 321 | py | Python | east_asian_spacing/__init__.py | kojiishi/contextual-spacing | a38e8c1319ef511c8c9875899cfbd2dfdf5797bf | [
"Apache-2.0"
] | 2 | 2021-03-09T20:40:04.000Z | 2021-04-15T06:15:42.000Z | east_asian_spacing/__init__.py | kojiishi/contextual-spacing | a38e8c1319ef511c8c9875899cfbd2dfdf5797bf | [
"Apache-2.0"
] | null | null | null | east_asian_spacing/__init__.py | kojiishi/contextual-spacing | a38e8c1319ef511c8c9875899cfbd2dfdf5797bf | [
"Apache-2.0"
] | null | null | null | from east_asian_spacing.builder import *
from east_asian_spacing.config import *
from east_asian_spacing.dump import *
from east_asian_spacing.font import *
from east_asian_spacing.log_utils import *
from east_asian_spacing.shaper import *
from east_asian_spacing.spacing import *
from east_asian_spacing.tester import *
| 35.666667 | 42 | 0.850467 | 49 | 321 | 5.22449 | 0.265306 | 0.25 | 0.40625 | 0.625 | 0.710938 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.099688 | 321 | 8 | 43 | 40.125 | 0.885813 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e0382f204dec15698dae5af6ce9d1a2e2730b10e | 2,886 | py | Python | src/tests/unit/persistense/test_neo4j_api.py | AITestingOrg/aide--domain-expert-repository | 7b021da253e03ccc1b5f90fcdd51cf9550220dd7 | [
"MIT"
] | 2 | 2017-10-19T21:15:49.000Z | 2017-10-31T13:44:09.000Z | src/tests/unit/persistense/test_neo4j_api.py | AITestingOrg/domain-expert-prototype | 7b021da253e03ccc1b5f90fcdd51cf9550220dd7 | [
"MIT"
] | null | null | null | src/tests/unit/persistense/test_neo4j_api.py | AITestingOrg/domain-expert-prototype | 7b021da253e03ccc1b5f90fcdd51cf9550220dd7 | [
"MIT"
] | 1 | 2020-08-26T09:02:08.000Z | 2020-08-26T09:02:08.000Z | from ....common.persitence.neo4j_api import Neo4jApi
# given connect is called
# when a configuration is given
# then the driver should be called with the correct parameters
def test_connect_parameters(mocker):
    """_connect must invoke the driver factory with the stored URI and auth."""
    # arrange
    factory_stub = mocker.stub(name='driver_stub')
    api = Neo4jApi(factory_stub, 'con', 'test')
    # act
    api._connect()
    # assert
    factory_stub.assert_called_once_with('con', 'test')
    assert factory_stub.call_count == 1
# given close is called
# when a connection is open
# then the session should be closed.
def test_close_session(mocker):
    """_close after _connect must close the underlying session exactly once."""
    # arrange: a fake driver whose session() returns itself, so close() can
    # be observed directly on the driver object
    class FakeDriver(object):
        def session(self):
            return self

        def close(self):
            pass

        def run(self, query, obj=None):
            pass

    fake_driver = FakeDriver()
    mocker.spy(fake_driver, 'close')
    api = Neo4jApi(lambda uri, auth: fake_driver, 'con', 'test')
    api._connect()
    # act
    api._close()
    # assert
    assert fake_driver.close.call_count == 1
# given close is called
# when a connection is not open
# then nothing should happen.
def test_close_unopen_session(mocker):
    """_close without a prior _connect must not touch the driver session."""
    # arrange: fake driver whose session() returns itself so close() can be
    # observed directly on the driver object
    class DriverMock(object):
        def session(self):
            return self

        def close(self):
            pass

        def run(self, query, obj=None):
            pass

    driverMock = DriverMock()

    def mockFactory(param1, param2):
        return driverMock

    mocker.spy(driverMock, 'close')
    neo = Neo4jApi(mockFactory, 'con', 'test')
    # act: close without ever calling neo._connect()
    neo._close()
    # assert: the spied close() was never invoked
    assert driverMock.close.call_count == 0
# given run is called
# when pass an object
# the session should be run with the object.
def test_run_called_with_object(mocker):
    """_run with a parameter object must forward both query and object."""
    # arrange: fake driver whose session() returns itself so run() can be
    # spied directly on the driver object
    class DriverMock(object):
        def session(self):
            return self

        def close(self):
            pass

        def run(self, query, obj=None):
            pass

    driverMock = DriverMock()

    def mockFactory(param1, param2):
        return driverMock

    mocker.spy(driverMock, 'run')
    neo = Neo4jApi(mockFactory, 'con', 'test')
    # act
    neo._run('test', {'test': '1'})
    # assert: query string and parameter dict are passed through unchanged
    driverMock.run.assert_called_once_with('test', {'test': '1'})
# given run is called
# when pass an object
# the session should be run with the object.
def test_run_called_without_object(mocker):
    """_run without a parameter object must forward only the query string."""
    # arrange: fake driver whose session() returns itself so run() can be
    # spied directly on the driver object
    class DriverMock(object):
        def session(self):
            return self

        def close(self):
            pass

        def run(self, query, obj=None):
            # no-op; the call is observed via mocker.spy (stray debug
            # print removed)
            pass

    driverMock = DriverMock()

    def mockFactory(param1, param2):
        return driverMock

    mocker.spy(driverMock, 'run')
    neo = Neo4jApi(mockFactory, 'con', 'test')
    # act
    neo._run('test')
    # assert
    driverMock.run.assert_called_once_with('test')
| 20.181818 | 64 | 0.61781 | 345 | 2,886 | 5.066667 | 0.185507 | 0.024027 | 0.034325 | 0.029748 | 0.762586 | 0.762586 | 0.762586 | 0.762586 | 0.713387 | 0.66476 | 0 | 0.009639 | 0.281012 | 2,886 | 142 | 65 | 20.323944 | 0.832771 | 0.186417 | 0 | 0.753623 | 0 | 0 | 0.041433 | 0 | 0 | 0 | 0 | 0 | 0.086957 | 1 | 0.304348 | false | 0.115942 | 0.014493 | 0.115942 | 0.492754 | 0.014493 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 9 |
e05abe60ada4a7e988d66c26523022c36fa5e523 | 15,326 | py | Python | pcdet/models/backbones_3d/rsn_backbone.py | ocNflag/point2seq | 710686f576b3df5469a06c66860758b25f852dbd | [
"Apache-2.0"
] | 21 | 2022-03-24T09:37:38.000Z | 2022-03-31T13:21:54.000Z | pcdet/models/backbones_3d/rsn_backbone.py | ocNflag/point2seq | 710686f576b3df5469a06c66860758b25f852dbd | [
"Apache-2.0"
] | null | null | null | pcdet/models/backbones_3d/rsn_backbone.py | ocNflag/point2seq | 710686f576b3df5469a06c66860758b25f852dbd | [
"Apache-2.0"
] | 1 | 2022-03-24T09:37:48.000Z | 2022-03-24T09:37:48.000Z | from functools import partial
import spconv
import torch.nn as nn
class B1Block(spconv.SparseModule):
    """Residual block made of two 3x3x3 submanifold sparse convolutions.

    Both convolutions share `indice_key`, and the input features are added
    back before the final ReLU.
    """

    def __init__(self, inplanes, planes, norm_fn=None, indice_key=None):
        super(B1Block, self).__init__()
        assert norm_fn is not None
        use_bias = norm_fn is not None
        self.conv1 = spconv.SubMConv3d(
            inplanes, planes, kernel_size=3, stride=1, padding=1, bias=use_bias, indice_key=indice_key
        )
        self.bn1 = norm_fn(planes)
        self.relu = nn.ReLU()
        self.conv2 = spconv.SubMConv3d(
            planes, planes, kernel_size=3, stride=1, padding=1, bias=use_bias, indice_key=indice_key
        )
        self.bn2 = norm_fn(planes)

    def forward(self, x):
        shortcut = x
        y = self.conv1(x)
        y.features = self.relu(self.bn1(y.features))
        y = self.conv2(y)
        y.features = self.bn2(y.features)
        # residual add on the feature matrix, then the closing activation
        y.features += shortcut.features
        y.features = self.relu(y.features)
        return y
class B0Block(spconv.SparseModule):
    """Downsampling residual block.

    A regular SparseConv3d (conv0) first applies `stride` and creates a new
    indice table (keyed by `spconv_key`); two SubMConv3d layers then form a
    residual branch on that output, with conv0's activation used as the
    identity shortcut.
    """

    def __init__(self, inplanes, planes, stride=1, norm_fn=None, indice_key=None, spconv_key=None):
        super(B0Block, self).__init__()
        assert norm_fn is not None
        # norm_fn is required by the assert above, so bias is effectively True
        bias = norm_fn is not None
        self.conv0 = spconv.SparseConv3d(inplanes, planes, kernel_size=3, stride=stride, padding=1, bias=False, indice_key=spconv_key)
        self.bn0 = norm_fn(planes)
        self.conv1 = spconv.SubMConv3d(
            planes, planes, kernel_size=3, stride=1, padding=1, bias=bias, indice_key=indice_key
        )
        self.bn1 = norm_fn(planes)
        self.relu = nn.ReLU()
        self.conv2 = spconv.SubMConv3d(
            planes, planes, kernel_size=3, stride=1, padding=1, bias=bias, indice_key=indice_key
        )
        self.bn2 = norm_fn(planes)
        self.stride = stride

    def forward(self, x):
        # conv0 -> BN -> ReLU produces the (possibly) downsampled tensor
        sp_out = self.conv0(x)
        sp_out.features = self.bn0(sp_out.features)
        sp_out.features = self.relu(sp_out.features)
        identity = sp_out
        out = self.conv1(sp_out)
        out.features = self.bn1(out.features)
        out.features = self.relu(out.features)
        out = self.conv2(out)
        out.features = self.bn2(out.features)
        # residual add against conv0's output, then the closing activation
        out.features += identity.features
        out.features = self.relu(out.features)
        return out
class CarS(nn.Module):
    """Small RSN-style sparse 3-D backbone ("CarS").

    Six 64-channel blocks: B1 / strided B0 / B1 / strided B0 / B1 / B1
    (two stride-2 stages -> output stride 4). Every intermediate sparse
    tensor is exposed under 'multi_scale_3d_features'.
    """

    def __init__(self, model_cfg, input_channels, grid_size, **kwargs):
        super().__init__()
        self.model_cfg = model_cfg
        norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)
        # reverse grid_size to (z, y, x) and pad one extra voxel on z
        self.sparse_shape = grid_size[::-1] + [1, 0, 0]
        self.conv_input = spconv.SparseSequential(
            spconv.SubMConv3d(input_channels, 64, 3, padding=1, bias=False, indice_key='subm_input'),
            norm_fn(64),
            nn.ReLU(),
        )
        self.conv1 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_1')
        self.conv2 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_2', spconv_key='spc_2')
        self.conv3 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_3')
        self.conv4 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_4', spconv_key='spc_4')
        self.conv5 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_5')
        self.conv6 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_6')
        self.num_point_features = 64

    def forward(self, batch_dict):
        """
        Args:
            batch_dict:
                batch_size: int
                voxel_features: (num_voxels, C)
                voxel_coords: (num_voxels, 4), [batch_idx, z_idx, y_idx, x_idx]
        Returns:
            batch_dict extended with 'encoded_spconv_tensor' (stride 4) and
            'multi_scale_3d_features' holding every intermediate tensor.
        """
        sp_tensor = spconv.SparseConvTensor(
            features=batch_dict['voxel_features'],
            indices=batch_dict['voxel_coords'].int(),
            spatial_shape=self.sparse_shape,
            batch_size=batch_dict['batch_size'],
        )
        x = self.conv_input(sp_tensor)
        multi_scale = {}
        blocks = (self.conv1, self.conv2, self.conv3, self.conv4, self.conv5, self.conv6)
        for idx, block in enumerate(blocks, start=1):
            x = block(x)
            multi_scale['x_conv%d' % idx] = x
        batch_dict.update({
            'encoded_spconv_tensor': x,
            'encoded_spconv_tensor_stride': 4,
        })
        batch_dict.update({'multi_scale_3d_features': multi_scale})
        return batch_dict
class CarL(nn.Module):
    """Large RSN-style sparse 3-D backbone ("CarL").

    One B1 block followed by five B0 blocks, all at 64 channels; conv2 and
    conv4 use stride 2 (output stride 4). Every intermediate sparse tensor
    is exposed under 'multi_scale_3d_features'.
    """

    def __init__(self, model_cfg, input_channels, grid_size, **kwargs):
        super().__init__()
        self.model_cfg = model_cfg
        norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)
        # reverse grid_size to (z, y, x) and pad one extra voxel on z
        self.sparse_shape = grid_size[::-1] + [1, 0, 0]
        self.conv_input = spconv.SparseSequential(
            spconv.SubMConv3d(input_channels, 64, 3, padding=1, bias=False, indice_key='subm_input'),
            norm_fn(64),
            nn.ReLU(),
        )
        self.conv1 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_1')
        self.conv2 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_2', spconv_key='spc_2')
        self.conv3 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_3', spconv_key='spc_3')
        self.conv4 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_4', spconv_key='spc_4')
        self.conv5 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_5', spconv_key='spc_5')
        self.conv6 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_6', spconv_key='spc_6')
        self.num_point_features = 64

    def forward(self, batch_dict):
        """
        Args:
            batch_dict:
                batch_size: int
                voxel_features: (num_voxels, C)
                voxel_coords: (num_voxels, 4), [batch_idx, z_idx, y_idx, x_idx]
        Returns:
            batch_dict extended with 'encoded_spconv_tensor' (stride 4) and
            'multi_scale_3d_features' holding every intermediate tensor.
        """
        sp_tensor = spconv.SparseConvTensor(
            features=batch_dict['voxel_features'],
            indices=batch_dict['voxel_coords'].int(),
            spatial_shape=self.sparse_shape,
            batch_size=batch_dict['batch_size'],
        )
        x = self.conv_input(sp_tensor)
        multi_scale = {}
        blocks = (self.conv1, self.conv2, self.conv3, self.conv4, self.conv5, self.conv6)
        for idx, block in enumerate(blocks, start=1):
            x = block(x)
            multi_scale['x_conv%d' % idx] = x
        batch_dict.update({
            'encoded_spconv_tensor': x,
            'encoded_spconv_tensor_stride': 4,
        })
        batch_dict.update({'multi_scale_3d_features': multi_scale})
        return batch_dict
class CarXL(nn.Module):
    """Extra-large RSN-style sparse 3-D backbone ("CarXL").

    One B1 block followed by seven B0 blocks, all at 64 channels; conv2 and
    conv5 use stride 2 (output stride 4). Every intermediate sparse tensor
    is exposed under 'multi_scale_3d_features'.
    """

    def __init__(self, model_cfg, input_channels, grid_size, **kwargs):
        super().__init__()
        self.model_cfg = model_cfg
        norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)
        # reverse grid_size to (z, y, x) and pad one extra voxel on z
        self.sparse_shape = grid_size[::-1] + [1, 0, 0]
        self.conv_input = spconv.SparseSequential(
            spconv.SubMConv3d(input_channels, 64, 3, padding=1, bias=False, indice_key='subm_input'),
            norm_fn(64),
            nn.ReLU(),
        )
        self.conv1 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_1')
        self.conv2 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_2', spconv_key='spc_2')
        self.conv3 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_3', spconv_key='spc_3')
        self.conv4 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_4', spconv_key='spc_4')
        self.conv5 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_5', spconv_key='spc_5')
        self.conv6 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_6', spconv_key='spc_6')
        self.conv7 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_7', spconv_key='spc_7')
        self.conv8 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_8', spconv_key='spc_8')
        self.num_point_features = 64

    def forward(self, batch_dict):
        """
        Args:
            batch_dict:
                batch_size: int
                voxel_features: (num_voxels, C)
                voxel_coords: (num_voxels, 4), [batch_idx, z_idx, y_idx, x_idx]
        Returns:
            batch_dict extended with 'encoded_spconv_tensor' (stride 4) and
            'multi_scale_3d_features' holding every intermediate tensor.
        """
        sp_tensor = spconv.SparseConvTensor(
            features=batch_dict['voxel_features'],
            indices=batch_dict['voxel_coords'].int(),
            spatial_shape=self.sparse_shape,
            batch_size=batch_dict['batch_size'],
        )
        x = self.conv_input(sp_tensor)
        multi_scale = {}
        blocks = (self.conv1, self.conv2, self.conv3, self.conv4,
                  self.conv5, self.conv6, self.conv7, self.conv8)
        for idx, block in enumerate(blocks, start=1):
            x = block(x)
            multi_scale['x_conv%d' % idx] = x
        batch_dict.update({
            'encoded_spconv_tensor': x,
            'encoded_spconv_tensor_stride': 4,
        })
        batch_dict.update({'multi_scale_3d_features': multi_scale})
        return batch_dict
class PedS(nn.Module):
def __init__(self, model_cfg, input_channels, grid_size, **kwargs):
super().__init__()
self.model_cfg = model_cfg
norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)
self.sparse_shape = grid_size[::-1] + [1, 0, 0]
self.conv_input = spconv.SparseSequential(
spconv.SubMConv3d(input_channels, 64, 3, padding=1, bias=False, indice_key='subm_input'),
norm_fn(64),
nn.ReLU(),
)
self.conv1 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_1')
self.conv2 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_2', spconv_key='spc_2')
self.conv3 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_3')
self.conv4 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_4')
self.num_point_features = 64
def forward(self, batch_dict):
"""
Args:
batch_dict:
batch_size: int
vfe_features: (num_voxels, C)
voxel_coords: (num_voxels, 4), [batch_idx, z_idx, y_idx, x_idx]
Returns:
batch_dict:
encoded_spconv_tensor: sparse tensor
"""
voxel_features, voxel_coords = batch_dict['voxel_features'], batch_dict['voxel_coords']
batch_size = batch_dict['batch_size']
input_sp_tensor = spconv.SparseConvTensor(
features=voxel_features,
indices=voxel_coords.int(),
spatial_shape=self.sparse_shape,
batch_size=batch_size
)
x_conv_input = self.conv_input(input_sp_tensor)
x_conv1 = self.conv1(x_conv_input)
x_conv2 = self.conv2(x_conv1)
x_conv3 = self.conv3(x_conv2)
x_conv4 = self.conv4(x_conv3)
batch_dict.update({
'encoded_spconv_tensor': x_conv4,
'encoded_spconv_tensor_stride': 2
})
batch_dict.update({
'multi_scale_3d_features': {
'x_conv1': x_conv1,
'x_conv2': x_conv2,
'x_conv3': x_conv3,
'x_conv4': x_conv4,
}
})
return batch_dict
class PedL(nn.Module):
def __init__(self, model_cfg, input_channels, grid_size, **kwargs):
super().__init__()
self.model_cfg = model_cfg
norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)
self.sparse_shape = grid_size[::-1] + [1, 0, 0]
self.conv_input = spconv.SparseSequential(
spconv.SubMConv3d(input_channels, 64, 3, padding=1, bias=False, indice_key='subm_input'),
norm_fn(64),
nn.ReLU(),
)
self.conv1 = B1Block(inplanes=64, planes=64, norm_fn=norm_fn, indice_key='subm_1')
self.conv2 = B0Block(inplanes=64, planes=64, stride=2, norm_fn=norm_fn, indice_key='subm_2', spconv_key='spc_2')
self.conv3 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_3', spconv_key='spc_3')
self.conv4 = B0Block(inplanes=64, planes=64, stride=1, norm_fn=norm_fn, indice_key='subm_4', spconv_key='spc_4')
self.num_point_features = 64
def forward(self, batch_dict):
"""
Args:
batch_dict:
batch_size: int
vfe_features: (num_voxels, C)
voxel_coords: (num_voxels, 4), [batch_idx, z_idx, y_idx, x_idx]
Returns:
batch_dict:
encoded_spconv_tensor: sparse tensor
"""
voxel_features, voxel_coords = batch_dict['voxel_features'], batch_dict['voxel_coords']
batch_size = batch_dict['batch_size']
input_sp_tensor = spconv.SparseConvTensor(
features=voxel_features,
indices=voxel_coords.int(),
spatial_shape=self.sparse_shape,
batch_size=batch_size
)
x_conv_input = self.conv_input(input_sp_tensor)
x_conv1 = self.conv1(x_conv_input)
x_conv2 = self.conv2(x_conv1)
x_conv3 = self.conv3(x_conv2)
x_conv4 = self.conv4(x_conv3)
batch_dict.update({
'encoded_spconv_tensor': x_conv4,
'encoded_spconv_tensor_stride': 2
})
batch_dict.update({
'multi_scale_3d_features': {
'x_conv1': x_conv1,
'x_conv2': x_conv2,
'x_conv3': x_conv3,
'x_conv4': x_conv4,
}
})
return batch_dict | 37.841975 | 134 | 0.601984 | 2,023 | 15,326 | 4.228374 | 0.054869 | 0.05401 | 0.050152 | 0.05892 | 0.935352 | 0.935352 | 0.922376 | 0.90823 | 0.908113 | 0.908113 | 0 | 0.047997 | 0.284941 | 15,326 | 405 | 135 | 37.841975 | 0.732549 | 0.073666 | 0 | 0.807432 | 0 | 0 | 0.075641 | 0.026083 | 0 | 0 | 0 | 0 | 0.006757 | 1 | 0.047297 | false | 0 | 0.010135 | 0 | 0.10473 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0665bcb26b2720695d288ead1d222da0355e11e | 11,877 | py | Python | tests/hwsim/test_authsrv.py | byu343/sonic-wpa-supplicant | 447e9f708df996b8de88ce4fe703c0901357e12b | [
"Unlicense"
] | 1,104 | 2021-05-11T18:45:36.000Z | 2022-03-30T22:39:34.000Z | tests/hwsim/test_authsrv.py | byu343/sonic-wpa-supplicant | 447e9f708df996b8de88ce4fe703c0901357e12b | [
"Unlicense"
] | 44 | 2021-05-12T01:12:58.000Z | 2022-03-31T07:09:52.000Z | tests/hwsim/test_authsrv.py | byu343/sonic-wpa-supplicant | 447e9f708df996b8de88ce4fe703c0901357e12b | [
"Unlicense"
] | 168 | 2021-05-11T22:02:54.000Z | 2022-03-24T03:21:51.000Z | # hostapd authentication server tests
# Copyright (c) 2017, Jouni Malinen
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import hostapd
from utils import alloc_fail, fail_test, wait_fail_trigger
def authsrv_params():
params = {"ssid": "as", "beacon_int": "2000",
"radius_server_clients": "auth_serv/radius_clients.conf",
"radius_server_auth_port": '18128',
"eap_server": "1",
"eap_user_file": "auth_serv/eap_user.conf",
"eap_sim_db": "unix:/tmp/hlr_auc_gw.sock",
"ca_cert": "auth_serv/ca.pem",
"server_cert": "auth_serv/server.pem",
"private_key": "auth_serv/server.key",
"eap_message": "hello"}
return params
def test_authsrv_oom(dev, apdev):
"""Authentication server OOM"""
params = authsrv_params()
authsrv = hostapd.add_ap(apdev[1], params)
params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
params['auth_server_port'] = "18128"
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(hapd.own_addr(), 2412)
with alloc_fail(authsrv, 1, "hostapd_radius_get_eap_user"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("EAP failure not reported")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
with alloc_fail(authsrv, 1, "srv_log"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
scan_freq="2412")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
with alloc_fail(authsrv, 1, "radius_server_new_session"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
for count in range(1, 3):
with alloc_fail(authsrv, count, "=radius_server_get_new_session"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
with alloc_fail(authsrv, 1, "eap_server_sm_init"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
tests = ["radius_server_encapsulate_eap",
"radius_server_receive_auth"]
for t in tests:
with alloc_fail(authsrv, 1, t):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
wait_fail_trigger(authsrv, "GET_ALLOC_FAIL")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
tests = ["radius_msg_add_attr;radius_server_encapsulate_eap",
"radius_msg_add_eap;radius_server_encapsulate_eap",
"radius_msg_finish_srv;radius_server_encapsulate_eap"]
for t in tests:
with fail_test(authsrv, 1, t):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
wait_fail_trigger(authsrv, "GET_FAIL")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
with alloc_fail(authsrv, 1, "radius_server_get_new_session"):
with fail_test(authsrv, 1, "radius_msg_add_eap;radius_server_reject"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
wait_fail_trigger(authsrv, "GET_FAIL")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
with alloc_fail(authsrv, 1, "radius_server_get_new_session"):
with fail_test(authsrv, 1,
"radius_msg_finish_srv;radius_server_reject"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
wait_fail_trigger(authsrv, "GET_FAIL")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
authsrv.disable()
with alloc_fail(authsrv, 1, "radius_server_init;hostapd_setup_radius_srv"):
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
with alloc_fail(authsrv, 2, "radius_server_init;hostapd_setup_radius_srv"):
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
for count in range(1, 4):
with alloc_fail(authsrv, count,
"radius_server_read_clients;radius_server_init;hostapd_setup_radius_srv"):
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
with alloc_fail(authsrv, 1, "eloop_sock_table_add_sock;radius_server_init;hostapd_setup_radius_srv"):
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
with alloc_fail(authsrv, 1, "tls_init;authsrv_init"):
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
for count in range(1, 3):
with alloc_fail(authsrv, count, "eap_sim_db_init;authsrv_init"):
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
def test_authsrv_errors_1(dev, apdev):
"""Authentication server errors (1)"""
params = authsrv_params()
params["eap_user_file"] = "sqlite:auth_serv/does-not-exist/does-not-exist"
authsrv = hostapd.add_ap(apdev[1], params, no_enable=True)
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded with invalid SQLite EAP user file")
def test_authsrv_errors_2(dev, apdev):
"""Authentication server errors (2)"""
params = authsrv_params()
params["radius_server_clients"] = "auth_serv/does-not-exist"
authsrv = hostapd.add_ap(apdev[1], params, no_enable=True)
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded with invalid RADIUS client file")
def test_authsrv_errors_3(dev, apdev):
"""Authentication server errors (3)"""
params = authsrv_params()
params["eap_sim_db"] = "unix:/tmp/hlr_auc_gw.sock db=auth_serv/does-not-exist/does-not-exist"
authsrv = hostapd.add_ap(apdev[1], params, no_enable=True)
if "FAIL" not in authsrv.request("ENABLE"):
raise Exception("ENABLE succeeded with invalid RADIUS client file")
def test_authsrv_testing_options(dev, apdev):
"""Authentication server and testing options"""
params = authsrv_params()
authsrv = hostapd.add_ap(apdev[1], params)
params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
params['auth_server_port'] = "18128"
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(hapd.own_addr(), 2412)
# The first two would be fine to run with any server build; the rest are
# actually supposed to fail, but they don't fail when using a server build
# that does not support the TLS protocol tests.
tests = ["foo@test-unknown",
"foo@test-tls-unknown",
"foo@test-tls-1",
"foo@test-tls-2",
"foo@test-tls-3",
"foo@test-tls-4",
"foo@test-tls-5",
"foo@test-tls-6",
"foo@test-tls-7",
"foo@test-tls-8"]
for t in tests:
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity=t,
password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
scan_freq="2412")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
def test_authsrv_unknown_user(dev, apdev):
"""Authentication server and unknown user"""
params = authsrv_params()
params["eap_user_file"] = "auth_serv/eap_user_vlan.conf"
authsrv = hostapd.add_ap(apdev[1], params)
params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
params['auth_server_port'] = "18128"
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
def test_authsrv_unknown_client(dev, apdev):
"""Authentication server and unknown user"""
params = authsrv_params()
params["radius_server_clients"] = "auth_serv/radius_clients_none.conf"
authsrv = hostapd.add_ap(apdev[1], params)
params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
params['auth_server_port'] = "18128"
hapd = hostapd.add_ap(apdev[0], params)
# RADIUS SRV: Unknown client 127.0.0.1 - packet ignored
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
eap="TTLS", identity="user",
anonymous_identity="ttls", password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=10)
if ev is None:
raise Exception("EAP not started")
dev[0].request("REMOVE_NETWORK all")
| 45.159696 | 105 | 0.604867 | 1,501 | 11,877 | 4.561626 | 0.123917 | 0.028041 | 0.025705 | 0.040894 | 0.84475 | 0.803125 | 0.776837 | 0.74602 | 0.74602 | 0.713013 | 0 | 0.025705 | 0.263029 | 11,877 | 262 | 106 | 45.332061 | 0.756541 | 0.055317 | 0 | 0.685185 | 0 | 0.00463 | 0.281765 | 0.098783 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037037 | false | 0.055556 | 0.009259 | 0 | 0.050926 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
0ecf127c9767f1d1ab3e898b6a14a0f4cd7b239c | 309 | py | Python | QCA4020_SDK/target/sectools/qdn/sectools/features/isc/stager/__init__.py | r8d8/lastlock | 78c02e5fbb129b1bc4147bd55eec2882267d7e87 | [
"Apache-2.0"
] | null | null | null | QCA4020_SDK/target/sectools/qdn/sectools/features/isc/stager/__init__.py | r8d8/lastlock | 78c02e5fbb129b1bc4147bd55eec2882267d7e87 | [
"Apache-2.0"
] | null | null | null | QCA4020_SDK/target/sectools/qdn/sectools/features/isc/stager/__init__.py | r8d8/lastlock | 78c02e5fbb129b1bc4147bd55eec2882267d7e87 | [
"Apache-2.0"
] | null | null | null | # ===============================================================================
#
# Copyright (c) 2013-2017 Qualcomm Technologies, Inc.
# All Rights Reserved.
# Confidential and Proprietary - Qualcomm Technologies, Inc.
#
# ===============================================================================
| 38.625 | 81 | 0.346278 | 16 | 309 | 6.6875 | 0.8125 | 0.373832 | 0.429907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02847 | 0.090615 | 309 | 7 | 82 | 44.142857 | 0.352313 | 0.951456 | 0 | null | 0 | null | 0 | 0 | null | 1 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0ee82bbf5371dc94b5ea74ccfb3f64c790c859c1 | 182 | py | Python | src/pyndf/qtlib.py | Guillaume-Guardia/ndf-python | 836498b210d2f921e76292df8046cd79006b458a | [
"MIT"
] | null | null | null | src/pyndf/qtlib.py | Guillaume-Guardia/ndf-python | 836498b210d2f921e76292df8046cd79006b458a | [
"MIT"
] | null | null | null | src/pyndf/qtlib.py | Guillaume-Guardia/ndf-python | 836498b210d2f921e76292df8046cd79006b458a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
try:
from PyQt6 import QtWidgets, QtCore, QtGui
from PyQt6.lupdate import lupdate
except ImportError:
from PyQt5 import QtWidgets, QtCore, QtGui
| 22.75 | 46 | 0.708791 | 23 | 182 | 5.608696 | 0.608696 | 0.139535 | 0.325581 | 0.403101 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027586 | 0.203297 | 182 | 7 | 47 | 26 | 0.862069 | 0.115385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.8 | 0 | 0.8 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1607484aa583c4a1de5d1c6c9caa02ed7031e722 | 175 | py | Python | webrtc_pkg/webrtc_pkg/RTCCam/__init__.py | Road-Balance/rb_nanosaur | cc17d184e926f3a021698e6f52e916b107dc0fc5 | [
"RSA-MD"
] | 7 | 2021-05-29T09:28:08.000Z | 2021-12-11T10:57:10.000Z | webrtc_pkg/webrtc_pkg/RTCCam/__init__.py | Road-Balance/rb_nanosaur | cc17d184e926f3a021698e6f52e916b107dc0fc5 | [
"RSA-MD"
] | 1 | 2021-05-31T20:49:13.000Z | 2021-06-01T11:39:16.000Z | webrtc_pkg/webrtc_pkg/RTCCam/__init__.py | Road-Balance/rb_nanosaur | cc17d184e926f3a021698e6f52e916b107dc0fc5 | [
"RSA-MD"
] | 1 | 2022-03-29T08:38:57.000Z | 2022-03-29T08:38:57.000Z | from .rtc_cam import WebCam
from .rtc_cam import GSTCam
from .rtc_cam import CSICam
from .rtc_cam import RawCam
from .dual_cam import DualCam
from .dual_cam import DualCSICam
| 25 | 32 | 0.828571 | 30 | 175 | 4.633333 | 0.366667 | 0.388489 | 0.28777 | 0.460432 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137143 | 175 | 6 | 33 | 29.166667 | 0.92053 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
160eab035185ce69c6d82371933904b8f566e5ba | 49,957 | py | Python | api/tests.py | max-belichenko/Flask-SQLAlchemy-API | 9c52be5ef43234718d8fa0152f85ae9a20d0424c | [
"BSD-2-Clause"
] | null | null | null | api/tests.py | max-belichenko/Flask-SQLAlchemy-API | 9c52be5ef43234718d8fa0152f85ae9a20d0424c | [
"BSD-2-Clause"
] | null | null | null | api/tests.py | max-belichenko/Flask-SQLAlchemy-API | 9c52be5ef43234718d8fa0152f85ae9a20d0424c | [
"BSD-2-Clause"
] | null | null | null | import base64
import os
import secrets
import unittest
from api.config import basedir, Config
from api import app, db
from api.models import *
class DatabaseTests(unittest.TestCase):
    """Exercise the ORM models (User, Role, Status) against a scratch SQLite database."""

    def setUp(self):
        # Configure the app for isolated testing with a throwaway SQLite file.
        settings = {
            'TESTING': True,
            'WTF_CSRF_ENABLED': False,
            'SECRET_KEY': secrets.token_urlsafe(16),
            'DEBUG': False,
            'SQLALCHEMY_DATABASE_URI': 'sqlite:///' + os.path.join(basedir, 'test.db'),
        }
        app.config.update(settings)
        db.create_all()
        self.app = app.test_client()

    def tearDown(self) -> None:
        # Drop every table so each test starts from an empty database.
        db.drop_all()

    def test_db_users(self):
        """A user record round-trips through the database and can carry roles."""
        name, secret = 'username', 'password'

        account = User(username=name)
        account.hash_password(secret)
        db.session.add(account)
        db.session.commit()

        # A freshly loaded user matches what was stored and has no roles yet.
        account = User.query.filter_by(username=name).first()
        self.assertEqual(account.username, name)
        self.assertTrue(account.verify_password(secret))
        self.assertEqual(account.roles, [])

        # Create a role and attach it to the user.
        role = Role(code='TEST_ROLE_CODE', title='This is a test role')
        db.session.add(role)
        db.session.commit()
        account.roles.append(role)
        db.session.commit()

        # Reload and confirm the role assignment was persisted.
        account = User.query.filter_by(username=name).first()
        self.assertEqual(account.username, name)
        self.assertTrue(account.verify_password(secret))
        self.assertEqual(account.roles, [role])

    def test_db_roles(self):
        """A role record round-trips through the database."""
        role = Role(code='code', title='title')
        db.session.add(role)
        db.session.commit()

        fetched = Role.query.filter_by(code='code').first()
        self.assertEqual(fetched.code, 'code')
        self.assertEqual(fetched.title, 'title')

    def test_db_statuses(self):
        """A status record round-trips through the database."""
        status = Status(code='code', title='title')
        db.session.add(status)
        db.session.commit()

        fetched = Status.query.filter_by(code='code').first()
        self.assertEqual(fetched.code, 'code')
        self.assertEqual(fetched.title, 'title')
class APITests(unittest.TestCase):
@classmethod
def setUpClass(cls):
app.config['TESTING'] = True
app.config['WTF_CSRF_ENABLED'] = False
app.config['SECRET_KEY'] = secrets.token_urlsafe(16)
app.config['DEBUG'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'test.db')
    @classmethod
    def tearDownClass(cls):
        # Nothing to release at class level; per-test cleanup happens in tearDown().
        pass
def setUp(self):
db.create_all()
# Заполнить роли
for key in Config.ACCOUNT_ROLES:
if Role.query.filter_by(code=key).first() is None:
role_object = Role(code=key, title=Config.ACCOUNT_ROLES[key])
db.session.add(role_object)
db.session.commit()
# Заполнить статусы заявок
for key in Config.REQUEST_STATUSES:
if Status.query.filter_by(code=key).first() is None:
status_object = Status(code=key, title=Config.REQUEST_STATUSES[key])
db.session.add(status_object)
db.session.commit()
# Создать пользователей
users = [
{'username': 'user', 'password': 'user_password'},
{'username': 'operator', 'password': 'operator_password'},
{'username': 'admin', 'password': 'admin_password'},
]
for user in users:
if User.query.filter_by(username=user['username']).first() is None:
user_object = User(username=user['username'])
user_object.hash_password(user['password'])
db.session.add(user_object)
db.session.commit()
# Назначить роли пользователям
user_roles = [
{'username': 'user', 'role': Config.USER_ROLE},
{'username': 'operator', 'role': Config.OPERATOR_ROLE},
{'username': 'admin', 'role': Config.ADMINISTRATOR_ROLE},
]
for user in user_roles:
user_object = User.query.filter_by(username=user['username']).first()
role_object = Role.query.filter_by(code=user['role']).first()
user_object.roles.append(role_object)
db.session.commit()
self.app = app.test_client()
self.test_users = ['user', 'operator', 'admin']
self.admin_authorization = 'Basic ' + base64.b64encode('admin:admin_password'.encode('utf-8')).decode('ascii')
self.operator_authorization = 'Basic ' + base64.b64encode('operator:operator_password'.encode('utf-8')).decode('ascii')
self.user_authorization = 'Basic ' + base64.b64encode('user:user_password'.encode('utf-8')).decode('ascii')
    def tearDown(self):
        # Drop the whole schema so every test starts from an empty database.
        db.drop_all()
def test_users_access(self):
"""
Проверяет доступ к /users для различных ролей пользователей.
:return:
"""
address = '/users'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
}
# Проверяет, что разрешённые методы доступа открыты только для соответствующих ролей
for auth in [self.admin_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 200)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
# Проверяет, что разрешённые методы доступа закрыты для соответствующих ролей
for auth in [self.operator_authorization, self.user_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 403, f'"GET /users" - Access forbidden for {auth}')
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
def test_get_users_created(self):
"""
Проверяет возможность получить созданного пользователя.
:return:
"""
username = 'test_user'
password = 'password'
user = User(username=username)
user.hash_password(password)
db.session.add(user)
db.session.commit()
address = '/users'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.admin_authorization,
}
result = self.app.get(address, headers=headers)
data = result.get_json()
for item in data:
if item['id'] == user.id:
self.assertEqual(item['username'], username,
'"GET /users" - Created user - Check username')
self.assertTrue(custom_app_context.verify(password, item['password_hash']),
'"GET /users" - Created user - Check password')
self.assertEqual(item['roles'], [],
'"GET /users" - Created user - Check roles')
def test_user_access(self):
"""
Проверяет доступ к /users/<id> для различных ролей пользователей.
:return:
"""
address = '/users/1'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
}
# Проверяет, что доступ открыт для соответствующих ролей
for auth in [self.admin_authorization]:
headers['Authorization'] = auth
result = self.app.put(address, headers=headers, json='')
self.assertEqual(result.status_code, 200, f'"GET /users" - Access granted')
# Проверяет, что доступ закрыт для соответствующих ролей
for auth in [self.operator_authorization, self.user_authorization]:
headers['Authorization'] = auth
result = self.app.put(address, headers=headers, json='')
self.assertEqual(result.status_code, 403, f'"GET /users" - Access forbidden')
def test_put_user_role(self):
"""
Проверяет назначение роли пользователю.
:return:
"""
username = 'test_user'
password = 'password'
user = User(username=username)
user.hash_password(password)
db.session.add(user)
db.session.commit()
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.admin_authorization,
}
address = f'/users/{user.id}'
json = {
'role': Config.ADMINISTRATOR_ROLE,
'status': '1'
}
result = self.app.put(address, headers=headers, json=json)
self.assertEqual(result.get_json()['error'], f'Could not assign role "{Config.ADMINISTRATOR_ROLE}"')
address = f'/users/{user.id}'
json = {
'role': Config.OPERATOR_ROLE,
'status': '2'
}
result = self.app.put(address, headers=headers, json=json)
self.assertEqual(result.get_json()['error'], 'No such status "2"')
address = f'/users/{user.id}'
json = ''
result = self.app.put(address, headers=headers, json=json)
self.assertEqual(result.get_json()['error'], 'Received incorrect data. Awaiting JSON {"role", "status"}')
address = f'/users/{user.id}'
json = {
'role': Config.OPERATOR_ROLE,
'status': '1'
}
result = self.app.put(address, headers=headers, json=json)
self.assertEqual(result.status_code, 200)
address = '/users'
result = self.app.get(address, headers=headers)
data = result.get_json()
for item in data:
if item['id'] == user.id:
roles = item['roles']
self.assertEqual(len(roles), 1)
self.assertEqual(roles[0]['code'], Config.OPERATOR_ROLE)
break
else:
self.fail('User ID not found.')
address = f'/users/{user.id}'
json = {
'role': Config.OPERATOR_ROLE,
'status': '0'
}
result = self.app.put(address, headers=headers, json=json)
self.assertEqual(result.status_code, 200)
address = '/users'
result = self.app.get(address, headers=headers)
data = result.get_json()
for item in data:
if item['id'] == user.id:
roles = item['roles']
self.assertEqual(len(roles), 0)
# data = result.get_json()
# for user in data:
# self.assertIn(user['username'], users, '')
# for role in user['roles']:
# self.assertIn(role, Config.ACCOUNT_ROLES, '')
def test_roles_access(self):
"""
Проверяет доступ к /roles для различных ролей пользователей.
:return:
"""
address = '/roles'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
}
# Проверяет, что разрешённые методы доступа открыты только для соответствующих ролей
for auth in [self.admin_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 200)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
# Проверяет, что разрешённые методы доступа закрыты для соответствующих ролей
for auth in [self.operator_authorization, self.user_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 403)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
def test_get_roles_created(self):
"""
Проверяет возможность получить созданную роль.
:return:
"""
code = 'test_role_code'
title = 'test_role_title'
role = Role(code=code, title=title)
db.session.add(role)
db.session.commit()
address = '/roles'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.admin_authorization,
}
result = self.app.get(address, headers=headers)
data = result.get_json()
for item in data:
if item['id'] == role.id:
self.assertEqual(item['code'], code)
self.assertEqual(item['title'], title)
break
else:
self.fail('Role ID not found.')
def test_requests_access(self):
"""
Проверяет доступ к /requests для различных ролей пользователей.
:return:
"""
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
}
# Проверяет, что разрешённые методы доступа открыты только для соответствующих ролей
for auth in [self.user_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 200)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 400)
result = self.app.post(address, headers=headers, json='')
self.assertEqual(result.status_code, 200)
self.assertEqual(result.get_json()['error'], 'Received incorrect data. Awaiting JSON {"text"}')
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
# Проверяет, что разрешённые методы доступа закрыты для соответствующих ролей
for auth in [self.operator_authorization, self.admin_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 403, f'GET {address}')
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 403, f'POST {address}')
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405, f'PUT {address}')
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405, f'DELETE {address}')
def test_get_requests(self):
"""
Проверяет возможность получить заявку.
:return:
"""
username = 'user'
status_code = Config.DRAFT_STATUS
user = User.query.filter_by(username=username).first()
status = Status.query.filter_by(code=status_code).first()
status_id = status.id
text = 'This is a test request.'
object = Request(
user=user,
status=status,
text=text
)
db.session.add(object)
db.session.commit()
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.get(address, headers=headers)
data = result.get_json()
for item in data:
if item['id'] == object.id:
self.assertIsNotNone(item['created_dt'])
self.assertIsNone(item['updated_dt'])
self.assertEqual(item['text'], 'This is a test request.')
self.assertEqual(item['status_id'], status_id)
break
else:
self.fail('Request ID not found.')
def test_post_request_by_user(self):
"""
Проверяет возможность Пользователя создать заявку .
:return:
"""
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
self.assertIsNotNone(data['created_dt'])
self.assertIsNone(data['updated_dt'])
self.assertEqual(data['status_id'], 1)
self.assertEqual(data['text'], text)
def test_post_request_by_opertaor(self):
"""
Проверяет возможность Оператора создать заявку.
:return:
"""
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
self.assertEqual(result.status_code, 403)
def test_post_request_by_administrator(self):
"""
Проверяет возможность Администратора создать заявку.
:return:
"""
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.admin_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
self.assertEqual(result.status_code, 403)
def test_get_request_by_owner(self):
"""
Проверяет возможность получить заявку Пользователем, который ей создал.
:return:
"""
username = 'user'
status_code = Config.DRAFT_STATUS
text = 'This is a test request.'
user = User.query.filter_by(username=username).first()
status = Status.query.filter_by(code=status_code).first()
instance = Request(
user=user,
status=status,
text=text
)
db.session.add(instance)
db.session.commit()
uri = f'/requests/{instance.id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.get(uri, headers=headers)
data = result.get_json()
self.assertEqual(data['id'], instance.id)
def test_get_request_by_another_user(self):
"""
Проверяет возможность получить заявку Пользователем, который не создавал её.
:return:
"""
# Создать заявку Пользователем user
username = 'user'
status_code = Config.DRAFT_STATUS
text = 'This is a test request.'
user = User.query.filter_by(username=username).first()
status = Status.query.filter_by(code=status_code).first()
request_instance = Request(
user=user,
status=status,
text=text
)
db.session.add(request_instance)
db.session.commit()
# Создать нового пользователя test_user
username = 'test_user'
password = 'password'
role_code = Config.USER_ROLE
user = User(username=username)
user.hash_password(password)
role = Role.query.filter_by(code=role_code).first()
user.roles.append(role)
db.session.add(user)
db.session.commit()
# Проверить доступ к заявке нового пользователя
uri = f'/requests/{request_instance.id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': 'Basic ' + base64.b64encode(f'{username}:{password}'.encode('utf-8')).decode('ascii'),
}
result = self.app.get(uri, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertFalse(data)
def test_get_request_by_operator(self):
"""
Проверяет возможность получить заявку Оператором.
:return:
"""
# Создать заявку Пользователем user
username = 'user'
status_code = Config.DRAFT_STATUS
text = 'This is a test request.'
user = User.query.filter_by(username=username).first()
status = Status.query.filter_by(code=status_code).first()
request_instance = Request(
user=user,
status=status,
text=text
)
db.session.add(request_instance)
db.session.commit()
# Проверить доступ к заявке Оператора
uri = f'/requests/{request_instance.id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.get(uri, headers=headers)
self.assertEqual(result.status_code, 403)
def test_get_request_by_administrator(self):
"""
Проверяет возможность получить заявку Администратором.
:return:
"""
# Создать заявку Пользователем user
username = 'user'
status_code = Config.DRAFT_STATUS
text = 'This is a test request.'
user = User.query.filter_by(username=username).first()
status = Status.query.filter_by(code=status_code).first()
request_instance = Request(
user=user,
status=status,
text=text
)
db.session.add(request_instance)
db.session.commit()
# Проверить доступ к заявке Оператора
uri = f'/requests/{request_instance.id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.admin_authorization,
}
result = self.app.get(uri, headers=headers)
self.assertEqual(result.status_code, 403)
def test_put_request_by_owner(self):
"""
Проверяет возможность Пользователя изменить свою заявку .
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Проверить возможность изменить заявку
updated_text = "Updated by owner"
address = f'/requests/{request_id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.put(address, headers=headers, json={'text': updated_text})
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], updated_text)
def test_put_request_by_another_user(self):
"""
Проверяет возможность Пользователя изменить чужую заявку .
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Создать нового пользователя test_user
username = 'test_user'
password = 'password'
role_code = Config.USER_ROLE
user = User(username=username)
user.hash_password(password)
role = Role.query.filter_by(code=role_code).first()
user.roles.append(role)
db.session.add(user)
db.session.commit()
# Проверить возможность изменить заявку
updated_text = "Updated by owner"
address = f'/requests/{request_id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': 'Basic ' + base64.b64encode(f'{username}:{password}'.encode('utf-8')).decode('ascii'),
}
result = self.app.put(address, headers=headers, json={'text': updated_text})
data = result.get_json()
self.assertEqual(result.status_code, 200)
answer_message_starts_with = f'No request with id = {request_id} for user {user.id} "{username}"'
self.assertEqual(data['error'][:len(answer_message_starts_with)], answer_message_starts_with)
def test_put_request_by_operator(self):
"""
Проверяет возможность Оператора изменить заявку .
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Проверить возможность изменить заявку
updated_text = "Updated by operator"
address = f'/requests/{request_id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.put(address, headers=headers, json={'text': updated_text})
data = result.get_json()
self.assertEqual(result.status_code, 403)
def test_put_request_by_administrator(self):
"""
Проверяет возможность Администратора изменить заявку .
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Проверить возможность изменить заявку
updated_text = "Updated by admin"
address = f'/requests/{request_id}'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.admin_authorization,
}
result = self.app.put(address, headers=headers, json={'text': updated_text})
data = result.get_json()
self.assertEqual(result.status_code, 403)
def test_send_request_by_owner(self):
"""
Проверяет возможность Пользователя отправить свою заявку.
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Проверить возможность отправить заявку на рассмотрение
address = f'/requests/{request_id}/send'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['id'], request_id)
self.assertEqual(data['text'], text)
self.assertEqual(data['status_id'], 2)
def test_send_request_by_another_user(self):
"""
Проверяет возможность Пользователя отправить чужую заявку .
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Создать нового пользователя test_user
username = 'test_user'
password = 'password'
role_code = Config.USER_ROLE
user = User(username=username)
user.hash_password(password)
role = Role.query.filter_by(code=role_code).first()
user.roles.append(role)
db.session.add(user)
db.session.commit()
# Проверить возможность изменить заявку
address = f'/requests/{request_id}/send'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': 'Basic ' + base64.b64encode(f'{username}:{password}'.encode('utf-8')).decode('ascii'),
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
answer_message_starts_with = f'No request with id = {request_id} for user {user.id} "{username}"'
self.assertEqual(data['error'][:len(answer_message_starts_with)], answer_message_starts_with)
def test_send_request_by_operator(self):
"""
Проверяет возможность Оператора отправить чужую заявку.
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Проверить возможность изменить заявку
address = f'/requests/{request_id}/send'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 403)
def test_send_request_by_administrator(self):
"""
Проверяет возможность Администратора отправить чужую заявку.
:return:
"""
# Создать тестовую заявку
text = 'This is a test request.'
address = '/requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.user_authorization,
}
result = self.app.post(address, headers=headers, json={'text': text})
data = result.get_json()
request_id = data['id']
self.assertEqual(result.status_code, 200)
self.assertEqual(data['text'], text)
# Проверить возможность изменить заявку
address = f'/requests/{request_id}/send'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.admin_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 403)
def test_get_sent_requests_access(self):
"""
Проверяет доступ к /sent_requests для различных ролей пользователей.
:return:
"""
address = '/sent_requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
}
# Проверяет, что разрешённые методы доступа открыты только для соответствующих ролей
for auth in [self.operator_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 200)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
# Проверяет, что разрешённые методы доступа закрыты для соответствующих ролей
for auth in [self.user_authorization, self.admin_authorization]:
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 403, f'GET {address}')
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 405, f'POST {address}')
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405, f'PUT {address}')
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405, f'DELETE {address}')
def test_get_sent_request_by_operator(self):
"""
Проверяет возможность Оператора получить список, содержащий одну заявку пользователя.
:return:
"""
# Создать заявки в статусе Отправлено
username = 'user'
status_code = Config.SENT_STATUS
user = User.query.filter_by(username=username).first()
status = Status.query.filter_by(code=status_code).first()
text = f'This is a test request'
request_instance = Request(
user=user,
status=status,
text=text
)
db.session.add(request_instance)
db.session.commit()
uri = f'/sent_requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.get(uri, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['text'], 'T-h-i-s- -i-s- -a- -t-e-s-t- -r-e-q-u-e-s-t')
def test_get_sent_requests_by_operator(self):
"""
Проверяет возможность Оператора получить список отправленных заявок пользователей.
:return:
"""
# Создать заявки в статусе Отправлено
username = 'user'
status_code = Config.SENT_STATUS
user = User.query.filter_by(username=username).first()
status = Status.query.filter_by(code=status_code).first()
for i in range(10):
text = f'This is a test request #{i}'
request_instance = Request(
user=user,
status=status,
text=text
)
db.session.add(request_instance)
db.session.commit()
uri = f'/sent_requests'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.get(uri, headers=headers)
data = result.get_json()
self.assertEqual(len(data), 10)
requests = [f'T-h-i-s- -i-s- -a- -t-e-s-t- -r-e-q-u-e-s-t- -#-{i}' for i in range(10)]
for item in data:
requests.pop(requests.index(item['text']))
self.assertEqual(len(requests), 0)
def test_accept_request_access(self):
"""
Проверяет доступ к /requests/<id>/accept для различных ролей пользователей.
:return:
"""
# Создать заявки в статусе Отправлено
username = 'user'
user = User.query.filter_by(username=username).first()
status_code = Config.SENT_STATUS
status = Status.query.filter_by(code=status_code).first()
text = f'This is a test request'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
}
# Проверяет, что разрешённые методы доступа открыты только для соответствующих ролей
for auth in [self.operator_authorization]:
request_instance = Request(user=user, status=status, text=text)
db.session.add(request_instance)
db.session.commit()
address = f'/requests/{request_instance.id}/accept'
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 200)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
# Проверяет, что разрешённые методы доступа закрыты для соответствующих ролей
for auth in [self.user_authorization, self.admin_authorization]:
request_instance = Request(user=user, status=status, text=text)
db.session.add(request_instance)
db.session.commit()
address = f'/requests/{request_instance.id}/accept'
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 403)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
def test_accept_request_by_operator(self):
"""
Проверяет возможность Оператора принять заявку.
:return:
"""
# Создать заявки в статусе Отправлено
username = 'user'
user = User.query.filter_by(username=username).first()
status_code = Config.SENT_STATUS
status = Status.query.filter_by(code=status_code).first()
text = f'This is a test request'
request_instance = Request(user=user, status=status, text=text)
db.session.add(request_instance)
db.session.commit()
# Проверить возможность принять заявку
address = f'/requests/{request_instance.id}/accept'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['id'], request_instance.id)
self.assertEqual(data['text'], text)
self.assertEqual(data['status_id'], 3)
def test_accept_non_existent_request_by_operator(self):
"""
Проверяет возможность Оператора принять несуществующую заявку.
:return:
"""
request_id = 'non-existent-request'
# Проверить возможность принять заявку
address = f'/requests/{request_id}/accept'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['error'], f'No request with id = {request_id} with SENT status.')
def test_accept_request_with_wrong_status_by_operator(self):
"""
Проверяет возможность Оператора принять заявку в статусе, отличном от Отправлено.
:return:
"""
wrong_statuses = [
Config.DRAFT_STATUS,
Config.ACCEPTED_STATUS,
Config.REJECTED_STATUS,
]
username = 'user'
user_instance = User.query.filter_by(username=username).first()
text = f'This is a test request'
for status in wrong_statuses:
with self.subTest(status=status):
# self.assertEqual(a, b, name)
status_code = status
status_instance = Status.query.filter_by(code=status_code).first()
request_instance = Request(user=user_instance, status=status_instance, text=text)
db.session.add(request_instance)
db.session.commit()
# Проверить возможность принять заявку
address = f'/requests/{request_instance.id}/accept'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['error'], f'No request with id = {request_instance.id} with SENT status.', status)
def test_reject_request_access(self):
"""
Проверяет доступ к /requests/<id>/reject для различных ролей пользователей.
:return:
"""
# Создать заявки в статусе Отправлено
username = 'user'
user = User.query.filter_by(username=username).first()
status_code = Config.SENT_STATUS
status = Status.query.filter_by(code=status_code).first()
text = f'This is a test request'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
}
# Проверяет, что разрешённые методы доступа открыты только для соответствующих ролей
for auth in [self.operator_authorization]:
request_instance = Request(user=user, status=status, text=text)
db.session.add(request_instance)
db.session.commit()
address = f'/requests/{request_instance.id}/reject'
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 200)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
# Проверяет, что разрешённые методы доступа закрыты для соответствующих ролей
for auth in [self.user_authorization, self.admin_authorization]:
request_instance = Request(user=user, status=status, text=text)
db.session.add(request_instance)
db.session.commit()
address = f'/requests/{request_instance.id}/reject'
headers['Authorization'] = auth
result = self.app.get(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.post(address, headers=headers)
self.assertEqual(result.status_code, 403)
result = self.app.put(address, headers=headers)
self.assertEqual(result.status_code, 405)
result = self.app.delete(address, headers=headers)
self.assertEqual(result.status_code, 405)
def test_reject_request_by_operator(self):
"""
Проверяет возможность Оператора отклонять заявку.
:return:
"""
# Создать заявки в статусе Отправлено
username = 'user'
user = User.query.filter_by(username=username).first()
status_code = Config.SENT_STATUS
status = Status.query.filter_by(code=status_code).first()
text = f'This is a test request'
request_instance = Request(user=user, status=status, text=text)
db.session.add(request_instance)
db.session.commit()
# Проверить возможность принять заявку
address = f'/requests/{request_instance.id}/reject'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['id'], request_instance.id)
self.assertEqual(data['text'], text)
self.assertEqual(data['status_id'], 4)
def test_reject_non_existent_request_by_operator(self):
"""
Проверяет возможность Оператора отклонить несуществующую заявку.
:return:
"""
request_id = 'non-existent-request'
# Проверить возможность принять заявку
address = f'/requests/{request_id}/reject'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['error'], f'No request with id = {request_id} with SENT status.')
def test_reject_request_with_wrong_status_by_operator(self):
"""
Проверяет возможность Оператора отклонить заявку в статусе, отличном от Отправлено.
:return:
"""
wrong_statuses = [
Config.DRAFT_STATUS,
Config.ACCEPTED_STATUS,
Config.REJECTED_STATUS,
]
username = 'user'
user_instance = User.query.filter_by(username=username).first()
text = f'This is a test request'
for status in wrong_statuses:
with self.subTest(status=status):
status_code = status
status_instance = Status.query.filter_by(code=status_code).first()
request_instance = Request(user=user_instance, status=status_instance, text=text)
db.session.add(request_instance)
db.session.commit()
# Проверить возможность принять заявку
address = f'/requests/{request_instance.id}/reject'
headers = {
'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': self.operator_authorization,
}
result = self.app.post(address, headers=headers)
data = result.get_json()
self.assertEqual(result.status_code, 200)
self.assertEqual(data['error'], f'No request with id = {request_instance.id} with SENT status.', status)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 35.156228 | 127 | 0.595312 | 5,263 | 49,957 | 5.528786 | 0.049021 | 0.069077 | 0.041102 | 0.07516 | 0.900715 | 0.872294 | 0.859372 | 0.82772 | 0.799505 | 0.785174 | 0 | 0.00848 | 0.289469 | 49,957 | 1,420 | 128 | 35.180986 | 0.811275 | 0.105571 | 0 | 0.766316 | 0 | 0.002105 | 0.157588 | 0.046232 | 0 | 0 | 0 | 0 | 0.148421 | 1 | 0.045263 | false | 0.028421 | 0.007368 | 0 | 0.054737 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
164f99fbccf47d79142126e11789ea6588ac008b | 291,800 | py | Python | generated-libraries/python/netapp/snapmirror/__init__.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/snapmirror/__init__.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/snapmirror/__init__.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | from netapp.connection import NaConnection
from snapmirror_promote_iter_info import SnapmirrorPromoteIterInfo # 3 properties
from snapmirror_initialize_iter_key_td import SnapmirrorInitializeIterKeyTd # 14 properties
from snapmirror_release_iter_info import SnapmirrorReleaseIterInfo # 3 properties
from snapmirror_resume_iter_info import SnapmirrorResumeIterInfo # 3 properties
from snapmirror_modify_iter_key_td import SnapmirrorModifyIterKeyTd # 14 properties
from snapmirror_modify_iter_info import SnapmirrorModifyIterInfo # 3 properties
from snapmirror_break_iter_key_td import SnapmirrorBreakIterKeyTd # 14 properties
from snapmirror_destroy_iter_info import SnapmirrorDestroyIterInfo # 3 properties
from snapmirror_destination_info import SnapmirrorDestinationInfo # 13 properties
from snapmirror_error import SnapmirrorError # 2 properties
from snapmirror_promote_iter_key_td import SnapmirrorPromoteIterKeyTd # 14 properties
from snapmirror_resume_iter_key_td import SnapmirrorResumeIterKeyTd # 14 properties
from snapmirror_initialize_iter_info import SnapmirrorInitializeIterInfo # 5 properties
from snapmirror_quiesce_iter_key_td import SnapmirrorQuiesceIterKeyTd # 14 properties
from snapmirror_abort_iter_info import SnapmirrorAbortIterInfo # 3 properties
from snapmirror_destroy_iter_key_td import SnapmirrorDestroyIterKeyTd # 14 properties
from snapmirror_resync_iter_info import SnapmirrorResyncIterInfo # 5 properties
from snapmirror_status_info import SnapmirrorStatusInfo # 17 properties
from snapmirror_get_destination_iter_key_td import SnapmirrorGetDestinationIterKeyTd # 7 properties
from address_pair import AddressPair # 2 properties
from snapmirror_update_iter_info import SnapmirrorUpdateIterInfo # 5 properties
from snapmirror_sync_schedule_info import SnapmirrorSyncScheduleInfo # 9 properties
from snapmirror_resync_iter_key_td import SnapmirrorResyncIterKeyTd # 14 properties
from snapmirror_info import SnapmirrorInfo # 43 properties
from snapmirror_update_iter_key_td import SnapmirrorUpdateIterKeyTd # 14 properties
from snapmirror_check_iter_key_td import SnapmirrorCheckIterKeyTd # 14 properties
from snapmirror_schedule_info import SnapmirrorScheduleInfo # 10 properties
from snapshot_owner_name import SnapshotOwnerName # 0 properties
from snapmirror_abort_iter_key_td import SnapmirrorAbortIterKeyTd # 14 properties
from snapmirror_connection_info import SnapmirrorConnectionInfo # 5 properties
from snapmirror_break_iter_info import SnapmirrorBreakIterInfo # 3 properties
from snapmirror_release_iter_key_td import SnapmirrorReleaseIterKeyTd # 7 properties
from snapmirror_quiesce_iter_info import SnapmirrorQuiesceIterInfo # 3 properties
from snapmirror_get_iter_key_td import SnapmirrorGetIterKeyTd # 14 properties
from snapmirror_check_iter_info import SnapmirrorCheckIterInfo # 5 properties
from destination_info import DestinationInfo # 3 properties
class SnapmirrorConnection(NaConnection):
def snapmirror_get_status(self, location=None):
"""
Return the SnapMirror status. This API can be issued
on either the source or destination filer.
:param location: The source location or destination location of the
SnapMirror pair. Possible types are volume or qtree only.
If this input is provided, only the SnapMirror relationships
with a matching source or destination will be reported.
The argument is invalid if the named location doesn't exist.
In this case, snapmirror-status-info output will not be present.
The argument can also be invalid if it is a flexclone name.
(Be aware that the snapmirror-list-destinations API can return
flexclone names.) Then snapmirror-get-status API will return
a snapmirror-status output value with a "state" of "unknown".
If the argument is not specified, all source, destination
SnapMirror pairs are returned.
"""
return self.request( "snapmirror-get-status", {
'location': [ location, 'location', [ basestring, 'None' ], False ],
}, {
'snapmirror-status': [ SnapmirrorStatusInfo, True ],
'is-available': [ bool, False ],
} )
def snapmirror_list_destinations(self, source_location=None):
"""
Returns a list of destination locations and information
about SnapMirror relationships for given source
locations, which can be a volume name or qtree path.
This API must be issued on the source filer.
:param source_location: Source location of the SnapMirror pair. The source
location is of the volume form: <filer>:<volume>
or the qtree form:
<filer>:/vol/<volume>/<qtree>.
If the source-location is not specified, then all source,
destination SnapMirror pairs are returned.
"""
return self.request( "snapmirror-list-destinations", {
'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
}, {
'destinations': [ DestinationInfo, True ],
} )
def snapmirror_get(self, source_vserver=None, source_volume=None, desired_attributes=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """Return information for one SnapMirror relationship.

    On Data ONTAP 8.1 operating in Cluster-Mode this API can be issued on
    either cluster of the relationship; on Data ONTAP 8.2 it must be
    issued on the destination Vserver or destination cluster. The
    destination endpoint must always be specified.

    Endpoints may be given either as a location string or as the
    (cluster, Vserver, volume) name triple. Location strings take one of
    these forms (the format may change in the future):

    * ``<system>:/vol/<volume>[/<qtree>]`` — 7-Mode.
    * ``[<cluster>:]//<vserver>/<volume>`` — 8.1 Cluster-Mode, and 8.2
      relationships using an 8.1-compatible control plane.
    * ``<[vserver:]volume>`` — 8.2 or later Cluster-Mode otherwise;
      depends on Vserver peering between source and destination.

    :param source_vserver: Source Vserver name; use together with the
        source volume, plus the source cluster on 8.1 (or 8.2+ when the
        relationship control plane is 'v1').
    :param source_volume: Source volume name; use together with the source
        Vserver, plus the source cluster on 8.1 Cluster-Mode (or 8.2+
        'v1' control plane).
    :param desired_attributes: Restrict which attributes are returned;
        when absent, all available attributes are returned.
    :param source_cluster: Source cluster name; requires source Vserver
        and source volume. Available only on 8.1 Cluster-Mode, or 8.2+
        when the control plane is 'v1'.
    :param destination_vserver: Destination Vserver name; use together
        with the destination volume, plus the destination cluster on 8.1
        (or 8.2+ 'v1' control plane).
    :param destination_location: Destination endpoint in one of the
        location formats above; alternative to the name triple. The
        destination cluster name is only required on 8.1 Cluster-Mode.
    :param destination_volume: Destination volume name; use together with
        the destination Vserver, plus the destination cluster on 8.1
        Cluster-Mode (or 8.2+ 'v1' control plane).
    :param source_location: Source endpoint in one of the location formats
        above; alternative to the name triple. The source cluster name is
        only required on 8.1 Cluster-Mode.
    :param destination_cluster: Destination cluster name; requires
        destination Vserver and destination volume. Available only on 8.1
        Cluster-Mode, or 8.2+ when the control plane is 'v1'.
    """
    # Input descriptor: [value, wire-name, [type, default], required].
    api_args = {
        'source_vserver': [source_vserver, 'source-vserver',
                           [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume',
                          [basestring, 'None'], False],
        'desired_attributes': [desired_attributes, 'desired-attributes',
                               [SnapmirrorInfo, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster',
                           [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver',
                                [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location',
                                 [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume',
                               [basestring, 'None'], False],
        'source_location': [source_location, 'source-location',
                            [basestring, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster',
                                [basestring, 'None'], False],
    }
    # Output descriptor: wire-name -> [type, is-list].
    output_desc = {
        'attributes': [SnapmirrorInfo, False],
    }
    return self.request("snapmirror-get", api_args, output_desc)
def snapmirror_resume(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """Re-enable transfers for a quiesced SnapMirror relationship.

    A scheduled transfer is triggered on the next schedule, and a restart
    checkpoint is re-used when possible. On Cluster-Mode, applying this to
    a load-sharing relationship resumes every relationship of the set.
    The resumed state persists across reboots and fail-overs.

    The relationship must exist on the destination, and the destination
    endpoint must be specified. Issue this API on the destination storage
    system (7-Mode), the destination cluster (8.1 Cluster-Mode), or the
    destination Vserver (8.2 or later Cluster-Mode).

    Endpoints may be given either as a location string or as the
    (cluster, Vserver, volume) name triple. Location strings take one of
    these forms (the format may change in the future):

    * ``<system>:/vol/<volume>[/<qtree>]`` — 7-Mode.
    * ``[<cluster>:]//<vserver>/<volume>`` — 8.1 Cluster-Mode, and 8.2
      relationships using an 8.1-compatible control plane.
    * ``<[vserver:]volume>`` — 8.2 or later Cluster-Mode otherwise;
      depends on Vserver peering between source and destination.

    :param source_vserver: Source Vserver name; use together with the
        source volume, plus the source cluster on 8.1 Cluster-Mode (or
        8.2+ when the relationship control plane is 'v1').
    :param source_volume: Source volume name; use together with the source
        Vserver, plus the source cluster on 8.1 Cluster-Mode (or 8.2+
        'v1' control plane).
    :param source_cluster: Source cluster name; requires the source
        Vserver and source volume as well.
    :param destination_vserver: Destination Vserver name; use together
        with the destination volume, plus the destination cluster on 8.1
        Cluster-Mode (or 8.2 'v1' control plane).
    :param destination_location: Destination endpoint in one of the
        location formats above. On Cluster-Mode you must use either this
        location or the destination cluster/Vserver/volume triple.
        Mandatory on 7-Mode.
    :param destination_volume: Destination volume name; use together with
        the destination Vserver, plus the destination cluster on 8.1
        Cluster-Mode (or 8.2 'v1' control plane).
    :param source_location: Source endpoint in one of the location formats
        above. On Cluster-Mode you must use either this location or the
        source cluster/Vserver/volume triple.
    :param destination_cluster: Destination cluster name; requires the
        destination Vserver and destination volume as well.
    """
    # Input descriptor: [value, wire-name, [type, default], required].
    api_args = {
        'source_vserver': [source_vserver, 'source-vserver',
                           [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume',
                          [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster',
                           [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver',
                                [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location',
                                 [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume',
                               [basestring, 'None'], False],
        'source_location': [source_location, 'source-location',
                            [basestring, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster',
                                [basestring, 'None'], False],
    }
    # snapmirror-resume returns no typed output elements.
    return self.request("snapmirror-resume", api_args, {})
def snapmirror_promote_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None):
    """Perform failover for one or more SnapMirror relationships.

    :param query: Keys of a specific snapmirror to operate on, or a query
        selecting multiple snapmirror objects.
    :param max_failure_count: Upper bound on failed operations before the
        server gives up and returns; only applicable when
        'continue-on-failure' is true. When set to the maximum or
        omitted, the number of failures is unlimited. Default: 2^32-1.
    :param max_records: Maximum number of snapmirror objects operated on
        in this call. Default: 20.
    :param return_success_list: When true, return the keys of the
        snapmirror objects that were successfully operated on.
        Default: true.
    :param tag: Continuation tag. Omit on the first call; on subsequent
        calls pass the next-tag returned by the previous call.
    :param continue_on_failure: When multiple objects match the query:
        true continues with the next match after a failure, false returns
        on the first failure. Default: false.
    :param return_failure_list: When true, return the keys of the
        snapmirror objects that failed to be operated on. Default: true.
    """
    # max_records and tag are passed through verbatim; the remaining
    # inputs use the [value, wire-name, [type, default], required] form.
    api_args = {
        'max_failure_count': [max_failure_count, 'max-failure-count',
                              [int, 'None'], False],
        'max_records': max_records,
        'return_success_list': [return_success_list, 'return-success-list',
                                [bool, 'None'], False],
        'tag': tag,
        'continue_on_failure': [continue_on_failure, 'continue-on-failure',
                                [bool, 'None'], False],
        'return_failure_list': [return_failure_list, 'return-failure-list',
                                [bool, 'None'], False],
        'query': [query, 'query', [SnapmirrorInfo, 'None'], False],
    }
    # Output descriptor: wire-name -> [type, is-list].
    output_desc = {
        'num-succeeded': [int, False],
        'num-failed': [int, False],
        'success-list': [SnapmirrorPromoteIterInfo, True],
        'failure-list': [SnapmirrorPromoteIterInfo, True],
    }
    return self.request("snapmirror-promote-iter", api_args, output_desc)
def snapmirror_initialize(self, source_vserver=None, source_volume=None, destination_snapshot=None, transfer_priority=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, source_snapshot=None, max_transfer_rate=None, destination_cluster=None):
    """Perform the initial (baseline) update of a SnapMirror relationship.

    The destination endpoint must be specified. Issue this API from the
    destination storage system (7-Mode), the destination cluster (8.1
    Cluster-Mode), or the destination Vserver (8.2+ Cluster-Mode).

    On 7-Mode, a volume destination must be in the restricted state and a
    qtree destination must not already exist. On Cluster-Mode this API
    usually follows snapmirror-create, but may also be used alone to both
    create and initially update a relationship. On 8.1 Cluster-Mode, and
    on 8.2 for relationships whose control plane is 'v1' (compatible with
    8.1), a job is spawned and its id returned; track it via the job
    APIs. On 8.2 or later, vault relationships cannot have a 32-bit
    volume as either endpoint, and progress can be tracked with
    snapmirror-get (except for 'v1' control-plane relationships).

    Endpoints may be given either as a location string or as the
    (cluster, Vserver, volume) name triple. Location strings take one of
    these forms (the format may change in the future):

    * ``<system>:/vol/<volume>[/<qtree>]`` — 7-Mode.
    * ``[<cluster>:]//<vserver>/<volume>`` — 8.1 Cluster-Mode, and 8.2
      relationships using an 8.1-compatible control plane.
    * ``<[vserver:]volume>`` — 8.2 or later Cluster-Mode otherwise;
      depends on Vserver peering between source and destination.

    :param source_vserver: Source Vserver name; use together with the
        source volume, plus the source cluster on 8.1 Cluster-Mode (or
        8.2+ when the relationship control plane is 'v1').
    :param source_volume: Source volume name; use together with the source
        Vserver, plus the source cluster on 8.1 Cluster-Mode (or 8.2+
        'v1' control plane).
    :param destination_snapshot: Also create this named snapshot (besides
        the regular SnapMirror snapshot) on the destination after a qtree
        SnapMirror transfer completes.
    :param transfer_priority: Priority at which the transfer runs:
        "normal" or "low"; defaults to the value in the associated
        snapmirror policy. Only applies on 8.2+ Cluster-Mode with a 'v2'
        control plane.
    :param source_cluster: Source cluster name; requires the source
        Vserver and source volume as well.
    :param destination_vserver: Destination Vserver name; use together
        with the destination volume, plus the destination cluster on 8.1
        Cluster-Mode (or 8.2 'v1' control plane).
    :param destination_location: Destination endpoint in one of the
        location formats above. On Cluster-Mode you must use either this
        location or the destination cluster/Vserver/volume triple. On
        7-Mode a volume destination must be restricted and a qtree
        destination must not already exist; mandatory on 7-Mode.
    :param destination_volume: Destination volume name; use together with
        the destination Vserver, plus the destination cluster on 8.1
        Cluster-Mode (or 8.2 'v1' control plane).
    :param source_location: Source endpoint in one of the location formats
        above. On Cluster-Mode you must use either this location or the
        source cluster/Vserver/volume triple. On 7-Mode, when omitted,
        the source from /etc/snapmirror.conf for the destination path is
        used.
    :param source_snapshot: Source snapshot for a qtree update (7-Mode),
        or the source-volume snapshot for the baseline transfer (8.2+
        Cluster-Mode, 'v2' control plane only). By default a new snapshot
        is created on the source for the transfer.
    :param max_transfer_rate: Upper bound in kilobytes/second for the
        transfer; 0 (the default) means unlimited. On Cluster-Mode this
        does not affect load-sharing transfers or other Pre-8.2
        relationship-capability transfers confined to a single cluster.
    :param destination_cluster: Destination cluster name; requires the
        destination Vserver and destination volume as well.
    """
    # Input descriptor: [value, wire-name, [type, default], required].
    api_args = {
        'source_vserver': [source_vserver, 'source-vserver',
                           [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume',
                          [basestring, 'None'], False],
        'destination_snapshot': [destination_snapshot, 'destination-snapshot',
                                 [basestring, 'None'], False],
        'transfer_priority': [transfer_priority, 'transfer-priority',
                              [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster',
                           [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver',
                                [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location',
                                 [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume',
                               [basestring, 'None'], False],
        'source_location': [source_location, 'source-location',
                            [basestring, 'None'], False],
        'source_snapshot': [source_snapshot, 'source-snapshot',
                            [basestring, 'None'], False],
        'max_transfer_rate': [max_transfer_rate, 'max-transfer-rate',
                              [int, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster',
                                [basestring, 'None'], False],
    }
    # Output descriptor: wire-name -> [type, is-list].
    output_desc = {
        'result-error-message': [basestring, False],
        'result-jobid': [int, False],
        'result-error-code': [int, False],
        'result-status': [basestring, False],
    }
    return self.request("snapmirror-initialize", api_args, output_desc)
def snapmirror_promote(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """Fail over to the destination volume of a load-sharing SnapMirror
    relationship.

    The destination volume is changed from load-sharing to read-write,
    assumes the identity of the source volume, and the original source
    volume is then destroyed. The destination must be a load-sharing
    volume, though promoting a destination left read-write by a
    previously failed promote is allowed. The destination endpoint must
    be specified, and source and destination volumes must be on the same
    Vserver.

    Client accesses are redirected to the promoted volume, whose view is
    the latest transferred Snapshot copy and may lag the original
    source's view. The relationship is usually deleted as part of the
    promotion, except for a load-sharing set with more than one
    destination volume, where the promoted volume becomes the set's new
    source. If the original source fed multiple relationships, the
    promoted volume becomes their new source as well. The call fails if
    any transfer involving the original source volume is in progress.

    Endpoints may be given either as a location string or as the
    (cluster, Vserver, volume) name triple. Location strings take one of
    these forms (the format may change in the future):

    * ``<system>:/vol/<volume>[/<qtree>]`` — 7-Mode.
    * ``[<cluster>:]//<vserver>/<volume>`` — 8.1 Cluster-Mode, and 8.2
      relationships using an 8.1-compatible control plane.
    * ``<[vserver:]volume>`` — 8.2 or later Cluster-Mode otherwise;
      depends on Vserver peering between source and destination.

    :param source_vserver: Source Vserver name; use together with the
        source volume, plus the source cluster on 8.1 (or 8.2+ when the
        relationship control plane is 'v1').
    :param source_volume: Source volume name; use together with the source
        Vserver, plus the source cluster on 8.1 Cluster-Mode (or 8.2+
        'v1' control plane).
    :param source_cluster: Source cluster name; requires source Vserver
        and source volume. Available only on 8.1 Cluster-Mode, or 8.2+
        when the control plane is 'v1'.
    :param destination_vserver: Destination Vserver name; use together
        with the destination volume, plus the destination cluster on 8.1
        (or 8.2+ 'v1' control plane).
    :param destination_location: Destination endpoint in one of the
        location formats above; alternative to the name triple. The
        destination cluster name is only required on 8.1 Cluster-Mode.
    :param destination_volume: Destination volume name; use together with
        the destination Vserver, plus the destination cluster on 8.1
        Cluster-Mode (or 8.2+ 'v1' control plane).
    :param source_location: Source endpoint in one of the location formats
        above; alternative to the name triple. The source cluster name is
        only required on 8.1 Cluster-Mode.
    :param destination_cluster: Destination cluster name; requires
        destination Vserver and destination volume. Available only on 8.1
        Cluster-Mode, or 8.2+ when the control plane is 'v1'.
    """
    # Input descriptor: [value, wire-name, [type, default], required].
    api_args = {
        'source_vserver': [source_vserver, 'source-vserver',
                           [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume',
                          [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster',
                           [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver',
                                [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location',
                                 [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume',
                               [basestring, 'None'], False],
        'source_location': [source_location, 'source-location',
                            [basestring, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster',
                                [basestring, 'None'], False],
    }
    # snapmirror-promote returns no typed output elements.
    return self.request("snapmirror-promote", api_args, {})
def snapmirror_restore(self, disable_storage_efficiency=None, source_vserver=None, source_volume=None, transfer_priority=None, source_cluster=None, tries=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, source_snapshot=None, max_transfer_rate=None, clean_up_failure=None, destination_cluster=None):
    """
    Restore the contents of a selected Snapshot copy from the source
    volume to the active file system of the destination volume; the
    destination volume is read-write after the data is restored.

    A temporary SnapMirror relationship of type RST is created for the
    operation and deleted after it completes successfully, so the
    destination volume cannot already be the destination of another
    SnapMirror relationship. An empty data protection destination
    receives a baseline restore; a read-write destination that shares
    at least one Snapshot copy with the source receives an incremental
    restore. The destination is made read-only for the operation and
    any data written since its latest snapshot is lost — stop client
    access and take a snapshot first if that data matters. Unless
    'source-snapshot' is specified, the most recent Snapshot copy on
    the source volume is restored. This API can also restart a failed
    restore, or terminate an aborted/failed restore when
    'clean-up-failure' is 'true'. A job is spawned to operate on the
    snapmirror and its id is returned; track progress with the job
    APIs.

    :param disable_storage_efficiency: Use 'true' to turn off storage
        efficiency for data transferred over the wire and written to
        the destination volume. Default: false (preserve efficiency
        when possible).
    :param source_vserver: Name of the source Vserver; requires the
        source volume and, on Data ONTAP 8.1 or with a 'v1' control
        plane, the source cluster.
    :param source_volume: Name of the source volume; requires the
        source Vserver and, where applicable, the source cluster.
    :param transfer_priority: Priority at which the transfer runs:
        'normal' (default) or 'low'.
    :param source_cluster: Name of the source cluster; the source
        Vserver and volume must also be given. Available only on Data
        ONTAP 8.1 in Cluster-Mode, or 8.2+ with a 'v1' control plane.
    :param tries: Total number of attempts to transfer data; a
        positive integer or 'unlimited'. Default: '8'.
    :param destination_vserver: Name of the destination Vserver;
        requires the destination volume and, where applicable, the
        destination cluster.
    :param destination_location: Destination endpoint in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode),
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, or 8.2
        with an 8.1-compatible control plane), or <[vserver:]volume>
        (8.2+; depends on Vserver peering). An alternative to naming
        the destination Vserver, volume and cluster explicitly. The
        format may change in the future.
    :param destination_volume: Name of the destination volume;
        requires the destination Vserver and, where applicable, the
        destination cluster.
    :param source_location: Source endpoint, in the same formats as
        'destination_location'; an alternative to naming the source
        Vserver, volume and cluster explicitly.
    :param source_snapshot: Snapshot from the source to be restored.
    :param max_transfer_rate: Upper bound, in kilobytes per second, at
        which data is transferred. Default: unlimited (0). Does not
        affect load-sharing transfers or relationships with a Pre 8.2
        Relationship Capability confined to a single cluster.
    :param clean_up_failure: Use 'true' to recover from an aborted or
        failed restore: residual temporary RST relationships are
        removed and the destination is converted back to read-write if
        it was read-write before the restore, discarding any data
        transferred by the restore. Default: 'false'.
    :param destination_cluster: Destination cluster name; the
        destination Vserver and volume must also be given. Available
        only on Data ONTAP 8.1 in Cluster-Mode, or 8.2+ with a 'v1'
        control plane.
    """
    # Map python argument -> [value, ZAPI element name, [type, subtype], is-list]
    api_args = {
        'disable_storage_efficiency': [ disable_storage_efficiency, 'disable-storage-efficiency', [ bool, 'None' ], False ],
        'source_vserver': [ source_vserver, 'source-vserver', [ basestring, 'None' ], False ],
        'source_volume': [ source_volume, 'source-volume', [ basestring, 'None' ], False ],
        'transfer_priority': [ transfer_priority, 'transfer-priority', [ basestring, 'sm-transfer-priority-enum' ], False ],
        'source_cluster': [ source_cluster, 'source-cluster', [ basestring, 'None' ], False ],
        'tries': [ tries, 'tries', [ basestring, 'None' ], False ],
        'destination_vserver': [ destination_vserver, 'destination-vserver', [ basestring, 'None' ], False ],
        'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
        'destination_volume': [ destination_volume, 'destination-volume', [ basestring, 'None' ], False ],
        'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
        'source_snapshot': [ source_snapshot, 'source-snapshot', [ basestring, 'None' ], False ],
        'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
        'clean_up_failure': [ clean_up_failure, 'clean-up-failure', [ bool, 'None' ], False ],
        'destination_cluster': [ destination_cluster, 'destination-cluster', [ basestring, 'None' ], False ],
    }
    # Expected output elements of the spawned job.
    api_output = {
        'result-error-message': [ basestring, False ],
        'result-jobid': [ int, False ],
        'result-error-code': [ int, False ],
        'result-status': [ basestring, False ],
    }
    return self.request("snapmirror-restore", api_args, api_output)
def snapmirror_initialize_ls_set(self, source_cluster=None, source_vserver=None, source_location=None, source_volume=None):
    """
    Perform the initial manual update of a set of load-sharing
    mirrors: data and Snapshot copies are transferred from the source
    volume to all up-to-date destination volumes in the set.

    Usually used after snapmirror-create has created a SnapMirror
    relationship for each destination volume in the set. To add and
    initialize a new destination volume to an existing set, use the
    snapmirror-initialize API instead. The source endpoint must be
    specified, either as a source location or as the source cluster,
    Vserver and volume. A job is spawned to operate on the snapmirror
    and its id is returned; track progress with the job APIs.

    :param source_cluster: Source cluster of the SnapMirror
        relationship; the source Vserver and volume must also be
        given. Supported only in cluster context.
    :param source_vserver: Source Vserver of the SnapMirror
        relationship; the source cluster and volume must also be
        given.
    :param source_location: Source endpoint of the SnapMirror
        relationship; an alternative to the cluster/Vserver/volume
        parameters.
    :param source_volume: Source volume of the SnapMirror
        relationship; the source cluster and Vserver must also be
        given. May be optional outside cluster context.
    """
    api_args = {
        'source_cluster': [ source_cluster, 'source-cluster', [ basestring, 'None' ], False ],
        'source_vserver': [ source_vserver, 'source-vserver', [ basestring, 'None' ], False ],
        'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
        'source_volume': [ source_volume, 'source-volume', [ basestring, 'None' ], False ],
    }
    api_output = {
        'result-error-message': [ basestring, False ],
        'result-jobid': [ int, False ],
        'result-error-code': [ int, False ],
        'result-status': [ basestring, False ],
    }
    return self.request("snapmirror-initialize-ls-set", api_args, api_output)
def snapmirror_get_total_records(self):
    """
    Obtain the total number of SnapMirror relationships.

    The returned 'count' is a point-in-time estimate and may differ on
    subsequent calls.
    """
    api_output = {
        'count': [ int, False ],
    }
    return self.request("snapmirror-get-total-records", {}, api_output)
def snapmirror_quiesce_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None):
    """
    Disable future transfers for one or more SnapMirror relationships.

    :param query: Keys of a specific snapmirror, or a query matching a
        set of snapmirror objects to operate on.
    :param max_failure_count: With 'continue-on-failure' true, stop
        after this many failed operations. If set to the maximum or
        omitted, there is no limit. Default: 2^32-1.
    :param max_records: Maximum number of snapmirror objects to be
        operated on in this call. Default: 20.
    :param return_success_list: If true (default), return the keys of
        the snapmirror objects successfully operated on.
    :param tag: Tag from the previous call's next-tag; usually omitted
        on the first call.
    :param continue_on_failure: If true, continue with the next
        matching snapmirror when an operation fails; if false
        (default), return on the first failure.
    :param return_failure_list: If true (default), return the keys of
        the snapmirror objects that failed to be operated on.
    """
    # 'max_records' and 'tag' are pagination controls passed through as-is.
    api_args = {
        'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
        'max_records': max_records,
        'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
        'tag': tag,
        'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
        'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
        'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
    }
    api_output = {
        'num-succeeded': [ int, False ],
        'num-failed': [ int, False ],
        'success-list': [ SnapmirrorQuiesceIterInfo, True ],
        'failure-list': [ SnapmirrorQuiesceIterInfo, True ],
    }
    return self.request("snapmirror-quiesce-iter", api_args, api_output)
def snapmirror_snapshot_owner_get_snapshots(self, vserver, volume):
    """
    List all Snapshot copies that are preserved for a SnapMirror
    mirror-to-vault cascade configuration.

    :param vserver: Vserver name.
    :param volume: Volume name.
    """
    api_args = {
        'vserver': [ vserver, 'vserver', [ basestring, 'vserver-name' ], False ],
        'volume': [ volume, 'volume', [ basestring, 'volume-name' ], False ],
    }
    api_output = {
        'snapshots': [ basestring, True ],
    }
    return self.request("snapmirror-snapshot-owner-get-snapshots", api_args, api_output)
def snapmirror_break_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None):
    """
    Break one or more SnapMirror relationships.

    :param query: Keys of a specific snapmirror, or a query matching a
        set of snapmirror objects to operate on.
    :param max_failure_count: With 'continue-on-failure' true, stop
        after this many failed operations. If set to the maximum or
        omitted, there is no limit. Default: 2^32-1.
    :param max_records: Maximum number of snapmirror objects to be
        operated on in this call. Default: 20.
    :param return_success_list: If true (default), return the keys of
        the snapmirror objects successfully operated on.
    :param tag: Tag from the previous call's next-tag; usually omitted
        on the first call.
    :param continue_on_failure: If true, continue with the next
        matching snapmirror when an operation fails; if false
        (default), return on the first failure.
    :param return_failure_list: If true (default), return the keys of
        the snapmirror objects that failed to be operated on.
    """
    # 'max_records' and 'tag' are pagination controls passed through as-is.
    api_args = {
        'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
        'max_records': max_records,
        'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
        'tag': tag,
        'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
        'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
        'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
    }
    api_output = {
        'num-succeeded': [ int, False ],
        'num-failed': [ int, False ],
        'success-list': [ SnapmirrorBreakIterInfo, True ],
        'failure-list': [ SnapmirrorBreakIterInfo, True ],
    }
    return self.request("snapmirror-break-iter", api_args, api_output)
def snapmirror_modify(self, source_vserver=None, source_volume=None, schedule=None, vserver=None, source_cluster=None, tries=None, destination_vserver=None, destination_location=None, policy=None, destination_volume=None, source_location=None, max_transfer_rate=None, destination_cluster=None):
    """
    Change one or more parameters of a SnapMirror relationship. The
    key parameter identifying any relationship is the destination
    volume, and the destination endpoint must be specified.

    For load-sharing mirrors, a change affects every relationship in
    the set; destination volumes in a set do not have individual
    parameter settings. Changes take effect on the next manual or
    scheduled update and do not affect updates already in progress. On
    Data ONTAP 8.1 (Cluster-Mode) this must be issued on the
    destination cluster; on 8.2 it must be issued on the destination
    Vserver (Vserver context) or destination cluster (cluster
    context). Not supported on Infinite Volume constituents.

    :param source_vserver: Name of the source Vserver; requires the
        source volume and, on Data ONTAP 8.1 or with a 'v1' control
        plane, the source cluster.
    :param source_volume: Name of the source volume; requires the
        source Vserver and, where applicable, the source cluster.
    :param schedule: Name of the cron schedule used to update the
        relationship.
    :param vserver: Optional managing Vserver, authorized to manage
        the relationship with snapmirror commands. Currently a
        reserved option.
    :param source_cluster: Name of the source cluster; the source
        Vserver and volume must also be given. Available only on Data
        ONTAP 8.1 in Cluster-Mode, or 8.2+ with a 'v1' control plane.
    :param tries: Maximum number of times to attempt each manual or
        scheduled transfer; default eight. Zero disables manual and
        scheduled updates. Only relevant on Data ONTAP 8.1; on 8.2 the
        retry count is a SnapMirror policy attribute and this value is
        ignored.
    :param destination_vserver: Name of the destination Vserver;
        requires the destination volume and, where applicable, the
        destination cluster.
    :param destination_location: Destination endpoint in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode),
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, or 8.2
        with an 8.1-compatible control plane), or <[vserver:]volume>
        (8.2+; depends on Vserver peering). An alternative to naming
        the destination Vserver, volume and cluster explicitly. The
        format may change in the future.
    :param policy: Name of the SnapMirror policy that applies to this
        relationship.
    :param destination_volume: Name of the destination volume;
        requires the destination Vserver and, where applicable, the
        destination cluster.
    :param source_location: Source endpoint, in the same formats as
        'destination_location'; an alternative to naming the source
        Vserver, volume and cluster explicitly.
    :param max_transfer_rate: Upper bound, in kilobytes per second, at
        which data is transferred. Default: unlimited (0). Does not
        affect load-sharing transfers or relationships with a Pre 8.2
        Relationship Capability confined to a single cluster.
    :param destination_cluster: Destination cluster name; the
        destination Vserver and volume must also be given. Available
        only on Data ONTAP 8.1 in Cluster-Mode, or 8.2+ with a 'v1'
        control plane.
    """
    api_args = {
        'source_vserver': [ source_vserver, 'source-vserver', [ basestring, 'None' ], False ],
        'source_volume': [ source_volume, 'source-volume', [ basestring, 'None' ], False ],
        'schedule': [ schedule, 'schedule', [ basestring, 'None' ], False ],
        'vserver': [ vserver, 'vserver', [ basestring, 'vserver-name' ], False ],
        'source_cluster': [ source_cluster, 'source-cluster', [ basestring, 'None' ], False ],
        'tries': [ tries, 'tries', [ basestring, 'None' ], False ],
        'destination_vserver': [ destination_vserver, 'destination-vserver', [ basestring, 'None' ], False ],
        'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
        'policy': [ policy, 'policy', [ basestring, 'sm-policy' ], False ],
        'destination_volume': [ destination_volume, 'destination-volume', [ basestring, 'None' ], False ],
        'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
        'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
        'destination_cluster': [ destination_cluster, 'destination-cluster', [ basestring, 'None' ], False ],
    }
    # snapmirror-modify returns no output elements.
    return self.request("snapmirror-modify", api_args, {})
def snapmirror_abort_iter(self, query, check_only=None, max_failure_count=None, clear_checkpoint=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None):
    """
    Abort SnapMirror transfers for one or more SnapMirror
    relationships. Not supported on Infinite Volume constituents.

    :param query: Keys of a specific snapmirror, or a query matching a
        set of snapmirror objects to operate on.
    :param check_only: If true, abort only snapmirror-check operations
        active on the relationship.
    :param max_failure_count: With 'continue-on-failure' true, stop
        after this many failed operations. If set to the maximum or
        omitted, there is no limit. Default: 2^32-1.
    :param clear_checkpoint: If true, discard the restart checkpoint
        and restore the destination volume to the last successfully
        transferred Snapshot copy, forcing the next transfer to start
        from a fresh Snapshot copy on the destination.
    :param max_records: Maximum number of snapmirror objects to be
        operated on in this call. Default: 20.
    :param return_success_list: If true (default), return the keys of
        the snapmirror objects successfully operated on.
    :param tag: Tag from the previous call's next-tag; usually omitted
        on the first call.
    :param continue_on_failure: If true, continue with the next
        matching snapmirror when an operation fails; if false
        (default), return on the first failure.
    :param return_failure_list: If true (default), return the keys of
        the snapmirror objects that failed to be operated on.
    """
    # 'max_records' and 'tag' are pagination controls passed through as-is.
    api_args = {
        'check_only': [ check_only, 'check-only', [ bool, 'None' ], False ],
        'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
        'clear_checkpoint': [ clear_checkpoint, 'clear-checkpoint', [ bool, 'None' ], False ],
        'max_records': max_records,
        'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
        'tag': tag,
        'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
        'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
        'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
    }
    api_output = {
        'num-succeeded': [ int, False ],
        'num-failed': [ int, False ],
        'success-list': [ SnapmirrorAbortIterInfo, True ],
        'failure-list': [ SnapmirrorAbortIterInfo, True ],
    }
    return self.request("snapmirror-abort-iter", api_args, api_output)
def snapmirror_throttle(self, destination_location, max_transfer_rate):
    """
    Change the max transfer rate of an active transfer. May be issued
    to either the source or the destination filer.

    :param destination_location: Destination location of the active
        transfer, in volume form (<filer>:<volume>) or qtree form
        (<filer>:/vol/<volume>/<qtree>).
    :param max_transfer_rate: Maximum transfer rate in kilobytes per
        second; 0 disables the throttle so the filer transfers as fast
        as it can. Range: 0..2^32-1.
    """
    api_args = {
        'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
        'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
    }
    # snapmirror-throttle returns no output elements.
    return self.request("snapmirror-throttle", api_args, {})
def snapmirror_check_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None, target_snapshot=None):
    """Check one or more SnapMirror relationships.

    A job is spawned to operate on each snapmirror and the job id is
    returned; progress can be tracked with the job APIs.

    :param query: Keys of a specific snapmirror (all keys must be
        given), or a query matching several snapmirror objects.
    :param max_failure_count: Stop after this many failed operations.
        Only applicable when 'continue-on-failure' is true; if set to
        the maximum or omitted there is no limit. Default: 2^32-1.
    :param max_records: Maximum number of snapmirror objects operated
        on in this call. Default: 20.
    :param return_success_list: When true, return the keys of objects
        successfully operated on or scheduled. Default: true.
    :param tag: Continuation tag; omit on the first call, then pass the
        next-tag from the previous call.
    :param continue_on_failure: When true, continue with the next
        matching snapmirror after a failure instead of returning on the
        first one. Default: false.
    :param return_failure_list: When true, return the keys of objects
        that were not operated on due to some error. Default: true.
    :param target_snapshot: Snapshot copy on the destination endpoint
        to check.
    """
    api_args = {
        'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
        'max_records': max_records,
        'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
        'tag': tag,
        'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
        'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
        'target_snapshot': [ target_snapshot, 'target-snapshot', [ basestring, 'None' ], False ],
        'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
    }
    api_output = {
        'num-succeeded': [ int, False ],
        'num-failed': [ int, False ],
        'success-list': [ SnapmirrorCheckIterInfo, True ],
        'failure-list': [ SnapmirrorCheckIterInfo, True ],
    }
    return self.request("snapmirror-check-iter", api_args, api_output)
def snapmirror_delete_schedule(self, destination_location):
    """Delete the SnapMirror schedule for a given destination.

    Must be executed on the destination filer.

    :param destination_location: Destination whose schedule is deleted,
        in volume form (<filer>:<volume>) or qtree form
        (<filer>:/vol/<volume>/<qtree>).
    """
    api_args = {
        'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
    }
    # No typed output elements are defined for this API.
    return self.request("snapmirror-delete-schedule", api_args, {})
def snapmirror_snapshot_owner_destroy(self, vserver, volume, snapshot, snapshot_owner_name=None):
    """Delete an owner preserving a Snapshot copy for a SnapMirror
    mirror-to-vault cascade configuration.

    :param vserver: Vserver name.
    :param volume: Volume name.
    :param snapshot: Snapshot copy name.
    :param snapshot_owner_name: Owner to remove from the Snapshot copy.
        If omitted, the internal default owner name is looked up and
        removed; '*' removes all owners from the Snapshot copy.
    """
    api_args = {
        'vserver': [ vserver, 'vserver', [ basestring, 'vserver-name' ], False ],
        'volume': [ volume, 'volume', [ basestring, 'volume-name' ], False ],
        'snapshot': [ snapshot, 'snapshot', [ basestring, 'snapshot-id' ], False ],
        'snapshot_owner_name': [ snapshot_owner_name, 'snapshot-owner-name', [ basestring, 'snapshot-owner-name' ], False ],
    }
    # No typed output elements are defined for this API.
    return self.request("snapmirror-snapshot-owner-destroy", api_args, {})
def snapmirror_delete_connection(self, connection):
    """Delete a named SnapMirror connection.

    Must be executed on the destination filer. Connections currently
    live in /etc/snapmirror.conf.

    :param connection: Connection name to delete; ASCII, and must begin
        with an alpha character.
    """
    api_args = {
        'connection': [ connection, 'connection', [ basestring, 'None' ], False ],
    }
    # No typed output elements are defined for this API.
    return self.request("snapmirror-delete-connection", api_args, {})
def snapmirror_destroy(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """Remove a SnapMirror relationship without touching the volumes.

    Only the relationship between the source and destination volumes is
    deleted; the volumes themselves and their Snapshot copies are left
    in place, and each volume keeps its read-write or read-only
    attributes (a load-sharing destination becomes a data protection
    volume and stays read-only). The destination endpoint must be
    specified, either via destination-location or via the destination
    Vserver/volume (and, where required, cluster) name parameters.

    On Data ONTAP 8.1 operating in Cluster-Mode the call fails while a
    SnapMirror transfer is in progress; on 8.2 it attempts to abort an
    ongoing transfer but does not fail if the abort fails. On 8.2 the
    API can only be issued on the destination Vserver, and
    snapmirror-release must be issued on the source Vserver to delete
    the source-side relationship information. On 8.1 it can be issued
    on either cluster; issued on the source cluster only the source
    side's information is deleted. Returns success if the relationship
    identified by the destination endpoint does not exist. Not
    supported when the destination endpoint is an Infinite Volume.

    :param source_vserver: Source Vserver name; use together with the
        source volume name and, on 8.1 or on 8.2+ with a 'v1' control
        plane, the source cluster name.
    :param source_volume: Source volume name; use together with the
        source Vserver name (and source cluster where required).
    :param source_cluster: Source cluster name; available only on 8.1,
        or on 8.2+ when the relationship control plane is 'v1'.
    :param destination_vserver: Destination Vserver name; use together
        with the destination volume name (and destination cluster where
        required).
    :param destination_location: Destination endpoint in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode),
        [<cluster>:]//<vserver>/<volume> (8.1-style control plane), or
        [<vserver>:]<volume> (8.2+; depends on Vserver peering).
        This format may change in the future.
    :param destination_volume: Destination volume name; use together
        with the destination Vserver name (and destination cluster
        where required).
    :param source_location: Source endpoint, in the same formats as
        destination-location.
    :param destination_cluster: Destination cluster name; available
        only on 8.1, or on 8.2+ when the relationship control plane is
        'v1'.
    """
    api_args = {
        'source_vserver': [ source_vserver, 'source-vserver', [ basestring, 'None' ], False ],
        'source_volume': [ source_volume, 'source-volume', [ basestring, 'None' ], False ],
        'source_cluster': [ source_cluster, 'source-cluster', [ basestring, 'None' ], False ],
        'destination_vserver': [ destination_vserver, 'destination-vserver', [ basestring, 'None' ], False ],
        'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
        'destination_volume': [ destination_volume, 'destination-volume', [ basestring, 'None' ], False ],
        'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
        'destination_cluster': [ destination_cluster, 'destination-cluster', [ basestring, 'None' ], False ],
    }
    # No typed output elements are defined for this API.
    return self.request("snapmirror-destroy", api_args, {})
def snapmirror_destroy_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None):
    """Remove one or several SnapMirror relationships.

    Not supported on Infinite Volume constituents.

    :param query: Keys of a specific snapmirror (all keys must be
        given), or a query matching several snapmirror objects.
    :param max_failure_count: Stop after this many failed operations.
        Only applicable when 'continue-on-failure' is true; if set to
        the maximum or omitted there is no limit. Default: 2^32-1.
    :param max_records: Maximum number of snapmirror objects operated
        on in this call. Default: 20.
    :param return_success_list: When true, return the keys of objects
        successfully operated on. Default: true.
    :param tag: Continuation tag; omit on the first call, then pass the
        next-tag from the previous call.
    :param continue_on_failure: When true, continue with the next
        matching snapmirror after a failure instead of returning on the
        first one. Default: false.
    :param return_failure_list: When true, return the keys of objects
        that were not operated on due to some error. Default: true.
    """
    api_args = {
        'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
        'max_records': max_records,
        'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
        'tag': tag,
        'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
        'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
        'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
    }
    api_output = {
        'num-succeeded': [ int, False ],
        'num-failed': [ int, False ],
        'success-list': [ SnapmirrorDestroyIterInfo, True ],
        'failure-list': [ SnapmirrorDestroyIterInfo, True ],
    }
    return self.request("snapmirror-destroy-iter", api_args, api_output)
def snapmirror_set_schedule(self, hours, destination_location, days_of_month, source_location, minutes, days_of_week, is_compressed=None, max_transfer_rate=None, tcp_window_size=None, restart=None, connection_mode=None):
    """
    Sets the schedule for a given destination. The API must
    be executed on the destination filer. Currently,
    the schedule is in /etc/snapmirror.conf.
    :param hours: Hours in the day for which the schedule is set.
        The form is crontab-like, with possible values of:
        <ul>
        <li> - := match nothing;
        <li> 1 := match hour 1;
        <li> 1,3 := match hour 1 and 3;
        <li> 2-5 := match hour 2,3,4,5;
        <li> 1-24/3 := match hour 1,4,7,10,13,16,19,22;
        <li> * := matches all possible legal values;
        </ul>
    :param destination_location: The destination location of a schedule to set. The
        destination location is of the volume form:
        <filer>:<volume>
        or the qtree form:
        <filer>:/vol/<volume>/<qtree>.
    :param days_of_month: Days in the month for which the schedule is set.
        The form is crontab-like, with possible values of:
        <ul>
        <li> - := match nothing;
        <li> 1 := match day 1;
        <li> 1,3 := match day 1 and 3;
        <li> 2-5 := match day 2,3,4,5;
        <li> 1-30/7 := match day 1,8,15,22,29;
        <li> * := matches all possible legal values;
        </ul>
    :param source_location: The source location of a schedule to set. The source
        location is of the volume form: <filer>:<volume>
        or the qtree form:
        <filer>:/vol/<volume>/<qtree>.
    :param minutes: Minutes in the hour for which the schedule is set.
        The form is crontab-like, with possible values of:
        <ul>
        <li> - := match nothing;
        <li> 1 := match minute 1;
        <li> 1,3 := match minute 1 and 3;
        <li> 2-5 := match minute 2,3,4,5;
        <li> 1-12/3 := match minute 1,4,7,10;
        <li> 0-55/5 := match minute 0,5,10,15,20,25,30,35,40,45,50,55;
        <li> * := matches all possible legal values;
        </ul>
    :param days_of_week: Days in the week for which the schedule is set.
        0 represents Sunday, and 6 represents Saturday.
        The form is crontab-like, with possible values of:
        <ul>
        <li> - := match nothing;
        <li> 1 := match day 1 (Mon);
        <li> 1,3 := match day 1 and 3 (Mon and Wed);
        <li> 2-5 := match day 2,3,4,5 (Tue,Wed,Thu,Fri);
        <li> * := matches all possible legal values;
        </ul>
    :param is_compressed: If true SnapMirror will compress/decompress the data that is
        transferred between the source and destination storage system.
        If false, transferred data will not be compressed. Upon initial
        configuration, default is false. On subsequent requests, the
        current configured setting is retained if not provided.
        This argument can only be used when a connection definition is
        used for the relationship entry. Using this argument without a
        connection definition will throw an error message.
    :param max_transfer_rate: Maximum transfer rate in kilobytes per second. If specified as 0,
        transfer happens as fast as the storage system can. If not
        specified, the previous value is retained. If nothing was
        mentioned previously, it is set to the default value.
    :param tcp_window_size: TCP window size in bytes. If specified as 0, size is set to
        an internally determined default value. If not specified, the
        previous value is retained.
    :param restart: Restart mode when transfer is interrupted. Possible values are
        "always", "never" and "default". If value is set to "always",
        then an interrupted transfer will always restart, if it has a
        restart check point and the conditions are the same as before
        the transfer was interrupted. If value is set to "never", then
        an interrupted transfer will never restart, even if it has a
        restart checkpoint. If not specified, the previous value is
        retained. If nothing was mentioned previously, then it is set to
        default, where SnapMirror behaves like the "always" case, unless
        it has passed the next scheduled transfer time, in which case it
        will begin that scheduled transfer instead of restarting.
    :param connection_mode: This option specifies the mode to be used for establishing
        connection between source and destination. Possible values are
        "inet", "inet6" and "default". If this option is set to "inet6",
        connections between source and destination will be established
        using IPv6 addresses only. If there are no IPv6 addresses
        configured, then the connection will fail. If the option is set
        to "inet", connections between source and destination will be
        established using IPv4 addresses only. If there are no IPv4
        addresses configured, then the connection will fail.
        <p>
        If not specified, the previous value is retained. If nothing was
        mentioned previously, it is set as default, where connection will
        be tried using both "inet6" and "inet". "inet6" will have higher
        precedence than "inet". If connection request using "inet6" fails,
        SnapMirror will retry the connection using "inet".
        <p>
        This argument is not effective when an IP address is specified
        instead of source hostname. If the IP address format and
        connection mode do not match, the operation will fail with proper
        error message.
    """
    # No typed output elements are defined for this API.
    return self.request( "snapmirror-set-schedule", {
        'is_compressed': [ is_compressed, 'is-compressed', [ bool, 'None' ], False ],
        'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
        'hours': [ hours, 'hours', [ basestring, 'None' ], False ],
        'tcp_window_size': [ tcp_window_size, 'tcp-window-size', [ int, 'None' ], False ],
        'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
        'restart': [ restart, 'restart', [ basestring, 'None' ], False ],
        'connection_mode': [ connection_mode, 'connection-mode', [ basestring, 'None' ], False ],
        'days_of_month': [ days_of_month, 'days-of-month', [ basestring, 'None' ], False ],
        'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
        'minutes': [ minutes, 'minutes', [ basestring, 'None' ], False ],
        'days_of_week': [ days_of_week, 'days-of-week', [ basestring, 'None' ], False ],
    }, {
    } )
def snapmirror_update_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None, source_snapshot=None, max_transfer_rate=None):
    """Update the destination volumes of one or more SnapMirror
    relationships.

    Not supported on Infinite Volume constituents. A job is spawned to
    operate on each snapmirror and the job id is returned; progress can
    be tracked with the job APIs.

    :param query: Keys of a specific snapmirror (all keys must be
        given), or a query matching several snapmirror objects.
    :param max_failure_count: Stop after this many failed operations.
        Only applicable when 'continue-on-failure' is true; if set to
        the maximum or omitted there is no limit. Default: 2^32-1.
    :param max_records: Maximum number of snapmirror objects operated
        on in this call. Default: 20.
    :param return_success_list: When true, return the keys of objects
        successfully operated on or scheduled. Default: true.
    :param tag: Continuation tag; omit on the first call, then pass the
        next-tag from the previous call.
    :param continue_on_failure: When true, continue with the next
        matching snapmirror after a failure instead of returning on the
        first one. Default: false.
    :param return_failure_list: When true, return the keys of objects
        that were not operated on due to some error. Default: true.
    :param source_snapshot: Source Snapshot.
    :param max_transfer_rate: Upper bound on transfer speed in
        kilobytes per second; 0 (the default) means unlimited. Does not
        affect load-sharing transfers or transfers for relationships
        with Relationship Capability of Pre 8.2 confined to a single
        cluster.
    """
    api_args = {
        'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
        'max_records': max_records,
        'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
        'tag': tag,
        'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
        'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
        'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
        'source_snapshot': [ source_snapshot, 'source-snapshot', [ basestring, 'None' ], False ],
        'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
    }
    api_output = {
        'num-succeeded': [ int, False ],
        'num-failed': [ int, False ],
        'success-list': [ SnapmirrorUpdateIterInfo, True ],
        'failure-list': [ SnapmirrorUpdateIterInfo, True ],
    }
    return self.request("snapmirror-update-iter", api_args, api_output)
def snapmirror_initialize_iter(self, query, max_failure_count=None, transfer_priority=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None, source_snapshot=None, max_transfer_rate=None):
    """Initialize the destination volume of one or more SnapMirror
    relationships.

    A job is spawned to operate on each snapmirror and the job id is
    returned; progress can be tracked with the job APIs.

    :param query: Keys of a specific snapmirror (all keys must be
        given), or a query matching several snapmirror objects.
    :param max_failure_count: Stop after this many failed operations.
        Only applicable when 'continue-on-failure' is true; if set to
        the maximum or omitted there is no limit. Default: 2^32-1.
    :param transfer_priority: Transfer priority; possible values are
        "low" and "normal".
    :param max_records: Maximum number of snapmirror objects operated
        on in this call. Default: 20.
    :param return_success_list: When true, return the keys of objects
        successfully operated on or scheduled. Default: true.
    :param tag: Continuation tag; omit on the first call, then pass the
        next-tag from the previous call.
    :param continue_on_failure: When true, continue with the next
        matching snapmirror after a failure instead of returning on the
        first one. Default: false.
    :param return_failure_list: When true, return the keys of objects
        that were not operated on due to some error. Default: true.
    :param source_snapshot: Source Snapshot.
    :param max_transfer_rate: Upper bound on transfer speed in
        kilobytes per second; 0 (the default) means unlimited. Does not
        affect load-sharing transfers or transfers for relationships
        with Relationship Capability of Pre 8.2 confined to a single
        cluster.
    """
    api_args = {
        'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
        'transfer_priority': [ transfer_priority, 'transfer-priority', [ basestring, 'sm-transfer-priority-enum' ], False ],
        'max_records': max_records,
        'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
        'tag': tag,
        'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
        'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
        'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
        'source_snapshot': [ source_snapshot, 'source-snapshot', [ basestring, 'None' ], False ],
        'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
    }
    api_output = {
        'num-succeeded': [ int, False ],
        'num-failed': [ int, False ],
        'success-list': [ SnapmirrorInitializeIterInfo, True ],
        'failure-list': [ SnapmirrorInitializeIterInfo, True ],
    }
    return self.request("snapmirror-initialize-iter", api_args, api_output)
def snapmirror_set_sync_schedule(self, visibility_frequency, destination_location, source_location, is_compressed=None, ops_throttle=None, sync_mode=None, tcp_window_size=None, connection_mode=None, is_data_motion_schedule=None):
    """Establish a synchronous or semi-synchronous SnapMirror schedule.

    Schedules are currently stored in /etc/snapmirror.conf;
    semi-synchronous mode is selected by specifying semi-sync.

    :param visibility_frequency: How often, in seconds, the source
        snapshot becomes visible on the destination mirror; controls the
        visibility_interval value in snapmirror.conf. A typical value
        is 180.
    :param destination_location: Destination of the schedule to set, in
        volume form <filer>:<volume> or qtree form
        <filer>:/vol/<volume>/<qtree>.
    :param source_location: Source of the schedule to set, in the same
        volume or qtree forms as the destination location.
    :param is_compressed: If true, SnapMirror compresses/decompresses
        the data transferred between source and destination; if false,
        data is not compressed. Defaults to false on initial
        configuration; on later requests the current setting is kept if
        not provided. Only valid when a connection definition is used
        for the relationship entry — otherwise an error is raised.
    :param ops_throttle: Deprecated; use sync_mode instead. Number of
        outstanding operations allowed before blocking on the source,
        as a number followed by "ops", "s", or "ms". Values below 10s
        configure fully synchronous mode; 10s or more configure
        semi-synchronous mode. If unspecified, the previous value is
        retained.
    :param sync_mode: Either "full_sync" or "semi_sync". If
        unspecified, the mirror runs in full synchronous mode. This
        parameter overrides the deprecated ops_throttle parameter.
    :param tcp_window_size: TCP window size in bytes. 0 selects an
        internally determined default; if unspecified, the previous
        value is retained.
    :param connection_mode: Connection mode between source and
        destination: "inet6" uses IPv6 addresses only, "inet" uses IPv4
        addresses only (each fails when no matching addresses exist).
        If unspecified, the previous value is retained; the initial
        default tries "inet6" first, then falls back to "inet". Not
        effective when an IP address is given instead of a source
        hostname; a format/mode mismatch fails with an error message.
    :param is_data_motion_schedule: If true, the schedule entry is part
        of a vfiler migration (Data Motion); semi-sync SnapMirror uses
        this for Data Motion specific behavior. Defaults to false on
        initial configuration; on later requests the current setting is
        kept if not provided.
    """
    # Marshalling table: python arg -> [value, wire-name, [type, subtype], is-list]
    schedule_args = {
        'visibility_frequency': [visibility_frequency, 'visibility-frequency', [int, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
        'is_compressed': [is_compressed, 'is-compressed', [bool, 'None'], False],
        'ops_throttle': [ops_throttle, 'ops-throttle', [basestring, 'None'], False],
        'sync_mode': [sync_mode, 'sync-mode', [basestring, 'None'], False],
        'tcp_window_size': [tcp_window_size, 'tcp-window-size', [int, 'None'], False],
        'connection_mode': [connection_mode, 'connection-mode', [basestring, 'None'], False],
        'is_data_motion_schedule': [is_data_motion_schedule, 'is-data-motion-schedule', [bool, 'None'], False],
    }
    # No typed elements in the reply.
    return self.request("snapmirror-set-sync-schedule", schedule_args, {})
def snapmirror_resume_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None):
    """Enable future transfers for one or more SnapMirror relationships.

    :param query: When operating on a specific snapmirror, all keys must
        be given; when operating on many, a query matching them.
    :param max_failure_count: With continue-on-failure set to true,
        limits how many failed operations are tolerated before the
        server gives up and returns. If set to the maximum or omitted,
        there is no limit. Only applicable when continue-on-failure is
        true. Default: 2^32-1.
    :param max_records: Maximum number of snapmirror objects operated on
        per call. Default: 20.
    :param return_success_list: If true (the default), the reply lists
        the keys of snapmirror objects successfully operated on.
    :param tag: Continuation tag; usually omitted on the first call, and
        copied from the previous reply's next-tag on subsequent calls.
    :param continue_on_failure: Useful when several snapmirror objects
        match the query. If true, continue past per-object failures; if
        false (the default), return on the first failure.
    :param return_failure_list: If true (the default), the reply lists
        the keys of snapmirror objects skipped because of errors.
    """
    # Marshalling table: python arg -> [value, wire-name, [type, subtype], is-list]
    iter_args = {
        'query': [query, 'query', [SnapmirrorInfo, 'None'], False],
        'max_failure_count': [max_failure_count, 'max-failure-count', [int, 'None'], False],
        'max_records': max_records,
        'return_success_list': [return_success_list, 'return-success-list', [bool, 'None'], False],
        'tag': tag,
        'continue_on_failure': [continue_on_failure, 'continue-on-failure', [bool, 'None'], False],
        'return_failure_list': [return_failure_list, 'return-failure-list', [bool, 'None'], False],
    }
    # Reply schema: element name -> [type, is-list]
    reply_schema = {
        'num-succeeded': [int, False],
        'num-failed': [int, False],
        'success-list': [SnapmirrorResumeIterInfo, True],
        'failure-list': [SnapmirrorResumeIterInfo, True],
    }
    return self.request("snapmirror-resume-iter", iter_args, reply_schema)
def snapmirror_delete_sync_schedule(self, destination_location):
    """Delete the synchronous schedule for a given destination.

    This API must be executed on the destination filer.

    :param destination_location: Destination whose schedule is deleted,
        in volume form <filer>:<volume> or qtree form
        <filer>:/vol/<volume>/<qtree>.
    """
    delete_args = {
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
    }
    # No typed elements in the reply.
    return self.request("snapmirror-delete-sync-schedule", delete_args, {})
def snapmirror_break_async(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """Break an Infinite Volume SnapMirror data-protection relationship.

    When the relationship is broken, the destination volume becomes
    read-write and can diverge from the source, client redirection is
    turned off on the destination, the restart checkpoint is cleared,
    and clients can see the latest Snapshot copy. Subsequent manual or
    scheduled updates fail until the relationship is reestablished with
    the snapmirror-resync API.

    The destination endpoint must be specified. This API applies only to
    data protection mirrors for Infinite Volume, must be used from the
    destination cluster, and is not supported on Infinite Volume
    constituents or when the destination endpoint is a flexible volume.
    A job is spawned to operate on the snapmirror; its id is returned
    and its progress can be tracked with the job APIs.

    :param source_vserver: Name of the source Vserver. If used, also
        specify the source volume, and the source cluster on Data ONTAP
        8.1, or on 8.2 or later operating in Cluster-Mode if the
        relationship control plane is 'v1'.
    :param source_volume: Name of the source volume. If used, also
        specify the source Vserver, and the source cluster on Data ONTAP
        8.1 operating in Cluster-Mode, or on 8.2 or later in
        Cluster-Mode if the relationship control plane is 'v1'.
    :param source_cluster: Name of the source cluster; requires the
        source Vserver and source volume as well. Available only on Data
        ONTAP 8.1 operating in Cluster-Mode, and on 8.2 or later in
        Cluster-Mode if the relationship control plane is 'v1'.
    :param destination_vserver: Name of the destination Vserver. If
        used, also specify the destination volume, and the destination
        cluster on Data ONTAP 8.1, or on 8.2 or later in Cluster-Mode if
        the relationship control plane is 'v1'.
    :param destination_location: Destination endpoint in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode);
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, and 8.2
        Cluster-Mode relationships using an 8.1-compatible control
        plane); <[vserver:]volume> (8.2 or later Cluster-Mode otherwise,
        which depends on Vserver peering between the endpoints). The
        format may change in the future. Alternatively, specify the
        destination Vserver, volume, and (on 8.1 Cluster-Mode only)
        cluster by name.
    :param destination_volume: Name of the destination volume. If used,
        also specify the destination Vserver, and the destination
        cluster on Data ONTAP 8.1 operating in Cluster-Mode, or on 8.2
        or later in Cluster-Mode if the relationship control plane is
        'v1'.
    :param source_location: Source endpoint, in the same formats (and
        with the same alternatives) as destination_location.
    :param destination_cluster: Name of the destination cluster;
        requires the destination Vserver and volume as well. Available
        only on Data ONTAP 8.1 operating in Cluster-Mode, and on 8.2 or
        later in Cluster-Mode if the relationship control plane is 'v1'.
    """
    # Marshalling table: python arg -> [value, wire-name, [type, subtype], is-list]
    endpoint_args = {
        'source_vserver': [source_vserver, 'source-vserver', [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume', [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver', [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume', [basestring, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster', [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
    }
    # Reply schema: async job result fields.
    reply_schema = {
        'result-error-message': [basestring, False],
        'result-jobid': [int, False],
        'result-error-code': [int, False],
        'result-status': [basestring, False],
    }
    return self.request("snapmirror-break-async", endpoint_args, reply_schema)
def snapmirror_list_schedule(self, destination_location=None):
    """Return the schedule for one destination, or all destinations.

    The API must be executed on the destination filer. Schedules are
    currently stored in /etc/snapmirror.conf.

    :param destination_location: Destination whose schedule to obtain,
        in volume form <filer>:<volume> or qtree form
        <filer>:/vol/<volume>/<qtree>. The <filer> must match the
        destination filer. When omitted, all destination schedules are
        returned.
    """
    list_args = {
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
    }
    # Reply schema: element name -> [type, is-list]
    reply_schema = {
        'snapmirror-schedules': [SnapmirrorScheduleInfo, True],
    }
    return self.request("snapmirror-list-schedule", list_args, reply_schema)
def snapmirror_snapshot_owner_get(self, vserver, volume, snapshot):
    """List all owners preserving a Snapshot copy in a mirror-to-vault cascade.

    :param vserver: Vserver name.
    :param volume: Volume name.
    :param snapshot: Snapshot copy name.
    """
    owner_args = {
        'vserver': [vserver, 'vserver', [basestring, 'vserver-name'], False],
        'volume': [volume, 'volume', [basestring, 'volume-name'], False],
        'snapshot': [snapshot, 'snapshot', [basestring, 'snapshot-id'], False],
    }
    # Reply schema: element name -> [type, is-list]
    reply_schema = {
        'snapshot-owner-names': [basestring, True],
    }
    return self.request("snapmirror-snapshot-owner-get", owner_args, reply_schema)
def snapmirror_check(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, target_snapshot=None, destination_volume=None, source_location=None, destination_cluster=None):
    """Compare the contents of a snapshot between source and destination volumes.

    You must specify the destination endpoint. A job is spawned to
    operate on the snapmirror; its id is returned and its progress can
    be tracked with the job APIs.

    :param source_vserver: Name of the source Vserver. If used, also
        specify the source volume, and the source cluster on Data ONTAP
        8.1, or on 8.2 or later operating in Cluster-Mode if the
        relationship control plane is 'v1'.
    :param source_volume: Name of the source volume. If used, also
        specify the source Vserver, and the source cluster on Data ONTAP
        8.1 operating in Cluster-Mode, or on 8.2 or later in
        Cluster-Mode if the relationship control plane is 'v1'.
    :param source_cluster: Name of the source cluster; requires the
        source Vserver and source volume as well. Available only on Data
        ONTAP 8.1 operating in Cluster-Mode, and on 8.2 or later in
        Cluster-Mode if the relationship control plane is 'v1'.
    :param destination_vserver: Name of the destination Vserver. If
        used, also specify the destination volume, and the destination
        cluster on Data ONTAP 8.1, or on 8.2 or later in Cluster-Mode if
        the relationship control plane is 'v1'.
    :param destination_location: Destination endpoint in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode);
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, and 8.2
        Cluster-Mode relationships using an 8.1-compatible control
        plane); <[vserver:]volume> (8.2 or later Cluster-Mode otherwise,
        which depends on Vserver peering between the endpoints). The
        format may change in the future. Alternatively, specify the
        destination Vserver, volume, and (on 8.1 Cluster-Mode only)
        cluster by name.
    :param target_snapshot: Snapshot copy on the destination endpoint to
        check.
    :param destination_volume: Name of the destination volume. If used,
        also specify the destination Vserver, and the destination
        cluster on Data ONTAP 8.1 operating in Cluster-Mode, or on 8.2
        or later in Cluster-Mode if the relationship control plane is
        'v1'.
    :param source_location: Source endpoint, in the same formats (and
        with the same alternatives) as destination_location.
    :param destination_cluster: Name of the destination cluster;
        requires the destination Vserver and volume as well. Available
        only on Data ONTAP 8.1 operating in Cluster-Mode, and on 8.2 or
        later in Cluster-Mode if the relationship control plane is 'v1'.
    """
    # Marshalling table: python arg -> [value, wire-name, [type, subtype], is-list]
    check_args = {
        'source_vserver': [source_vserver, 'source-vserver', [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume', [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver', [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume', [basestring, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster', [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
        'target_snapshot': [target_snapshot, 'target-snapshot', [basestring, 'None'], False],
    }
    # Reply schema: async job result fields.
    reply_schema = {
        'result-error-message': [basestring, False],
        'result-jobid': [int, False],
        'result-error-code': [int, False],
        'result-status': [basestring, False],
    }
    return self.request("snapmirror-check", check_args, reply_schema)
def snapmirror_set_connection(self, connection, address_pair1, address_pair2=None, mode=None):
    """Add a new SnapMirror connection or modify an existing one.

    This API must be executed on the destination filer. Connections are
    currently stored in /etc/snapmirror.conf.

    :param connection: Name of the connection to add or modify; ASCII,
        beginning with an alpha character.
    :param address_pair1: First source/destination address pair. In
        multi mode it provides a connection path; in failover mode it is
        the preferred connection path.
    :param address_pair2: Second source/destination address pair. In
        multi mode it provides another connection path; in failover mode
        it is used when the first path fails.
    :param mode: "multi" or "failover"; defaults to "multi" when
        unspecified.
    """
    connection_args = {
        'connection': [connection, 'connection', [basestring, 'None'], False],
        'address_pair1': [address_pair1, 'address-pair1', [AddressPair, 'None'], False],
        'address_pair2': [address_pair2, 'address-pair2', [AddressPair, 'None'], False],
        'mode': [mode, 'mode', [basestring, 'None'], False],
    }
    # No typed elements in the reply.
    return self.request("snapmirror-set-connection", connection_args, {})
def snapmirror_abort(self, source_vserver=None, source_volume=None, check_only=None, clear_checkpoint=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """Stop ongoing transfers for a SnapMirror relationship.

    The relationship is identified by its destination endpoint, which
    must be specified. On Data ONTAP operating in Cluster-Mode this
    stops all active transfers to each associated volume on the
    receiving side of a set of load-sharing mirrors: for an up-to-date
    load-sharing mirror, transfers to the associated up-to-date mirrors
    in the set are also aborted; for a lagging one, only its own
    transfer is aborted. After success, the receiving volume may hold a
    restart checkpoint that a later transfer can use to restart and
    continue.

    Must be used from the destination storage system on 7-Mode, from the
    destination cluster on Data ONTAP 8.1 Cluster-Mode, or from the
    destination Vserver or cluster on 8.2 or later Cluster-Mode. Not
    supported when the destination endpoint is an Infinite Volume.

    :param source_vserver: Source Vserver of the relationship. If used,
        also specify the source volume, and the source cluster on Data
        ONTAP 8.1 operating in Cluster-Mode, or on 8.2 or later in
        Cluster-Mode if the relationship control plane is 'v1'.
    :param source_volume: Source volume of the relationship. If used,
        also specify the source Vserver, and the source cluster under
        the same conditions as for source_vserver.
    :param check_only: If true, only snapmirror-check operations active
        on the relationship are aborted.
    :param clear_checkpoint: If true, the restart checkpoint is cleared;
        the default is false (not cleared).
    :param source_cluster: Source cluster of the relationship; requires
        the source Vserver and source volume as well.
    :param destination_vserver: Destination Vserver of the relationship.
        If used, also specify the destination volume, and the
        destination cluster on Data ONTAP 8.1 operating in Cluster-Mode,
        or on 8.2 in Cluster-Mode if the relationship control plane is
        'v1'.
    :param destination_location: Destination endpoint in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode);
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, and 8.2
        Cluster-Mode relationships using an 8.1-compatible control
        plane); <[vserver:]volume> (8.2 or later Cluster-Mode otherwise,
        which depends on Vserver peering between the endpoints). The
        format may change in the future. On Cluster-Mode, use either
        this location or the destination cluster/Vserver/volume names.
        Mandatory on Data ONTAP operating in 7-mode.
    :param destination_volume: Destination volume of the relationship.
        If used, also specify the destination Vserver, and the
        destination cluster under the same conditions as for
        destination_vserver.
    :param source_location: Source endpoint, in the same formats as
        destination_location. On Cluster-Mode, use either this location
        or the source cluster/Vserver/volume names.
    :param destination_cluster: Destination cluster of the relationship;
        requires the destination Vserver and volume as well.
    """
    # Marshalling table: python arg -> [value, wire-name, [type, subtype], is-list]
    abort_args = {
        'source_vserver': [source_vserver, 'source-vserver', [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume', [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver', [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume', [basestring, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster', [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
        'check_only': [check_only, 'check-only', [bool, 'None'], False],
        'clear_checkpoint': [clear_checkpoint, 'clear-checkpoint', [bool, 'None'], False],
    }
    # No typed elements in the reply.
    return self.request("snapmirror-abort", abort_args, {})
def snapmirror_get_volume_status(self, volume):
    """
    Report SnapMirror status flags for one volume: whether it is the
    source or the destination of a SnapMirror relationship, whether a
    transfer is in progress, and whether the relationship is broken off.

    On Data ONTAP 8.1 operating in Cluster-Mode this API exists only for
    backward compatibility, must be issued on the cluster owning the
    volume, and fails for volumes participating in load-sharing
    relationships (use snapmirror-get-iter instead). On Data ONTAP 8.2
    or later operating in Cluster-Mode it is unsupported and returns an
    EOPNOTSUPPORTED error; use snapmirror-get-iter and
    snapmirror-get-destination-iter there.

    :param volume: Name of the volume to query. In Cluster-Mode the
        location may be given as [<cluster>:][//<vserver>/]<volume>.
    """
    params = {
        'volume': [volume, 'volume', [basestring, 'None'], False],
    }
    result_spec = {
        'is-destination': [bool, False],
        'is-transfer-in-progress': [bool, False],
        'is-transfer-broken': [bool, False],
        'is-source': [bool, False],
    }
    return self.request("snapmirror-get-volume-status", params, result_spec)
def snapmirror_off(self):
    """
    Disable SnapMirror data transfers and stop the SnapMirror scheduler.

    The outcome can be confirmed afterwards with the
    snapmirror-get-status API.
    """
    # No input parameters and no typed output for this call.
    return self.request("snapmirror-off", {}, {})
def snapmirror_get_iter(self, max_records=None, query=None, tag=None, desired_attributes=None):
    """
    Iterate over SnapMirror relationships.

    On Data ONTAP 8.1 operating in Cluster-Mode the API is issued on a
    cluster and returns every relationship with a source or destination
    endpoint in that cluster that matches the given parameters. On Data
    ONTAP 8.2 operating in Cluster-Mode it may be issued on a Vserver or
    a cluster and returns the matching relationships whose destination
    endpoint lives there.

    :param max_records: Maximum number of records returned per call
        (default: 20).
    :param query: Query over any snapmirror object attributes; all
        matching objects (up to 'max-records') are returned.
    :param tag: Pagination tag. Omit on the first call; on subsequent
        calls pass the 'next-tag' value from the previous response.
    :param desired_attributes: Restrict the attributes returned. When
        omitted, every available attribute is returned.
    """
    params = {
        'max_records': max_records,
        'query': [query, 'query', [SnapmirrorInfo, 'None'], False],
        'tag': tag,
        'desired_attributes': [desired_attributes, 'desired-attributes', [SnapmirrorInfo, 'None'], False],
    }
    result_spec = {
        'attributes-list': [SnapmirrorInfo, True],
    }
    return self.request("snapmirror-get-iter", params, result_spec)
def snapmirror_release(self, source_vserver=None, source_volume=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, relationship_id=None, relationship_info_only=None):
    """
    Remove a SnapMirror relationship on the source endpoint, unlocking
    and cleaning up the snapshots the relationship holds. No volume is
    destroyed. The destination endpoint must be specified.

    Unless relationship-info-only is set, the call fails if the source
    volume cannot be reached for snapshot cleanup.

    On Data ONTAP 8.2 or later operating in Cluster-Mode, execute this
    on the source Vserver (Vserver context) or source cluster (cluster
    context) after deleting the relationship on the destination side.
    Releasing on the source without deleting on the destination leaves
    the relationship in existence — the destination is authoritative and
    the relationship reappears on the source at the next transfer.
    On Data ONTAP operating in 7-Mode, issue this on the source storage
    system. Not supported on Data ONTAP 8.1 operating in Cluster-Mode,
    nor on 8.2+ Cluster-Mode when the relationship control plane is 'v1'.

    :param source_vserver: Source Vserver of the relationship; requires
        source_volume as well.
    :param source_volume: Source volume of the relationship; requires
        source_vserver as well.
    :param destination_vserver: Destination Vserver of the relationship;
        requires destination_volume as well.
    :param destination_location: Destination endpoint, in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode);
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, or 8.2
        Cluster-Mode with an 8.1-compatible control plane);
        <[vserver:]volume> (8.2+ Cluster-Mode otherwise — depends on the
        Vserver peering setup; this format may change in the future).
        In Cluster-Mode either the destination location, or the
        destination Vserver and volume, must be given. Mandatory on
        7-Mode.
    :param destination_volume: Destination volume of the relationship;
        requires destination_vserver as well.
    :param source_location: Source endpoint, in the same formats as
        destination_location. In Cluster-Mode either the source
        location, or the source Vserver and volume, must be given.
    :param relationship_id: Unique identifier of the SnapMirror
        relationship.
    :param relationship_info_only: When true, only relationship
        information is removed. Defaults to false.
    """
    params = {
        'source_vserver': [source_vserver, 'source-vserver', [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume', [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver', [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
        'relationship_id': [relationship_id, 'relationship-id', [basestring, 'None'], False],
        'relationship_info_only': [relationship_info_only, 'relationship-info-only', [bool, 'None'], False],
    }
    return self.request("snapmirror-release", params, {})
def snapmirror_list_connections(self, connection=None):
    """
    Return connection information for one connection, or for all of
    them when no name is given. Must be executed on the destination
    filer; the connections currently live in /etc/snapmirror.conf.

    :param connection: Name of the connection to look up. When omitted,
        information for every connection is returned.
    """
    params = {
        'connection': [connection, 'connection', [basestring, 'None'], False],
    }
    result_spec = {
        'snapmirror-connections': [SnapmirrorConnectionInfo, True],
    }
    return self.request("snapmirror-list-connections", params, result_spec)
def snapmirror_resync(self, preserve=None, source_vserver=None, source_volume=None, destination_snapshot=None, transfer_priority=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, source_snapshot=None, max_transfer_rate=None, destination_cluster=None):
    """
    Re-establish a mirroring relationship between a source and a
    destination volume, typically when the destination mirror is broken
    (the destination became read-write) or when a snapmirror-update
    failed because the required common Snapshot copy was deleted on the
    source. Afterwards the destination volume is a data protection
    mirror again and can be updated manually or on a schedule.

    Attention: this API can cause data loss on the destination because
    it may remove the exported Snapshot copy there.

    Default behavior: the most recent common Snapshot copy between the
    endpoints is found, newer Snapshot copies on the destination are
    removed, and the destination is mounted as a DP volume with the
    common copy exported. For data protection relationships a new
    Snapshot copy of the source is taken and copies newer than the
    common one are transferred; for vault relationships, copies newer
    than the common one are transferred according to the relationship
    policy's rules (see the snapmirror-policy APIs).

    With the optional 'preserve' parameter (Data ONTAP 8.2+
    Cluster-Mode, vault relationships only) destination Snapshot copies
    newer than the common one are kept: a local rollback transfer copies
    the common Snapshot copy and establishes it as the latest on the
    destination, then all newer copies matching the vault policy are
    transferred from the source.

    The call fails when no common Snapshot copy exists between the
    endpoints. On 8.1 Cluster-Mode (or 8.2 with an 8.1-compatible
    control plane) a job is spawned and its id returned; track it with
    the job APIs. On 8.2+ Cluster-Mode otherwise, track progress with
    snapmirror-get. On 7-Mode the update is handled asynchronously with
    no guarantee of success, and a schedule in /etc/snapmirror.conf is
    required for the destination. Issue the API on the destination
    storage system (7-Mode), destination cluster (8.1 Cluster-Mode), or
    destination Vserver (8.2+ Cluster-Mode).

    :param preserve: Vault relationships only. False (default) deletes
        destination snapshots newer than the latest common snapshot;
        true retains them. Fails for non-vault relationships. Only
        applies on Data ONTAP 8.2+ operating in Cluster-Mode.
    :param source_vserver: Source Vserver. Also requires the source
        volume, and the source cluster on 8.1 Cluster-Mode or on 8.2+
        Cluster-Mode when the relationship control plane is 'v1'.
    :param source_volume: Source volume. Also requires the source
        Vserver, and the source cluster on 8.1 Cluster-Mode or on 8.2+
        Cluster-Mode when the relationship control plane is 'v1'.
    :param destination_snapshot: Extra snapshot (besides the regular
        SnapMirror snapshot) created on the destination after a qtree
        SnapMirror transfer completes.
    :param transfer_priority: Priority at which the transfer runs:
        "normal" or "low". Defaults to the value in the relationship's
        snapmirror policy. Only applies on 8.2+ Cluster-Mode when the
        control plane is 'v2'.
    :param source_cluster: Source cluster; also requires the source
        Vserver and source volume.
    :param destination_vserver: Destination Vserver. Also requires the
        destination volume, and the destination cluster on 8.1
        Cluster-Mode or on 8.2 Cluster-Mode when the control plane is
        'v1'.
    :param destination_location: Destination endpoint, in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode);
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, or 8.2
        Cluster-Mode with an 8.1-compatible control plane);
        <[vserver:]volume> (8.2+ Cluster-Mode otherwise — depends on the
        Vserver peering setup; this format may change in the future).
        In Cluster-Mode either the destination location, or the
        destination cluster, Vserver and volume, must be given.
        Mandatory on 7-Mode.
    :param destination_volume: Destination volume. Also requires the
        destination Vserver, and the destination cluster on 8.1
        Cluster-Mode or on 8.2 Cluster-Mode when the control plane is
        'v1'.
    :param source_location: Source endpoint, in the same formats as
        destination_location. In Cluster-Mode either the source
        location, or the source cluster, Vserver and volume, must be
        given. On 7-Mode, when omitted, the source from
        /etc/snapmirror.conf for the destination path is used.
    :param source_snapshot: Source snapshot for a qtree update (7-Mode),
        or the source-volume snapshot used for the transfer (8.2+
        Cluster-Mode). For data protection mirrors no new Snapshot copy
        is created: the named copy is treated as the most recent, so
        copies between the common one and it are transferred, and
        nothing newer. For vault relationships the named copy is
        transferred instead of the ones matching the policy's rules.
        Only applies on 8.2+ Cluster-Mode when the control plane is
        'v2'.
    :param max_transfer_rate: Upper bound on transfer rate in kilobytes
        per second; 0 (default) is unlimited. In Cluster-Mode it does
        not affect load-sharing transfers nor transfers of Pre 8.2
        Relationship Capability relationships confined to one cluster.
    :param destination_cluster: Destination cluster; also requires the
        destination Vserver and destination volume.
    """
    params = {
        'preserve': [preserve, 'preserve', [bool, 'None'], False],
        'source_vserver': [source_vserver, 'source-vserver', [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume', [basestring, 'None'], False],
        'destination_snapshot': [destination_snapshot, 'destination-snapshot', [basestring, 'None'], False],
        'transfer_priority': [transfer_priority, 'transfer-priority', [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster', [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver', [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
        'source_snapshot': [source_snapshot, 'source-snapshot', [basestring, 'None'], False],
        'max_transfer_rate': [max_transfer_rate, 'max-transfer-rate', [int, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster', [basestring, 'None'], False],
    }
    result_spec = {
        'result-error-message': [basestring, False],
        'result-jobid': [int, False],
        'result-error-code': [int, False],
        'result-status': [basestring, False],
    }
    return self.request("snapmirror-resync", params, result_spec)
def snapmirror_on(self):
    """
    Enable SnapMirror data transfers and start the SnapMirror scheduler.

    The outcome can be confirmed afterwards with the
    snapmirror-get-status API.
    """
    # No input parameters and no typed output for this call.
    return self.request("snapmirror-on", {}, {})
def snapmirror_get_destination_iter(self, max_records=None, query=None, tag=None, desired_attributes=None):
    """
    Iterate over SnapMirror relationships whose source endpoints are in
    the Vserver or cluster the API is issued on.

    The information may be stale: entries corresponding to relationships
    already deleted on their destination cluster or Vserver may remain,
    possibly yielding several entries with identical endpoints but
    different relationship IDs. A relationship is not visible on its
    source side until at least one transfer has been initiated.

    Only supported on Data ONTAP 8.2 and above operating in
    Cluster-Mode; may be issued on a Vserver or a Cluster.

    :param max_records: Maximum number of records returned per call
        (default: 20).
    :param query: Query over any snapmirror object attributes; all
        matching objects (up to 'max-records') are returned.
    :param tag: Pagination tag. Omit on the first call; on subsequent
        calls pass the 'next-tag' value from the previous response.
    :param desired_attributes: Restrict the attributes returned. When
        omitted, every available attribute is returned.
    """
    params = {
        'max_records': max_records,
        'query': [query, 'query', [SnapmirrorDestinationInfo, 'None'], False],
        'tag': tag,
        'desired_attributes': [desired_attributes, 'desired-attributes', [SnapmirrorDestinationInfo, 'None'], False],
    }
    result_spec = {
        'attributes-list': [SnapmirrorDestinationInfo, True],
    }
    return self.request("snapmirror-get-destination-iter", params, result_spec)
def snapmirror_list_sync_schedule(self, destination_location=None):
    """
    Return the synchronous schedule for one destination, or for all
    destinations when no location is given. Must be executed on the
    destination filer; the schedules currently live in
    /etc/snapmirror.conf.

    :param destination_location: Destination of the schedule to obtain,
        in volume form <filer>:<volume> or qtree form
        <filer>:/vol/<volume>/<qtree>. The <filer> must match the
        destination filer. When omitted, all destination schedules are
        returned.
    """
    params = {
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
    }
    result_spec = {
        'snapmirror-sync-schedules': [SnapmirrorSyncScheduleInfo, True],
    }
    return self.request("snapmirror-list-sync-schedule", params, result_spec)
def snapmirror_cache_rebuild_relationship(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """
    Rebuild the cache for a SnapMirror relationship (auto generated).

    :param source_vserver: Name of the source Vserver. Should be
        accompanied by the source volume name, and by the source cluster
        name on Data ONTAP 8.1, or on 8.2+ Cluster-Mode when the
        relationship control plane is 'v1'.
    :param source_volume: Name of the source volume. Should be
        accompanied by the source Vserver name, and by the source
        cluster name on 8.1 Cluster-Mode, or on 8.2+ Cluster-Mode when
        the control plane is 'v1'.
    :param source_cluster: Name of the source cluster; also requires the
        source Vserver and source volume names. Available only on 8.1
        Cluster-Mode, or on 8.2+ Cluster-Mode when the control plane is
        'v1'.
    :param destination_vserver: Name of the destination Vserver. Should
        be accompanied by the destination volume name, and by the
        destination cluster name on Data ONTAP 8.1, or on 8.2+
        Cluster-Mode when the control plane is 'v1'.
    :param destination_location: Destination endpoint, in one of:
        <system>:/vol/<volume>[/<qtree>] (7-Mode);
        [<cluster>:]//<vserver>/<volume> (8.1 Cluster-Mode, or 8.2
        Cluster-Mode with an 8.1-compatible control plane);
        <[vserver:]volume> (8.2+ Cluster-Mode otherwise — depends on the
        Vserver peering setup). This format may change in the future.
        Alternatively specify the destination Vserver, volume, and
        cluster names; the cluster name is only required on 8.1
        Cluster-Mode.
    :param destination_volume: Name of the destination volume. Should be
        accompanied by the destination Vserver name, and by the
        destination cluster name on 8.1 Cluster-Mode, or on 8.2+
        Cluster-Mode when the control plane is 'v1'.
    :param source_location: Source endpoint, in the same formats as
        destination_location. Alternatively specify the source Vserver,
        volume, and cluster names; the cluster name is only required on
        8.1 Cluster-Mode.
    :param destination_cluster: Name of the destination cluster; also
        requires the destination Vserver and volume names. Available
        only on 8.1 Cluster-Mode, or on 8.2+ Cluster-Mode when the
        control plane is 'v1'.
    """
    params = {
        'source_vserver': [source_vserver, 'source-vserver', [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume', [basestring, 'None'], False],
        'source_cluster': [source_cluster, 'source-cluster', [basestring, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver', [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
        'destination_cluster': [destination_cluster, 'destination-cluster', [basestring, 'None'], False],
    }
    return self.request("snapmirror-cache-rebuild-relationship", params, {})
def snapmirror_quiesce(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """
    Disable future transfers to a SnapMirror destination.

    If no transfer is in progress the relationship becomes 'Quiesced';
    otherwise it becomes 'Quiescing' until the current transfer
    completes.  If the current transfer aborts, it is treated like a
    future transfer and does not restart.  A quiesced relationship
    remains quiesced across reboots and fail-overs.

    The relationship must exist on the destination, and the destination
    endpoint must be specified.  Issue this API from the destination
    storage system on Data ONTAP 7-Mode, from the destination cluster
    on Data ONTAP 8.1 Cluster-Mode, and from the destination Vserver on
    Data ONTAP 8.2 or later Cluster-Mode.  On Data ONTAP 8.1
    Cluster-Mode, quiescing a load-sharing (LS) relationship quiesces
    every relationship in the set.

    :param source_vserver: Source Vserver of the relationship.  Also
        requires the source volume, plus the source cluster on Data
        ONTAP 8.1 Cluster-Mode or on 8.2+ when the relationship control
        plane is 'v1'.
    :param source_volume: Source volume of the relationship.  Also
        requires the source Vserver, plus the source cluster as above.
    :param source_cluster: Source cluster of the relationship.  Also
        requires the source Vserver and source volume.
    :param destination_vserver: Destination Vserver of the relationship.
        Also requires the destination volume, plus the destination
        cluster as above.
    :param destination_location: Destination endpoint, in one of:
        <system>:/vol/<volume>[/<qtree>] on 7-Mode;
        [<cluster>:]//<vserver>/<volume> on 8.1 Cluster-Mode (and 8.2
        with a 'v1'-compatible control plane);
        <[vserver:]volume> on 8.2+ Cluster-Mode (depends on Vserver
        peering between source and destination).  These formats may
        change in the future.  Mandatory on 7-Mode; on Cluster-Mode use
        either this location or the destination cluster, Vserver, and
        volume.
    :param destination_volume: Destination volume of the relationship.
        Also requires the destination Vserver, plus the destination
        cluster as above.
    :param source_location: Source endpoint, in the same formats as
        destination-location.  On Cluster-Mode use either this location
        or the source cluster, Vserver, and volume.
    :param destination_cluster: Destination cluster of the relationship.
        Also requires the destination Vserver and destination volume.
    """
    # Every input is an optional string on the wire, encoded as
    # [value, zapi-element-name, [type, 'None'], is-array].
    def _opt_str(value, zapi_name):
        return [value, zapi_name, [basestring, 'None'], False]

    zapi_args = {
        'source_vserver': _opt_str(source_vserver, 'source-vserver'),
        'source_volume': _opt_str(source_volume, 'source-volume'),
        'source_cluster': _opt_str(source_cluster, 'source-cluster'),
        'destination_vserver': _opt_str(destination_vserver, 'destination-vserver'),
        'destination_location': _opt_str(destination_location, 'destination-location'),
        'destination_volume': _opt_str(destination_volume, 'destination-volume'),
        'source_location': _opt_str(source_location, 'source-location'),
        'destination_cluster': _opt_str(destination_cluster, 'destination-cluster'),
    }
    # This API returns no typed output elements.
    return self.request("snapmirror-quiesce", zapi_args, {})
def snapmirror_destroy_async(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """
    Remove the SnapMirror relationship between a source and a
    destination Infinite Volume.

    Only the relationship is removed: the volumes themselves are not
    destroyed and their Snapshot copies are not deleted, and each
    volume keeps its read-write or read-only attributes.  The
    destination endpoint must be specified, and the call fails if a
    transfer for the relationship is in progress.

    Use this API from the destination cluster: destination-side
    information is cleaned up, and cleanup of source-side information
    is attempted; if the source cluster is unavailable, the
    destination-side cleanup still happens.  Not supported when the
    destination endpoint is a flexible volume or an Infinite Volume
    constituent.

    A job is spawned to operate on the SnapMirror relationship and its
    id is returned; track progress with the job APIs.

    :param source_vserver: Name of the source Vserver.  Also requires
        the source volume, plus the source cluster on Data ONTAP 8.1,
        or on 8.2+ Cluster-Mode when the relationship control plane is
        'v1'.
    :param source_volume: Name of the source volume.  Also requires the
        source Vserver, plus the source cluster as above.
    :param source_cluster: Name of the source cluster.  Also requires
        the source Vserver and source volume.  Available only on Data
        ONTAP 8.1 Cluster-Mode, or on 8.2+ Cluster-Mode when the
        relationship control plane is 'v1'.
    :param destination_vserver: Name of the destination Vserver.  Also
        requires the destination volume, plus the destination cluster
        as above.
    :param destination_location: Destination endpoint, in one of:
        <system>:/vol/<volume>[/<qtree>] on 7-Mode;
        [<cluster>:]//<vserver>/<volume> on 8.1 Cluster-Mode (and 8.2
        with a 'v1'-compatible control plane);
        <[vserver:]volume> on 8.2+ Cluster-Mode (depends on Vserver
        peering between source and destination).  These formats may
        change.  Alternatively, give the destination Vserver, volume,
        and cluster (the cluster is required only on 8.1 Cluster-Mode).
    :param destination_volume: Name of the destination volume.  Also
        requires the destination Vserver, plus the destination cluster
        as above.
    :param source_location: Source endpoint, in the same formats as
        destination-location; or give the source Vserver, volume, and
        cluster (the cluster is required only on 8.1 Cluster-Mode).
    :param destination_cluster: Name of the destination cluster.  Also
        requires the destination Vserver and destination volume.
        Available only on 8.1 Cluster-Mode, or on 8.2+ Cluster-Mode
        when the relationship control plane is 'v1'.
    """
    # Every input is an optional string on the wire, encoded as
    # [value, zapi-element-name, [type, 'None'], is-array].
    def _opt_str(value, zapi_name):
        return [value, zapi_name, [basestring, 'None'], False]

    zapi_args = {
        'source_vserver': _opt_str(source_vserver, 'source-vserver'),
        'source_volume': _opt_str(source_volume, 'source-volume'),
        'source_cluster': _opt_str(source_cluster, 'source-cluster'),
        'destination_vserver': _opt_str(destination_vserver, 'destination-vserver'),
        'destination_location': _opt_str(destination_location, 'destination-location'),
        'destination_volume': _opt_str(destination_volume, 'destination-volume'),
        'source_location': _opt_str(source_location, 'source-location'),
        'destination_cluster': _opt_str(destination_cluster, 'destination-cluster'),
    }
    # Typed output elements: status/error plus the id of the spawned job.
    zapi_output = {
        'result-error-message': [basestring, False],
        'result-jobid': [int, False],
        'result-error-code': [int, False],
        'result-status': [basestring, False],
    }
    return self.request("snapmirror-destroy-async", zapi_args, zapi_output)
def snapmirror_update(self, source_vserver=None, source_volume=None, destination_snapshot=None, transfer_priority=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, source_snapshot=None, max_transfer_rate=None, destination_cluster=None):
    """
    Update the destination endpoint of a SnapMirror relationship,
    making the destination volume an up-to-date mirror of the source.

    The update is handled asynchronously and is not guaranteed to
    succeed.  On 7-Mode, check status with snapmirror-get-status (issue
    the API on the destination storage system).  On Data ONTAP 8.1
    Cluster-Mode — and on 8.2 for relationships whose control plane is
    'v1' — a job is spawned and its id returned; track it with the job
    APIs.  On 8.2 or later Cluster-Mode (except 'v1' control plane),
    track progress with snapmirror-get.

    The destination endpoint must be specified.  Issue this API from
    the destination storage system (7-Mode), the destination cluster
    (8.1 Cluster-Mode), or the destination Vserver (8.2+ Cluster-Mode).
    On 7-Mode a destination volume must be restricted, and a
    destination qtree must not already exist.  On Cluster-Mode an empty
    destination volume causes the update to fail: run
    snapmirror-initialize first for the baseline transfer.

    For data protection relationships the update (1) snapshots a
    read-write source to capture its current image, (2) finds the most
    recent destination Snapshot copy and validates its counterpart on
    the source, and (3) incrementally transfers newer Snapshot copies.
    For vault relationships no source Snapshot copy is taken; instead,
    Snapshot copies newer than the common one are selected by matching
    their 'snapmirror-label' against the rules of the relationship's
    SnapMirror policy, and all matches are transferred.  Vault updates
    also expire destination Snapshot copies that exceed the matching
    rule's 'keep' value, deleting same-label copies oldest-first.

    After a successful update the last Snapshot copy transferred
    becomes the exported one on the destination — unless a vault update
    specified a 'source-snapshot' older than the common snapshot, in
    which case the exported copy is unchanged.  If the update fails
    (network failure, snapmirror-abort, ...) a restart checkpoint may
    be recorded, and the next update continues from it; for vault
    relationships the old transfer is continued regardless of whether
    it is a matching Snapshot copy.

    On 8.1 Cluster-Mode this API can also bring a lagging load-sharing
    mirror up to date with the other mirrors in its set; it may need to
    be run more than once if it does not finish before the set's next
    scheduled update.

    :param source_vserver: Source Vserver.  Also requires the source
        volume, plus the source cluster on 8.1 Cluster-Mode or on 8.2+
        when the relationship control plane is 'v1'.
    :param source_volume: Source volume.  Also requires the source
        Vserver, plus the source cluster as above.
    :param destination_snapshot: Extra snapshot (besides the regular
        SnapMirror snapshot) created on the destination after a qtree
        SnapMirror transfer completes.
    :param transfer_priority: Transfer priority, "normal" or "low";
        defaults to the value in the relationship's snapmirror policy.
        Applies only on 8.2+ Cluster-Mode with a 'v2' control plane.
    :param source_cluster: Source cluster.  Also requires the source
        Vserver and source volume.
    :param destination_vserver: Destination Vserver.  Also requires the
        destination volume, plus the destination cluster as above.
    :param destination_location: Destination endpoint, in one of:
        <system>:/vol/<volume>[/<qtree>] on 7-Mode;
        [<cluster>:]//<vserver>/<volume> on 8.1 Cluster-Mode (and 8.2
        with a 'v1'-compatible control plane);
        <[vserver:]volume> on 8.2+ Cluster-Mode (depends on Vserver
        peering).  These formats may change.  Mandatory on 7-Mode
        (where a volume endpoint must be restricted and a qtree
        endpoint must not exist); on Cluster-Mode use either this
        location or the destination cluster, Vserver, and volume.
    :param destination_volume: Destination volume.  Also requires the
        destination Vserver, plus the destination cluster as above.
    :param source_location: Source endpoint, in the same formats as
        destination-location.  On Cluster-Mode use either this location
        or the source cluster, Vserver, and volume.  On 7-Mode, when
        omitted, the source from /etc/snapmirror.conf for the
        destination path is used.
    :param source_snapshot: Snapshot copy on the source to use as the
        update basis (7-Mode qtree and Cluster-Mode relationships).
        For a 7-Mode qtree relationship the specified copy is
        transferred instead of a new one.  For Cluster-Mode data
        protection mirrors no new copy is created: the specified copy
        is treated as the most recent, so copies between the common one
        and it are transferred, and nothing newer.  For vault
        relationships the specified copy is transferred instead of the
        policy-selected ones.
    :param max_transfer_rate: Upper bound on transfer rate in
        kilobytes/second; 0 (the default) means unlimited.  On
        Cluster-Mode this does not affect load-sharing transfers or
        transfers for relationships with Relationship Capability of
        Pre 8.2 confined to a single cluster.
    :param destination_cluster: Destination cluster.  Also requires the
        destination Vserver and destination volume.
    """
    # Wire encoding for each input:
    # [value, zapi-element-name, [type, 'None'], is-array].
    def _opt(value, zapi_name, kind=basestring):
        return [value, zapi_name, [kind, 'None'], False]

    zapi_args = {
        'source_vserver': _opt(source_vserver, 'source-vserver'),
        'source_volume': _opt(source_volume, 'source-volume'),
        'destination_snapshot': _opt(destination_snapshot, 'destination-snapshot'),
        'transfer_priority': _opt(transfer_priority, 'transfer-priority'),
        'source_cluster': _opt(source_cluster, 'source-cluster'),
        'destination_vserver': _opt(destination_vserver, 'destination-vserver'),
        'destination_location': _opt(destination_location, 'destination-location'),
        'destination_volume': _opt(destination_volume, 'destination-volume'),
        'source_location': _opt(source_location, 'source-location'),
        'source_snapshot': _opt(source_snapshot, 'source-snapshot'),
        # The only non-string input: a kilobytes/second integer.
        'max_transfer_rate': _opt(max_transfer_rate, 'max-transfer-rate', int),
        'destination_cluster': _opt(destination_cluster, 'destination-cluster'),
    }
    # Typed output elements: status/error plus the id of the spawned job.
    zapi_output = {
        'result-error-message': [basestring, False],
        'result-jobid': [int, False],
        'result-error-code': [int, False],
        'result-status': [basestring, False],
    }
    return self.request("snapmirror-update", zapi_args, zapi_output)
def snapmirror_abort_async(self, source_vserver=None, source_volume=None, check_only=None, clear_checkpoint=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
    """
    Stop an ongoing transfer for a SnapMirror relationship on an
    Infinite Volume, identified by its destination endpoint.

    The destination endpoint must be specified.  After a successful
    abort, the receiving volume may hold a restart checkpoint that a
    subsequent transfer can use to restart and continue the aborted
    transfer.  Use this API from the destination cluster on Data ONTAP
    Cluster-Mode.  Not supported when the destination endpoint is a
    flexible volume or an Infinite Volume constituent.

    A job is spawned to operate on the SnapMirror relationship and its
    id is returned; track progress with the job APIs.

    :param source_vserver: Name of the source Vserver.  Also requires
        the source volume, plus the source cluster on Data ONTAP 8.1,
        or on 8.2+ Cluster-Mode when the relationship control plane is
        'v1'.
    :param source_volume: Name of the source volume.  Also requires the
        source Vserver, plus the source cluster as above.
    :param check_only: When true, abort only snapmirror-check
        operations active on the relationship.
    :param clear_checkpoint: When true, discard the restart checkpoint
        and restore the destination volume to the last successfully
        transferred Snapshot copy, forcing the next transfer to start
        with a fresh Snapshot copy on the destination.
    :param source_cluster: Name of the source cluster.  Also requires
        the source Vserver and source volume.  Available only on 8.1
        Cluster-Mode, or on 8.2+ Cluster-Mode when the relationship
        control plane is 'v1'.
    :param destination_vserver: Name of the destination Vserver.  Also
        requires the destination volume, plus the destination cluster
        as above.
    :param destination_location: Destination endpoint, in one of:
        <system>:/vol/<volume>[/<qtree>] on 7-Mode;
        [<cluster>:]//<vserver>/<volume> on 8.1 Cluster-Mode (and 8.2
        with a 'v1'-compatible control plane);
        <[vserver:]volume> on 8.2+ Cluster-Mode (depends on Vserver
        peering between source and destination).  These formats may
        change.  Alternatively, give the destination Vserver, volume,
        and cluster (the cluster is required only on 8.1 Cluster-Mode).
    :param destination_volume: Name of the destination volume.  Also
        requires the destination Vserver, plus the destination cluster
        as above.
    :param source_location: Source endpoint, in the same formats as
        destination-location; or give the source Vserver, volume, and
        cluster (the cluster is required only on 8.1 Cluster-Mode).
    :param destination_cluster: Name of the destination cluster.  Also
        requires the destination Vserver and destination volume.
        Available only on 8.1 Cluster-Mode, or on 8.2+ Cluster-Mode
        when the relationship control plane is 'v1'.
    """
    # Wire encoding for each input:
    # [value, zapi-element-name, [type, 'None'], is-array].
    def _opt(value, zapi_name, kind=basestring):
        return [value, zapi_name, [kind, 'None'], False]

    zapi_args = {
        'source_vserver': _opt(source_vserver, 'source-vserver'),
        'source_volume': _opt(source_volume, 'source-volume'),
        # The two flags are booleans; everything else is a string.
        'check_only': _opt(check_only, 'check-only', bool),
        'clear_checkpoint': _opt(clear_checkpoint, 'clear-checkpoint', bool),
        'source_cluster': _opt(source_cluster, 'source-cluster'),
        'destination_vserver': _opt(destination_vserver, 'destination-vserver'),
        'destination_location': _opt(destination_location, 'destination-location'),
        'destination_volume': _opt(destination_volume, 'destination-volume'),
        'source_location': _opt(source_location, 'source-location'),
        'destination_cluster': _opt(destination_cluster, 'destination-cluster'),
    }
    # Typed output elements: status/error plus the id of the spawned job.
    zapi_output = {
        'result-error-message': [basestring, False],
        'result-jobid': [int, False],
        'result-error-code': [int, False],
        'result-status': [basestring, False],
    }
    return self.request("snapmirror-abort-async", zapi_args, zapi_output)
def snapmirror_release_iter(self, query, max_failure_count=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None):
"""
releases one or a group of SnapMirror relationships.
:param query: If operating on a specific snapmirror, this input element must
specify all keys.
If operating on snapmirror objects based on query, this input
element must specify a query.
:param max_failure_count: When allowing failures ('continue-on-failure' is set to true),
then this input element may be provided to limit the number of
failed operations before the server gives up and returns.
If set, the API will continue with the next matching snapmirror
even when the operation on a previous matching snapmirror fails,
and do so until the total number of objects failed to be operated
on reaches the maximum specified.
If set to the maximum or not provided, then there will be no
limit on the number of failed operations.
Only applicable if 'continue-on-failure' is set to true.
Default: 2^32-1
:param max_records: The maximum number of snapmirror objects to be operated in this
call.
Default: 20
:param return_success_list: If set to true, the API will return the list of snapmirror
objects (just keys) that were successfully operated on.
If set to false, the list of snapmirror objects operated on will
not be returned.
Default: true
:param tag: Specify the tag from the last call.
It is usually not specified for the first call. For subsequent
calls, copy values from the next-tag obtained from the previous
call.
:param continue_on_failure: This input element is useful when multiple snapmirror objects
match a given query.
If set to true, the API will continue with the next matching
snapmirror even when the operation fails for the snapmirror.
If set to false, the API will return on the first failure.
Default: false
:param return_failure_list: If set to true, the API will return the list of snapmirror
objects (just keys) that were not operated on due to some error.
If set to false, the list of snapmirror objects not operated on
will not be returned.
Default: true
"""
return self.request( "snapmirror-release-iter", {
'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
'max_records': max_records,
'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
'tag': tag,
'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
'query': [ query, 'query', [ SnapmirrorDestinationInfo, 'None' ], False ],
}, {
'num-succeeded': [ int, False ],
'num-failed': [ int, False ],
'success-list': [ SnapmirrorReleaseIterInfo, True ],
'failure-list': [ SnapmirrorReleaseIterInfo, True ],
} )
def snapmirror_update_ls_set(self, source_cluster=None, source_vserver=None, source_location=None, source_volume=None):
"""
The snapmirror-update-ls-set API updates destination volumes of
the set of load-sharing mirrors. The API makes destination
volumes, in the group of load-sharing mirrors, up-to-date mirrors
of the source volume.
You must specify the source endpoint when using
snapmirror-update-ls-set.
Separate SnapMirror transfers are performed from the source
volume to each of the up-to-date destination volumes in the set
of load-sharing mirrors.
Load-sharing mirrors that lag behind the up-to-date destination
volumes might not be updated by the snapmirror-update-ls-set API.
Use the snapmirror-update API to update a lagging load-sharing
mirror.
A job will be spawned to operate on the snapmirror and the job id
will be returned.
The progress of the job can be tracked using the job APIs.
:param source_cluster: Specifies the source cluster of the SnapMirror relationship. The
source Vserver and source volume must also be specified if using
this parameter. This parameter is supported only in cluster
context.
:param source_vserver: Specifies the source Vserver of the SnapMirror relationship. The
source cluster and source volume must also be specified if using
this parameter.
:param source_location: Specifies the source endpoint of the SnapMirror relationship.
When specifying a source endpoint, you must use either the source
location, or the source cluster, source Vserver, and source
volume.
:param source_volume: Specifies the source volume of the SnapMirror relationship. The
source cluster and source Vserver must also be specified if using
this parameter.
"""
return self.request( "snapmirror-update-ls-set", {
'source_cluster': [ source_cluster, 'source-cluster', [ basestring, 'None' ], False ],
'source_vserver': [ source_vserver, 'source-vserver', [ basestring, 'None' ], False ],
'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
'source_volume': [ source_volume, 'source-volume', [ basestring, 'None' ], False ],
}, {
'result-error-message': [ basestring, False ],
'result-jobid': [ int, False ],
'result-error-code': [ int, False ],
'result-status': [ basestring, False ],
} )
def snapmirror_resync_iter(self, query, preserve=None, max_failure_count=None, transfer_priority=None, max_records=None, return_success_list=None, tag=None, continue_on_failure=None, return_failure_list=None, source_snapshot=None, max_transfer_rate=None):
"""
The snapmirror-resync-iter API reestablishes one or more
previously broken SnapMirror relationships. This API is not
supported on Infinite Volume constituents.
A job will be spawned to operate on the snapmirror and the job id
will be returned.
The progress of the job can be tracked using the job APIs.
:param query: If operating on a specific snapmirror, this input element must
specify all keys.
If operating on snapmirror objects based on query, this input
element must specify a query.
:param preserve: Preserve
:param max_failure_count: When allowing failures ('continue-on-failure' is set to true),
then this input element may be provided to limit the number of
failed operations before the server gives up and returns.
If set, the API will continue with the next matching snapmirror
even when the operation on a previous matching snapmirror fails,
and do so until the total number of objects failed to be operated
on reaches the maximum specified.
If set to the maximum or not provided, then there will be no
limit on the number of failed operations.
Only applicable if 'continue-on-failure' is set to true.
Default: 2^32-1
:param transfer_priority: Transfer Priority
Possible values:
<ul>
<li> "low" ,
<li> "normal"
</ul>
:param max_records: The maximum number of snapmirror objects to be operated in this
call.
Default: 20
:param return_success_list: If set to true, the API will return the list of snapmirror
objects (just keys) that were successfully operated on or
scheduled to be worked on.
If set to false, the list of snapmirror objects operated on will
not be returned.
Default: true
:param tag: Specify the tag from the last call.
It is usually not specified for the first call. For subsequent
calls, copy values from the next-tag obtained from the previous
call.
:param continue_on_failure: This input element is useful when multiple snapmirror objects
match a given query.
If set to true, the API will continue with the next matching
snapmirror even when the operation fails for the snapmirror.
If set to false, the API will return on the first failure.
Default: false
:param return_failure_list: If set to true, the API will return the list of snapmirror
objects (just keys) that were not operated on due to some error.
If set to false, the list of snapmirror objects not operated on
will not be returned.
Default: true
:param source_snapshot: Source Snapshot
:param max_transfer_rate: Specifies the upper bound, in kilobytes per second, at which data
is transferred between clusters. The default is unlimited (0)
which permits the SnapMirror relationship to fully utilize the
available network bandwidth. The max-transfer-rate option does
not affect load-sharing transfers and transfers for other
relationships with Relationship Capability of Pre 8.2 confined to
a single cluster.
"""
return self.request( "snapmirror-resync-iter", {
'preserve': [ preserve, 'preserve', [ bool, 'None' ], False ],
'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
'transfer_priority': [ transfer_priority, 'transfer-priority', [ basestring, 'sm-transfer-priority-enum' ], False ],
'max_records': max_records,
'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
'tag': tag,
'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
'source_snapshot': [ source_snapshot, 'source-snapshot', [ basestring, 'None' ], False ],
'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
}, {
'num-succeeded': [ int, False ],
'num-failed': [ int, False ],
'success-list': [ SnapmirrorResyncIterInfo, True ],
'failure-list': [ SnapmirrorResyncIterInfo, True ],
} )
def snapmirror_modify_iter(self, query, max_records=None, max_failure_count=None, schedule=None, return_success_list=None, vserver=None, tries=None, tag=None, continue_on_failure=None, return_failure_list=None, policy=None, max_transfer_rate=None):
"""
The snapmirror-modify-iter API allows to change one or more
parameters of one or more SnapMirror relationships. This API is
not supported on Infinite Volume constituents.
:param query: If operating on a specific snapmirror, this input element must
specify all keys.
If operating on snapmirror objects based on query, this input
element must specify a query.
:param max_records: The maximum number of snapmirror objects to be operated in this
call.
Default: 20
:param max_failure_count: When allowing failures ('continue-on-failure' is set to true),
then this input element may be provided to limit the number of
failed operations before the server gives up and returns.
If set, the API will continue with the next matching snapmirror
even when the operation on a previous matching snapmirror fails,
and do so until the total number of objects failed to be operated
on reaches the maximum specified.
If set to the maximum or not provided, then there will be no
limit on the number of failed operations.
Only applicable if 'continue-on-failure' is set to true.
Default: 2^32-1
:param schedule: Specifies the name of the cron schedule, used to update the
SnapMirror relationship.
:param return_success_list: If set to true, the API will return the list of snapmirror
objects (just keys) that were successfully operated on.
If set to false, the list of snapmirror objects operated on will
not be returned.
Default: true
:param vserver: If this optional parameter is specified, designates the managing
Vserver. The managing Vserver is authorized to use snapmirror
commands to manage the SnapMirror relationship. The vserver
option is currently a reserved option.
:param tries: Specifies the maximum number of times to attempt each manual or
scheduled transfer for a SnapMirror relationship. The default is
eight times.
Note: You can set the tries option to zero (0) to disable manual
and scheduled updates for the SnapMirror relationship. This
parameter is only relevant on Data ONTAP 8.1 operating in
Cluster-Mode. On Data ONTAP 8.2 operating in Cluster-Mode, the
maximum number of times to attempt a transfer is an attribute of
the SnapMirror policy. Therefore the value of this parameter is
ignored.
:param tag: Specify the tag from the last call.
It is usually not specified for the first call. For subsequent
calls, copy values from the next-tag obtained from the previous
call.
:param continue_on_failure: This input element is useful when multiple snapmirror objects
match a given query.
If set to true, the API will continue with the next matching
snapmirror even when the operation fails for the snapmirror.
If set to false, the API will return on the first failure.
Default: false
:param return_failure_list: If set to true, the API will return the list of snapmirror
objects (just keys) that were not operated on due to some error.
If set to false, the list of snapmirror objects not operated on
will not be returned.
Default: true
:param policy: Specifies the name of the SnapMirror policy that applies to this
relationship.
:param max_transfer_rate: Specifies the upper bound, in kilobytes per second, at which data
is transferred. The default is unlimited (0) which permits the
SnapMirror relationship to fully utilize the available network
bandwidth. The max-transfer-rate option does not affect
load-sharing transfers and transfers for other relationships with
Relationship Capability of Pre 8.2 confined to a single cluster.
"""
return self.request( "snapmirror-modify-iter", {
'max_records': max_records,
'max_failure_count': [ max_failure_count, 'max-failure-count', [ int, 'None' ], False ],
'schedule': [ schedule, 'schedule', [ basestring, 'None' ], False ],
'return_success_list': [ return_success_list, 'return-success-list', [ bool, 'None' ], False ],
'vserver': [ vserver, 'vserver', [ basestring, 'vserver-name' ], False ],
'tries': [ tries, 'tries', [ basestring, 'None' ], False ],
'tag': tag,
'continue_on_failure': [ continue_on_failure, 'continue-on-failure', [ bool, 'None' ], False ],
'return_failure_list': [ return_failure_list, 'return-failure-list', [ bool, 'None' ], False ],
'policy': [ policy, 'policy', [ basestring, 'sm-policy' ], False ],
'query': [ query, 'query', [ SnapmirrorInfo, 'None' ], False ],
'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
}, {
'num-succeeded': [ int, False ],
'num-failed': [ int, False ],
'success-list': [ SnapmirrorModifyIterInfo, True ],
'failure-list': [ SnapmirrorModifyIterInfo, True ],
} )
def snapmirror_break(self, source_vserver=None, source_volume=None, source_cluster=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None, destination_cluster=None):
"""
Breaks a SnapMirror relationship between a source and
destination volume of a data protection mirror. When
Data ONTAP breaks the relationship, the destination volume is
made a read-write volume and can diverge from the source volume,
client redirection is turned off on the destination volume, the
restart checkpoint is cleared, and the clients can see the latest
Snapshot copy.
<p>
On Data ONTAP operating in 7-Mode, no check is done to
determine whether the operation is legal or successful.
You need to query the status afterward by using the
snapmirror-get-status API.
<p>
Subsequent manual or scheduled SnapMirror updates to the broken
relationship will fail until the SnapMirror relationship is
re-established using the snapmirror-resync API.
<p>
On Data ONTAP operating in Cluster-Mode, this API applies
only to data protection mirrors and not to load-sharing mirrors.
<p>
The snapmirror-break API must be issued on destination storage
system on Data ONTAP operating in 7-Mode, and on the destination
cluster on Data ONTAP 8.1 operating in Cluster-Mode, and on
the destination cluster or Vserver on Data ONTAP 8.2 or later
operating in Cluster-Mode.
<p>
This API is not supported if the destination end point is an
Infinite Volume.
:param source_vserver: Specifies the source Vserver of the SnapMirror relationship.
If using this parameter, the following parameters must also be
specified:
<ul>
<li> The name of the source volume.
<li> The name of the source cluster on Data ONTAP 8.1
operating in Cluster-Mode, or on Data ONTAP 8.2 or later
operating in Cluster-Mode if the relationship control
plane is 'v1'.
</ul>
:param source_volume: Specifies the source volume of the SnapMirror relationship.
If using this parameter, the following parameters must also be
specified:
<ul>
<li> The name of the source Vserver.
<li> The name of the source cluster on Data ONTAP 8.1
operating in Cluster-Mode, or on Data ONTAP 8.2 or later
operating in Cluster-Mode if the relationship control
plane is 'v1'.
</ul>
:param source_cluster: Specifies the source cluster of the SnapMirror relationship. The
source Vserver and source volume must also be specified if using
this parameter.
:param destination_vserver: Specifies the destination Vserver of the SnapMirror relationship.
If using this parameter, the following parameters must also be
specified:
<ul>
<li> The name of the destination volume.
<li> The name of the destination cluster on Data ONTAP 8.1
operating in Cluster-Mode, or on Data ONTAP 8.2
operating in Cluster-Mode if the relationship control
plane is 'v1'.
</ul>
:param destination_location: Specifies the destination endpoint of the SnapMirror
relationship in the following formats:
<ul>
<li> <system>:/vol/<volume>[/<qtree>]
On Data ONTAP operating in 7-Mode.
<li> [<cluster>:]//<vserver>/<volume>
On Data ONTAP 8.1 operating in Cluster-Mode, and on Data
ONTAP 8.2 operating in Cluster-Mode for relationships using
a control plane compatible with Data ONTAP 8.1 operating in
Cluster-Mode.
<li> <[vserver:]volume>
On Data ONTAP 8.2 or later operating in Cluster-Mode except
for relationships using a control plane compatible with Data
ONTAP 8.1 operating in Cluster-Mode. This format depends
on the Vserver peering setup between the source and
destination Vservers.
<ul>
This format may change in the future.
On Data ONTAP operating in Cluster-Mode, when specifying a
destination endpoint, you must use either the destination
location, or the destination cluster, destination Vserver,
and destination volume.
On Data ONTAP operating in 7-Mode, If the destination
endpoint is a qtree, it must be quiesced using
snapmirror-quiesce.
This parameter is mandatory on Data ONTAP 7-mode
:param destination_volume: Specifies the destination volume of the SnapMirror relationship.
If using this parameter, the following parameters must also be
specified:
<ul>
<li> The name of the destination Vserver.
<li> The name of the destination cluster on Data ONTAP 8.1
operating in Cluster-Mode, or on Data ONTAP 8.2
operating in Cluster-Mode if the relationship control
plane is 'v1'.
</ul>
:param source_location: Specifies source endpoint of the SnapMirror
relationship in the following formats:
<ul>
<li> <system>:/vol/<volume>[/<qtree>]
On Data ONTAP operating in 7-Mode.
<li> [<cluster>:]//<vserver>/<volume>
On Data ONTAP 8.1 operating in Cluster-Mode, and on Data
ONTAP 8.2 operating in Cluster-Mode for relationships using
a control plane compatible with Data ONTAP 8.1 operating in
Cluster-Mode.
<li> <[vserver:]volume>
On Data ONTAP 8.2 or later operating in Cluster-Mode except
for relationships using a control plane compatible with Data
ONTAP 8.1 operating in Cluster-Mode. This format depends
on the Vserver peering setup between the source and
destination Vservers.
<ul>
This format may change in the future.
On Data ONTAP operating in Cluster-Mode, When specifying a
source endpoint, you must use either the source location, or the
source cluster, source Vserver, and source volume.
:param destination_cluster: Specifies the destination cluster of the SnapMirror relationship.
The destination Vserver and destination volume must also be
specified if using this parameter.
"""
return self.request( "snapmirror-break", {
'source_vserver': [ source_vserver, 'source-vserver', [ basestring, 'None' ], False ],
'source_volume': [ source_volume, 'source-volume', [ basestring, 'None' ], False ],
'source_cluster': [ source_cluster, 'source-cluster', [ basestring, 'None' ], False ],
'destination_vserver': [ destination_vserver, 'destination-vserver', [ basestring, 'None' ], False ],
'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
'destination_volume': [ destination_volume, 'destination-volume', [ basestring, 'None' ], False ],
'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
'destination_cluster': [ destination_cluster, 'destination-cluster', [ basestring, 'None' ], False ],
}, {
} )
def snapmirror_create(self, source_vserver=None, source_volume=None, schedule=None, vserver=None, relationship_type=None, return_record=None, source_cluster=None, tries=None, destination_vserver=None, destination_location=None, policy=None, destination_volume=None, source_location=None, max_transfer_rate=None, destination_cluster=None):
"""
The snapmirror-create API creates a SnapMirror relationship
between a source and destination volumes. The following types of
relationships can be created:
<ul>
<li>'data_protection' - For Disater Recovery (DR);
<li>'load_sharing' - For load-sharing within the same Vserver;
<li>'vault' - For Vault;
<li>'transition_data_protection' - For 7-mode to Cluster-Mode
transition;
<li>'restore' - For restoring a data protection (DP) volume
data;
</ul>
On Data ONTAP 8.1 operating in Cluster-Mode, only
'data_protection' and 'load_sharing' type are supported.
Relationships of type 'restore' are created temporary during a
restore operation initiated using the snapmirror-restore API or
the corresponding command.
The source and destination endpoints must be specified when using
the snapmirror-create API.
This API will fail if the destination volume does not exist.
On Data ONTAP 8.2 operating in Cluster-Mode, the API does not
validate the attributes and the existence of the source volume.
It will succeed even if the source volume does not exist. The
validation will be done at the first transfer time.
On Data ONTAP 8.1 operating in Cluster-Mode, the API will fail if
the source volume is not in online state and read-write (RW)
type, or the destination volume is not in online state and a DP
(Data Protection) type.
Note: The source volume might contain data and Snapshot copies
prior to creating the Snapmirror relationship. If the destination
volume is not empty, it must have a Snapshot copy in common with
the source volume, that is, it must have once been a copy of the
source volume. On Data ONTAP 8.1 operating in Cluster-Mode, the
API will fail if there is no common Snapshot copy. On Data ONTAP
8.2 operating in Cluster-Mode the API will succeed, but all
subsequent transfers will fail if there is no common snapshot
copy.
Load-sharing mirrors have the following restrictions:
- They only use FlexVol volumes.
- They are confined to a single Vserver; they are not allowed to
span Vservers.
- The source or destination of a load-sharing relationship cannot
be the source or destination of any other SnapMirror
relationship.
A set of load-sharing mirrors can have one or more destination
volumes. You create separate SnapMirror relationships between the
common source volume and each destination volume to create the
set of load-sharing mirrors.
If the destination volume is empty, it must be initialized using
the snapmirror-initialize API. Destination volumes in a set of
load-sharing mirrors must be initialized using the
snapmirror-initialize-ls-set API.
The snapmirror-create API must be issued on the destination
cluster on Data ONTAP 8.1 operating in Cluster-Mode, and on the
destination Vserver on Data ONTAP 8.2 operating in Cluster-Mode.
:param source_vserver: Specifies the name of the source Vserver for the SnapMirror
relationship. If using this parameter, the following parameters
should also be specified:
<ul>
<li> The name of source volume.
<li> The name of the source cluster on Data ONTAP 8.1, or on Data
ONTAP 8.2 or later operating in Cluster-Mode if the relationship
control plane is 'v1'.
</ul>
:param source_volume: Specifies the name of the source volume of the SnapMirror
relationship. If using this parameter, the following parameters
should also be specified:
<ul>
<li> The name of the source Vserver.
<li> The name of the source cluster on Data ONTAP 8.1 operating
in Cluster-Mode, or on Data ONTAP 8.2 or later operating in
Cluster-Mode if the relationship control plane is 'v1'.
</ul>
:param schedule: Specifies the name of the cron schedule, which is used to update
the SnapMirror relationship.
:param vserver: If this optional parameter is specified, designates the managing
Vserver. The managing Vserver is authorized to use snapmirror
commands to manage the SnapMirror relationship. The vserver
option is currently a reserved option and should not be used for
queries. The destination-vserver parameter should be used to
select the Vserver in a cluster context.
:param relationship_type: Specifies the type of the SnapMirror relationship.
Possible values:
<ul>
<li> "data_protection" ,
<li> "load_sharing" ,
<li> "vault" ,
<li> "restore" ,
<li> "transition_data_protection"
</ul>
:param return_record: If set to true, returns the snapmirror on successful creation.
Default: false
:param source_cluster: Specifies the name of the source cluster for the SnapMirror
relationship. The parameters for the name of the source Vserver,
and the name of the source volume must also be specified if using
this parameter. This parameter is available only on:
<ul>
<li> Data ONTAP 8.1 operating in Cluster-Mode.
<li> Data ONTAP 8.2 or later operating in Cluster-Mode if the
relationship control plane is 'v1'.
</ul>
:param tries: Specifies the maximum number of times to attempt each manual or
scheduled transfer for a SnapMirror relationship. The default is
eight times.
Note: You can set the tries option to zero (0) to disable manual
and scheduled updates for the SnapMirror relationship. This
parameter is only relevant on Data ONTAP 8.1 operating in
Cluster-Mode. On Data ONTAP 8.2 operating in Cluster-Mode, the
maximum number of times to attempt a transfer is an attribute of
the SnapMirror policy. Therefore the value of this parameter is
ignored.
:param destination_vserver: Specifies the name of the destination Vserver for the SnapMirror
relationship. If using this parameter, the following parameters
should also be specified:
<ul>
<li> The name of destination volume.
<li> The name of the destination cluster on Data ONTAP 8.1, or on
Data ONTAP 8.2 or later operating in Cluster-Mode if the
relationship control plane is 'v1'.
</ul>
:param destination_location: Specifies the destination endpoint of the SnapMirror relationship
in one of the following formats:
<ul>
<li> <system>:/vol/<volume>[/<qtree>] on Data
ONTAP operating in 7-Mode.
<li> [<cluster>:]//<vserver>/<volume> on Data
ONTAP 8.1 operating in Cluster-Mode, and on Data ONTAP 8.2
operating in Cluster-Mode for relationships using a control plane
compatible with Data ONTAP 8.1 operating in Cluster-Mode.
<li> <[vserver:]volume> on Data ONTAP 8.2 or later
operating in Cluster-Mode except for relationships using a
control plane compatible with Data ONTAP 8.1 operating in
Cluster-Mode. This format depends on Vserver peering setup
between the source and destination Vservers.
</ul>
This format may change in the future.
The destination endpoint can be specified using the location
formats above, or by specifying the parameters for the name of
the destination Vserver, the name of the destination volume, and
the name of the destination cluster. The name of the destination
cluster is only required on Data ONTAP 8.1 operating in
Cluster-Mode.
:param policy: Specifies the name of the snapmirror policy for the relationship.
For SnapMirror relationships of type 'vault', the policy will
also have rules to select snapshot copies that must be
transferred. If no policy is specified, a default policy will be
applied depending on the type of the SnapMirror relationship.
This parameter is only available on Data ONTAP 8.2 or later
operating in Cluster-Mode if the relationship control plane is
'v2'.
:param destination_volume: Specifies the name of the destination volume for the SnapMirror
relationship. If using this parameter, the following parameters
should also be specified:
<ul>
<li> The name of the destination Vserver.
<li> The name of the destination cluster on Data ONTAP 8.1
operating in Cluster-Mode, or on Data ONTAP 8.2 or later
operating in Cluster-Mode if the relationship control plane is
'v1'.
</ul>
:param source_location: Specifies the source endpoint of the SnapMirror relationship in
one of the following formats:
<ul>
<li> <system>:/vol/<volume>[/<qtree>] on Data
ONTAP operating in 7-Mode.
<li> [<cluster>:]//<vserver>/<volume> on Data
ONTAP 8.1 operating in Cluster-Mode, and on Data ONTAP 8.2
operating in Cluster-Mode for relationships using a control plane
compatible with Data ONTAP 8.1 operating in Cluster-Mode.
<li> <[vserver:]volume> on Data ONTAP 8.2 or later
operating in Cluster-Mode except for relationships using a
control plane compatible with Data ONTAP 8.1 operating in
Cluster-Mode. This format depends on Vserver peering setup
between the source and destination Vservers.
</ul>
This format may change in the future.
The source endpoint can be specified using the location formats
above, or by specifying the parameters for the name of the source
Vserver, the name of the source volume, and the name of the
source cluster. The name of the source cluster is only required
on Data ONTAP 8.1 operating in Cluster-Mode.
:param max_transfer_rate: Specifies the upper bound, in kilobytes per second, at which data
is transferred. The default is unlimited (0) which permits the
SnapMirror relationship to fully utilize the available network
bandwidth. The max-transfer-rate option does not affect
load-sharing transfers and transfers for other relationships with
Relationship Capability of Pre 8.2 confined to a single cluster.
:param destination_cluster: Specifies the destination cluster name for the SnapMirror
relationship. The parameters for the name of the destination
Vserver, and the name of the destination volume must also be
specified if using this parameter. This parameter is available
only on:
<ul>
<li> Data ONTAP 8.1 operating in Cluster-Mode.
<li> Data ONTAP 8.2 or later operating in Cluster-Mode if the
relationship control plane is 'v1'.
</ul>
"""
return self.request( "snapmirror-create", {
'source_vserver': [ source_vserver, 'source-vserver', [ basestring, 'None' ], False ],
'source_volume': [ source_volume, 'source-volume', [ basestring, 'None' ], False ],
'schedule': [ schedule, 'schedule', [ basestring, 'None' ], False ],
'vserver': [ vserver, 'vserver', [ basestring, 'None' ], False ],
'relationship_type': [ relationship_type, 'relationship-type', [ basestring, 'None' ], False ],
'return_record': [ return_record, 'return-record', [ bool, 'None' ], False ],
'source_cluster': [ source_cluster, 'source-cluster', [ basestring, 'None' ], False ],
'tries': [ tries, 'tries', [ basestring, 'None' ], False ],
'destination_vserver': [ destination_vserver, 'destination-vserver', [ basestring, 'None' ], False ],
'destination_location': [ destination_location, 'destination-location', [ basestring, 'None' ], False ],
'policy': [ policy, 'policy', [ basestring, 'None' ], False ],
'destination_volume': [ destination_volume, 'destination-volume', [ basestring, 'None' ], False ],
'source_location': [ source_location, 'source-location', [ basestring, 'None' ], False ],
'max_transfer_rate': [ max_transfer_rate, 'max-transfer-rate', [ int, 'None' ], False ],
'destination_cluster': [ destination_cluster, 'destination-cluster', [ basestring, 'None' ], False ],
}, {
'result': [ SnapmirrorInfo, False ],
} )
def snapmirror_get_destination(self, source_vserver=None, source_volume=None, desired_attributes=None, destination_vserver=None, destination_location=None, destination_volume=None, source_location=None):
    """Look up a SnapMirror relationship from its source side.

    Issues the ``snapmirror-get-destination`` ZAPI call, which returns
    information about a SnapMirror relationship whose source endpoint is
    on the cluster or Vserver the call is issued on.  The destination
    endpoint must be identified, either via ``destination_location`` or
    via the ``destination_vserver``/``destination_volume`` pair; the
    source endpoint may optionally be identified the same way.

    If several entries (different relationship IDs) match the given
    endpoints, only the first one is returned; use
    ``snapmirror-get-destination-iter`` to enumerate them all.  Note that
    information only becomes available on the source side after at least
    one transfer, and that it can be stale if the relationship was
    deleted on its destination.  Supported on Data ONTAP 8.2 and above
    operating in Cluster-Mode only.

    :param source_vserver: Name of the source Vserver (requires
        ``source_volume`` as well).
    :param source_volume: Name of the source volume (requires
        ``source_vserver`` as well).
    :param desired_attributes: Restrict the returned attributes; when
        omitted, all available attributes are returned.
    :param destination_vserver: Name of the destination Vserver (requires
        ``destination_volume`` as well).
    :param destination_location: Destination endpoint in
        ``[vserver:]volume`` form (alternative to the vserver/volume pair;
        the format may change in the future).
    :param destination_volume: Name of the destination volume (requires
        ``destination_vserver`` as well).
    :param source_location: Source endpoint in ``[vserver:]volume`` form
        (alternative to the vserver/volume pair; the format may change in
        the future).
    """
    # Build the request payload separately from the call for readability;
    # key order is kept exactly as before since it is preserved in the
    # emitted request.
    api_args = {
        'source_vserver': [source_vserver, 'source-vserver', [basestring, 'None'], False],
        'source_volume': [source_volume, 'source-volume', [basestring, 'None'], False],
        'desired_attributes': [desired_attributes, 'desired-attributes', [SnapmirrorDestinationInfo, 'None'], False],
        'destination_vserver': [destination_vserver, 'destination-vserver', [basestring, 'None'], False],
        'destination_location': [destination_location, 'destination-location', [basestring, 'None'], False],
        'destination_volume': [destination_volume, 'destination-volume', [basestring, 'None'], False],
        'source_location': [source_location, 'source-location', [basestring, 'None'], False],
    }
    api_output = {
        'attributes': [SnapmirrorDestinationInfo, False],
    }
    return self.request("snapmirror-get-destination", api_args, api_output)
def snapmirror_snapshot_owner_create(self, vserver, volume, snapshot, snapshot_owner_name=None):
    """Register an owner that preserves a Snapshot copy.

    Issues ``snapmirror-snapshot-owner-create`` to add an owner that
    keeps a Snapshot copy from being deleted, for use in SnapMirror
    mirror-to-vault cascade configurations.

    :param vserver: Vserver name.
    :param volume: Volume name.
    :param snapshot: Snapshot copy name.
    :param snapshot_owner_name: Owner name (letters, digits, and
        underscores; at most 32 characters).  When omitted, the system
        assigns an internal default owner.
    """
    api_args = {
        'vserver': [vserver, 'vserver', [basestring, 'vserver-name'], False],
        'volume': [volume, 'volume', [basestring, 'volume-name'], False],
        'snapshot': [snapshot, 'snapshot', [basestring, 'snapshot-id'], False],
        'snapshot_owner_name': [snapshot_owner_name, 'snapshot-owner-name', [basestring, 'snapshot-owner-name'], False],
    }
    # This API returns no attributes, hence the empty output description.
    return self.request("snapmirror-snapshot-owner-create", api_args, {})
| 58.700463 | 359 | 0.617827 | 35,332 | 291,800 | 5.054115 | 0.024284 | 0.02903 | 0.026544 | 0.057904 | 0.876302 | 0.853028 | 0.836475 | 0.825757 | 0.815251 | 0.802595 | 0 | 0.00731 | 0.322121 | 291,800 | 4,970 | 360 | 58.712274 | 0.895458 | 0.685977 | 0 | 0.706447 | 0 | 0 | 0.250928 | 0.021829 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076818 | false | 0 | 0.050754 | 0 | 0.205761 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
165344794bfce0dc1069a5aa433fa9a13b519ebb | 3,406 | py | Python | rllib/examples/env/dm_control_suite.py | firebolt55439/ray | 215300b070628c06f0106906fc6c03bd70ebf140 | [
"Apache-2.0"
] | 3 | 2020-12-03T17:48:45.000Z | 2022-01-22T08:09:46.000Z | rllib/examples/env/dm_control_suite.py | firebolt55439/ray | 215300b070628c06f0106906fc6c03bd70ebf140 | [
"Apache-2.0"
] | 6 | 2022-03-18T14:06:24.000Z | 2022-03-26T07:13:16.000Z | rllib/examples/env/dm_control_suite.py | firebolt55439/ray | 215300b070628c06f0106906fc6c03bd70ebf140 | [
"Apache-2.0"
] | 2 | 2020-05-22T15:36:27.000Z | 2020-05-22T15:52:03.000Z | from ray.rllib.env.dm_control_wrapper import DMCEnv
"""
Nine environment constructors from the DeepMind Control Suite
"""
def acrobot_swingup(from_pixels=True, height=64, width=64, frame_skip=2,
                    channels_first=True):
    """Build the ``acrobot``/``swingup`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("acrobot", "swingup", **env_kwargs)
def walker_walk(from_pixels=True, height=64, width=64, frame_skip=2,
                channels_first=True):
    """Build the ``walker``/``walk`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("walker", "walk", **env_kwargs)
def hopper_hop(from_pixels=True, height=64, width=64, frame_skip=2,
               channels_first=True):
    """Build the ``hopper``/``hop`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("hopper", "hop", **env_kwargs)
def hopper_stand(from_pixels=True, height=64, width=64, frame_skip=2,
                 channels_first=True):
    """Build the ``hopper``/``stand`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("hopper", "stand", **env_kwargs)
def cheetah_run(from_pixels=True, height=64, width=64, frame_skip=2,
                channels_first=True):
    """Build the ``cheetah``/``run`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("cheetah", "run", **env_kwargs)
def walker_run(from_pixels=True, height=64, width=64, frame_skip=2,
               channels_first=True):
    """Build the ``walker``/``run`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("walker", "run", **env_kwargs)
def pendulum_swingup(from_pixels=True, height=64, width=64, frame_skip=2,
                     channels_first=True):
    """Build the ``pendulum``/``swingup`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("pendulum", "swingup", **env_kwargs)
def cartpole_swingup(from_pixels=True, height=64, width=64, frame_skip=2,
                     channels_first=True):
    """Build the ``cartpole``/``swingup`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("cartpole", "swingup", **env_kwargs)
def humanoid_walk(from_pixels=True, height=64, width=64, frame_skip=2,
                  channels_first=True):
    """Build the ``humanoid``/``walk`` DeepMind Control Suite task."""
    env_kwargs = dict(from_pixels=from_pixels, height=height, width=width,
                      frame_skip=frame_skip, channels_first=channels_first)
    return DMCEnv("humanoid", "walk", **env_kwargs)
| 24.328571 | 51 | 0.527011 | 339 | 3,406 | 5.023599 | 0.109145 | 0.158544 | 0.073987 | 0.105696 | 0.89724 | 0.89724 | 0.89724 | 0.89724 | 0.89724 | 0.89724 | 0 | 0.022276 | 0.393717 | 3,406 | 139 | 52 | 24.503597 | 0.802421 | 0 | 0 | 0.855932 | 0 | 0 | 0.031287 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076271 | false | 0 | 0.008475 | 0.076271 | 0.161017 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
16990e76843ac040b6fcd0df93b60442472cb0d9 | 73 | py | Python | src/py/checks/versioncheck.py | Birch-san/gmusicapi-node | 0d32a55c9ad2eec763b1bf8d15c740601b68fda3 | [
"MIT"
] | 5 | 2015-11-09T18:33:48.000Z | 2017-05-20T13:33:26.000Z | src/py/checks/versioncheck.py | Birch-san/gmusicapi-node | 0d32a55c9ad2eec763b1bf8d15c740601b68fda3 | [
"MIT"
] | null | null | null | src/py/checks/versioncheck.py | Birch-san/gmusicapi-node | 0d32a55c9ad2eec763b1bf8d15c740601b68fda3 | [
"MIT"
] | null | null | null | import sys
# Report the running interpreter's major and minor version, one per line.
version = sys.version_info
print(version.major)
print(version.minor)
16f03ef5d84420e1c3f45272dbfc7b5860675378 | 3,420 | py | Python | transformers/nlp/text_char_tfidf_count_transformers.py | james94/driverlessai-recipes | 87c35460db59ffda8dc18ad82cb3a9b8291410e4 | [
"Apache-2.0"
] | 194 | 2019-04-23T10:25:13.000Z | 2022-03-29T04:19:28.000Z | transformers/nlp/text_char_tfidf_count_transformers.py | james94/driverlessai-recipes | 87c35460db59ffda8dc18ad82cb3a9b8291410e4 | [
"Apache-2.0"
] | 50 | 2019-06-24T20:17:51.000Z | 2022-03-16T20:05:37.000Z | transformers/nlp/text_char_tfidf_count_transformers.py | james94/driverlessai-recipes | 87c35460db59ffda8dc18ad82cb3a9b8291410e4 | [
"Apache-2.0"
] | 85 | 2019-03-27T12:26:43.000Z | 2022-01-27T13:15:37.000Z | """Character level TFIDF and Count followed by Truncated SVD on text columns"""
from h2oaicore.transformer_utils import CustomTransformer
import datatable as dt
import numpy as np
import string
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
class TextCharTFIDFTransformer(CustomTransformer):
    """Character-level TF-IDF features reduced with truncated SVD.

    Vectorizes a single text column with character n-grams (1 up to
    ``max_ngram``) and projects the sparse TF-IDF matrix down to
    ``n_svd_comp`` dense components.
    """

    _testing_can_skip_failure = False  # ensure tested as if shouldn't fail

    def __init__(self, max_ngram, n_svd_comp, **kwargs):
        super().__init__(**kwargs)
        self.max_ngram = max_ngram
        self.n_svd_comp = n_svd_comp

    @staticmethod
    def do_acceptance_test():
        return True

    @staticmethod
    def get_default_properties():
        return dict(col_type="text", min_cols=1, max_cols=1,
                    relative_importance=1)

    @staticmethod
    def get_parameter_choices():
        return dict(max_ngram=[3, 2, 1], n_svd_comp=[50, 20, 100])

    @property
    def display_name(self):
        return "CharTFIDF_{}maxgram_SVD_{}comp".format(self.max_ngram,
                                                       self.n_svd_comp)

    def fit_transform(self, X: dt.Frame, y: np.array = None):
        # Single text column; missing values become the literal token "NA".
        text = X.to_pandas().astype(str).iloc[:, 0].fillna("NA")
        # Character-level TF-IDF vectorization.
        self.tfidf_vec = TfidfVectorizer(analyzer="char",
                                         ngram_range=(1, self.max_ngram))
        features = self.tfidf_vec.fit_transform(text)
        # SVD rank must stay strictly below the vocabulary size.
        vocab_size = len(self.tfidf_vec.vocabulary_)
        if vocab_size <= self.n_svd_comp:
            self.n_svd_comp = vocab_size - 1
        self.truncated_svd = TruncatedSVD(n_components=self.n_svd_comp,
                                          random_state=2019)
        return self.truncated_svd.fit_transform(features)

    def transform(self, X: dt.Frame):
        # Apply the fitted vectorizer and SVD from fit_transform.
        text = X.to_pandas().astype(str).iloc[:, 0].fillna("NA")
        return self.truncated_svd.transform(self.tfidf_vec.transform(text))
class TextCharCountTransformer(CustomTransformer):
    """Character-level count features reduced with truncated SVD.

    Vectorizes a single text column with character n-gram counts (1 up to
    ``max_ngram``) and projects the sparse count matrix down to
    ``n_svd_comp`` dense components.
    """

    _testing_can_skip_failure = False  # ensure tested as if shouldn't fail

    def __init__(self, max_ngram, n_svd_comp, **kwargs):
        super().__init__(**kwargs)
        self.max_ngram = max_ngram
        self.n_svd_comp = n_svd_comp

    @staticmethod
    def do_acceptance_test():
        return True

    @staticmethod
    def get_default_properties():
        return dict(col_type="text", min_cols=1, max_cols=1,
                    relative_importance=1)

    @staticmethod
    def get_parameter_choices():
        return dict(max_ngram=[3, 2, 1], n_svd_comp=[50, 20, 100])

    @property
    def display_name(self):
        return "CharCount_max{}gram_SVD_{}comp".format(self.max_ngram,
                                                       self.n_svd_comp)

    def fit_transform(self, X: dt.Frame, y: np.array = None):
        # Single text column; missing values become the literal token "NA".
        text = X.to_pandas().astype(str).iloc[:, 0].fillna("NA")
        # Character-level count vectorization.
        self.cnt_vec = CountVectorizer(analyzer="char",
                                       ngram_range=(1, self.max_ngram))
        features = self.cnt_vec.fit_transform(text)
        # SVD rank must stay strictly below the vocabulary size.
        vocab_size = len(self.cnt_vec.vocabulary_)
        if vocab_size <= self.n_svd_comp:
            self.n_svd_comp = vocab_size - 1
        self.truncated_svd = TruncatedSVD(n_components=self.n_svd_comp,
                                          random_state=2019)
        return self.truncated_svd.fit_transform(features)

    def transform(self, X: dt.Frame):
        # Apply the fitted vectorizer and SVD from fit_transform.
        text = X.to_pandas().astype(str).iloc[:, 0].fillna("NA")
        return self.truncated_svd.transform(self.cnt_vec.transform(text))
| 35.625 | 90 | 0.667836 | 474 | 3,420 | 4.535865 | 0.238397 | 0.029767 | 0.059535 | 0.055814 | 0.775814 | 0.768372 | 0.768372 | 0.768372 | 0.768372 | 0.733953 | 0 | 0.016135 | 0.22076 | 3,420 | 95 | 91 | 36 | 0.790619 | 0.060234 | 0 | 0.722222 | 0 | 0 | 0.056197 | 0.03684 | 0 | 0 | 0 | 0 | 0 | 1 | 0.194444 | false | 0 | 0.111111 | 0.111111 | 0.527778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
bc57c1184917ff3795d03978ba8759057d506713 | 45 | py | Python | data/__init__.py | zkcys001/distracting_feature | 508c4f7a1b2e6a99407a44e33e630803a3d0c89d | [
"Apache-1.1"
] | 25 | 2019-11-28T13:39:09.000Z | 2021-11-25T05:30:47.000Z | data/__init__.py | zkcys001/distracting_feature | 508c4f7a1b2e6a99407a44e33e630803a3d0c89d | [
"Apache-1.1"
] | 2 | 2020-01-11T15:36:12.000Z | 2020-02-27T06:57:59.000Z | data/__init__.py | zkcys001/distracting_feature | 508c4f7a1b2e6a99407a44e33e630803a3d0c89d | [
"Apache-1.1"
] | 4 | 2019-12-25T07:57:40.000Z | 2021-09-21T16:42:51.000Z | import data.structure
import data.preprocess
| 15 | 22 | 0.866667 | 6 | 45 | 6.5 | 0.666667 | 0.512821 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088889 | 45 | 2 | 23 | 22.5 | 0.95122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bc6e7dfdd5ccfe873fe655c0ad9cf569adfff3a9 | 72 | py | Python | tests/test_A000040.py | jphayek/OEIS-Python | f9614770290aa3f24cbf322ba6c8f4a89f262719 | [
"MIT"
] | null | null | null | tests/test_A000040.py | jphayek/OEIS-Python | f9614770290aa3f24cbf322ba6c8f4a89f262719 | [
"MIT"
] | null | null | null | tests/test_A000040.py | jphayek/OEIS-Python | f9614770290aa3f24cbf322ba6c8f4a89f262719 | [
"MIT"
] | null | null | null | from oeis import A000040
def test_prime():
    """Smoke-test the A000040 (prime numbers) OEIS sequence accessor."""
    # Bug fix: the original compared against the undefined name `true`
    # (Python's boolean literal is `True`), which raised a NameError
    # whenever the test ran.
    # NOTE(review): comparing a sequence call to True looks suspect;
    # the expected value should likely be the concrete list of primes
    # returned by A000040(0, 9) -- confirm against the oeis package API.
    assert A000040(0, 9) == True
bcabbde3e1e39029d47097e046146007e0e9b3a3 | 67,816 | py | Python | theano/tensor/blas_headers.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 1 | 2022-01-25T22:52:58.000Z | 2022-01-25T22:52:58.000Z | theano/tensor/blas_headers.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | theano/tensor/blas_headers.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | """ Header text for the C and Fortran BLAS interfaces.
There is no standard name or location for this header, so we just insert it
ourselves into the C code.
"""
from __future__ import absolute_import, print_function, division
import logging
import textwrap
import sys
import os
from os.path import dirname
from theano import config
from theano.gof.cmodule import GCC_compiler
_logger = logging.getLogger('theano.tensor.blas')
def detect_macos_sdot_bug():
    """
    Try to detect a bug in the default BLAS in MacOS.

    The problem in Theano has been reported in gh-1240,
    the underlying bug has been confirmed in
    http://www.macresearch.org/lapackblas-fortran-106#comment-17227.

    This function tries to compile code triggering that bug,
    and, if necessary, an attempted fix.

    Three attributes of this function will be set:

    - detect_macos_sdot_bug.tested will be set to True
      when this function is called.
    - detect_macos_sdot_bug.present will be set to True if the bug is
      detected. Its value is returned by the function
    - detect_macos_sdot_bug.fix_works will be set to True if the fix was
      attempted, and succeeded.

    """
    _logger.debug('Starting detection of bug in Mac OS BLAS sdot_ routine')
    # The function attributes act as a process-wide cache, so the
    # (expensive) compile-and-run probe happens at most once.
    if detect_macos_sdot_bug.tested:
        return detect_macos_sdot_bug.present

    # Only Mac OS with a configured BLAS can exhibit this bug.
    if sys.platform != 'darwin' or not config.blas.ldflags:
        _logger.info('Not Mac OS, no sdot_ bug')
        detect_macos_sdot_bug.tested = True
        return False

    # This code will return -1 if the dot product did not return
    # the right value (30.).
    flags = config.blas.ldflags.split()
    # NOTE(review): `flags` is appended to while it is being iterated; the
    # appended '-Wl,-rpath,...' entries never start with '-L' so the loop
    # terminates, but iterating over a copy would be clearer.
    for f in flags:
        # Library directories should also be added as rpath,
        # so that they can be loaded even if the environment
        # variable LD_LIBRARY_PATH does not contain them
        lib_path = os.environ.get('DYLD_FALLBACK_LIBRARY_PATH', '').split(':')
        if f.startswith('-L'):
            flags.append('-Wl,-rpath,' + f[2:])
            # also append those paths to DYLD_FALLBACK_LIBRARY_PATH to
            # support libraries that have the wrong install_name
            # (such as MKL on canopy installs)
            if (f[2:] not in lib_path):
                lib_path.append(f[2:])
                # this goes into the python process environment that is
                # inherited by subprocesses/used by dyld when loading new objects
                os.environ['DYLD_FALLBACK_LIBRARY_PATH'] = ':'.join(lib_path)

    # C probe: call the Fortran-interface sdot_ directly and check the
    # dot product of [0..4] with itself (expected 30.0, within 1e-6).
    test_code = textwrap.dedent("""\
        extern "C" float sdot_(int*, float*, int*, float*, int*);
        int main(int argc, char** argv)
        {
            int Nx = 5;
            int Sx = 1;
            float x[5] = {0, 1, 2, 3, 4};
            float r = sdot_(&Nx, x, &Sx, x, &Sx);

            if ((r - 30.f) > 1e-6 || (r - 30.f) < -1e-6)
            {
                return -1;
            }
            return 0;
        }
        """)
    _logger.debug('Trying to compile and run test case.')
    compilation_ok, run_ok = GCC_compiler.try_compile_tmp(
        test_code,
        tmp_prefix='detect_macos_sdot_bug_',
        flags=flags, try_run=True)
    detect_macos_sdot_bug.tested = True

    # If compilation failed, we consider there is a bug,
    # and the fix does not work
    if not compilation_ok:
        _logger.info('Could not compile test case for sdot_.')
        detect_macos_sdot_bug.present = True
        return True

    if run_ok:
        _logger.info('The sdot_ bug is not present on this system.')
        detect_macos_sdot_bug.present = False
        return False

    # Else, the bug is detected.
    _logger.info('The sdot_ bug is present on this system.')
    detect_macos_sdot_bug.present = True

    # Then, try a simple fix: route the Fortran-style sdot_ symbol
    # through the C interface (cblas_sdot) instead.
    test_fix_code = textwrap.dedent("""\
        extern "C" float cblas_sdot(int, float*, int, float*, int);
        static float sdot_(int* Nx, float* x, int* Sx, float* y, int* Sy)
        {
            return cblas_sdot(*Nx, x, *Sx, y, *Sy);
        }

        int main(int argc, char** argv)
        {
            int Nx = 5;
            int Sx = 1;
            float x[5] = {0, 1, 2, 3, 4};
            float r = sdot_(&Nx, x, &Sx, x, &Sx);

            if ((r - 30.f) > 1e-6 || (r - 30.f) < -1e-6)
            {
                return -1;
            }
            return 0;
        }
        """)
    _logger.debug('Trying to compile and run tentative workaround.')
    compilation_fix_ok, run_fix_ok = GCC_compiler.try_compile_tmp(
        test_fix_code,
        tmp_prefix='detect_macos_sdot_bug_testfix_',
        flags=flags,
        try_run=True)
    _logger.info("Status of tentative fix -- compilation OK: %s, works: %s",
                 compilation_fix_ok, run_fix_ok)
    detect_macos_sdot_bug.fix_works = run_fix_ok

    return detect_macos_sdot_bug.present

# Initialize the cache attributes documented in the docstring above.
detect_macos_sdot_bug.tested = False
detect_macos_sdot_bug.present = False
detect_macos_sdot_bug.fix_works = False
def cblas_header_text():
"""C header for the cblas interface."""
return """
//#include <stddef.h>
#undef __BEGIN_DECLS
#undef __END_DECLS
#ifdef __cplusplus
#define __BEGIN_DECLS extern "C" {
#define __END_DECLS }
#else
#define __BEGIN_DECLS /* empty */
#define __END_DECLS /* empty */
#endif
__BEGIN_DECLS
#define MOD %
/*
* Enumerated and derived types
*/
#define CBLAS_INDEX size_t /* this may vary between platforms */
enum CBLAS_ORDER {CblasRowMajor=101, CblasColMajor=102};
enum CBLAS_TRANSPOSE {CblasNoTrans=111, CblasTrans=112, CblasConjTrans=113};
enum CBLAS_UPLO {CblasUpper=121, CblasLower=122};
enum CBLAS_DIAG {CblasNonUnit=131, CblasUnit=132};
enum CBLAS_SIDE {CblasLeft=141, CblasRight=142};
float cblas_sdsdot(const int N, const float alpha, const float *X,
const int incX, const float *Y, const int incY);
double cblas_dsdot(const int N, const float *X, const int incX, const float *Y,
const int incY);
float cblas_sdot(const int N, const float *X, const int incX,
const float *Y, const int incY);
double cblas_ddot(const int N, const double *X, const int incX,
const double *Y, const int incY);
/*
* Functions having prefixes Z and C only
*/
void cblas_cdotu_sub(const int N, const void *X, const int incX,
const void *Y, const int incY, void *dotu);
void cblas_cdotc_sub(const int N, const void *X, const int incX,
const void *Y, const int incY, void *dotc);
void cblas_zdotu_sub(const int N, const void *X, const int incX,
const void *Y, const int incY, void *dotu);
void cblas_zdotc_sub(const int N, const void *X, const int incX,
const void *Y, const int incY, void *dotc);
/*
* Functions having prefixes S D SC DZ
*/
float cblas_snrm2(const int N, const float *X, const int incX);
float cblas_sasum(const int N, const float *X, const int incX);
double cblas_dnrm2(const int N, const double *X, const int incX);
double cblas_dasum(const int N, const double *X, const int incX);
float cblas_scnrm2(const int N, const void *X, const int incX);
float cblas_scasum(const int N, const void *X, const int incX);
double cblas_dznrm2(const int N, const void *X, const int incX);
double cblas_dzasum(const int N, const void *X, const int incX);
/*
* Functions having standard 4 prefixes (S D C Z)
*/
CBLAS_INDEX cblas_isamax(const int N, const float *X, const int incX);
CBLAS_INDEX cblas_idamax(const int N, const double *X, const int incX);
CBLAS_INDEX cblas_icamax(const int N, const void *X, const int incX);
CBLAS_INDEX cblas_izamax(const int N, const void *X, const int incX);
/*
* ===========================================================================
* Prototypes for level 1 BLAS routines
* ===========================================================================
*/
/*
* Routines with standard 4 prefixes (s, d, c, z)
*/
void cblas_sswap(const int N, float *X, const int incX,
float *Y, const int incY);
void cblas_scopy(const int N, const float *X, const int incX,
float *Y, const int incY);
void cblas_saxpy(const int N, const float alpha, const float *X,
const int incX, float *Y, const int incY);
void cblas_dswap(const int N, double *X, const int incX,
double *Y, const int incY);
void cblas_dcopy(const int N, const double *X, const int incX,
double *Y, const int incY);
void cblas_daxpy(const int N, const double alpha, const double *X,
const int incX, double *Y, const int incY);
void cblas_cswap(const int N, void *X, const int incX,
void *Y, const int incY);
void cblas_ccopy(const int N, const void *X, const int incX,
void *Y, const int incY);
void cblas_caxpy(const int N, const void *alpha, const void *X,
const int incX, void *Y, const int incY);
void cblas_zswap(const int N, void *X, const int incX,
void *Y, const int incY);
void cblas_zcopy(const int N, const void *X, const int incX,
void *Y, const int incY);
void cblas_zaxpy(const int N, const void *alpha, const void *X,
const int incX, void *Y, const int incY);
/*
* Routines with S and D prefix only
*/
void cblas_srotg(float *a, float *b, float *c, float *s);
void cblas_srotmg(float *d1, float *d2, float *b1, const float b2, float *P);
void cblas_srot(const int N, float *X, const int incX,
float *Y, const int incY, const float c, const float s);
void cblas_srotm(const int N, float *X, const int incX,
float *Y, const int incY, const float *P);
void cblas_drotg(double *a, double *b, double *c, double *s);
void cblas_drotmg(double *d1, double *d2, double *b1, const double b2, double *P);
void cblas_drot(const int N, double *X, const int incX,
double *Y, const int incY, const double c, const double s);
void cblas_drotm(const int N, double *X, const int incX,
double *Y, const int incY, const double *P);
/*
* Routines with S D C Z CS and ZD prefixes
*/
void cblas_sscal(const int N, const float alpha, float *X, const int incX);
void cblas_dscal(const int N, const double alpha, double *X, const int incX);
void cblas_cscal(const int N, const void *alpha, void *X, const int incX);
void cblas_zscal(const int N, const void *alpha, void *X, const int incX);
void cblas_csscal(const int N, const float alpha, void *X, const int incX);
void cblas_zdscal(const int N, const double alpha, void *X, const int incX);
/*
* ===========================================================================
* Prototypes for level 2 BLAS
* ===========================================================================
*/
/*
* Routines with standard 4 prefixes (S, D, C, Z)
*/
void cblas_sgemv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const float alpha, const float *A, const int lda,
const float *X, const int incX, const float beta,
float *Y, const int incY);
void cblas_sgbmv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const int KL, const int KU, const float alpha,
const float *A, const int lda, const float *X,
const int incX, const float beta, float *Y, const int incY);
void cblas_strmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const float *A, const int lda,
float *X, const int incX);
void cblas_stbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const float *A, const int lda,
float *X, const int incX);
void cblas_stpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const float *Ap, float *X, const int incX);
void cblas_strsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const float *A, const int lda, float *X,
const int incX);
void cblas_stbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const float *A, const int lda,
float *X, const int incX);
void cblas_stpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const float *Ap, float *X, const int incX);
void cblas_dgemv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const double alpha, const double *A, const int lda,
const double *X, const int incX, const double beta,
double *Y, const int incY);
void cblas_dgbmv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const int KL, const int KU, const double alpha,
const double *A, const int lda, const double *X,
const int incX, const double beta, double *Y, const int incY);
void cblas_dtrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const double *A, const int lda,
double *X, const int incX);
void cblas_dtbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const double *A, const int lda,
double *X, const int incX);
void cblas_dtpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const double *Ap, double *X, const int incX);
void cblas_dtrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const double *A, const int lda, double *X,
const int incX);
void cblas_dtbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const double *A, const int lda,
double *X, const int incX);
void cblas_dtpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const double *Ap, double *X, const int incX);
void cblas_cgemv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const void *alpha, const void *A, const int lda,
const void *X, const int incX, const void *beta,
void *Y, const int incY);
void cblas_cgbmv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const int KL, const int KU, const void *alpha,
const void *A, const int lda, const void *X,
const int incX, const void *beta, void *Y, const int incY);
void cblas_ctrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *A, const int lda,
void *X, const int incX);
void cblas_ctbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const void *A, const int lda,
void *X, const int incX);
void cblas_ctpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *Ap, void *X, const int incX);
void cblas_ctrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *A, const int lda, void *X,
const int incX);
void cblas_ctbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const void *A, const int lda,
void *X, const int incX);
void cblas_ctpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *Ap, void *X, const int incX);
void cblas_zgemv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const void *alpha, const void *A, const int lda,
const void *X, const int incX, const void *beta,
void *Y, const int incY);
void cblas_zgbmv(const enum CBLAS_ORDER order,
const enum CBLAS_TRANSPOSE TransA, const int M, const int N,
const int KL, const int KU, const void *alpha,
const void *A, const int lda, const void *X,
const int incX, const void *beta, void *Y, const int incY);
void cblas_ztrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *A, const int lda,
void *X, const int incX);
void cblas_ztbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const void *A, const int lda,
void *X, const int incX);
void cblas_ztpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *Ap, void *X, const int incX);
void cblas_ztrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *A, const int lda, void *X,
const int incX);
void cblas_ztbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const int K, const void *A, const int lda,
void *X, const int incX);
void cblas_ztpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,
const int N, const void *Ap, void *X, const int incX);
/*
* Routines with S and D prefixes only
*/
void cblas_ssymv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const float *A,
const int lda, const float *X, const int incX,
const float beta, float *Y, const int incY);
void cblas_ssbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const int K, const float alpha, const float *A,
const int lda, const float *X, const int incX,
const float beta, float *Y, const int incY);
void cblas_sspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const float *Ap,
const float *X, const int incX,
const float beta, float *Y, const int incY);
void cblas_sger(const enum CBLAS_ORDER order, const int M, const int N,
const float alpha, const float *X, const int incX,
const float *Y, const int incY, float *A, const int lda);
void cblas_ssyr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const float *X,
const int incX, float *A, const int lda);
void cblas_sspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const float *X,
const int incX, float *Ap);
void cblas_ssyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const float *X,
const int incX, const float *Y, const int incY, float *A,
const int lda);
void cblas_sspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const float *X,
const int incX, const float *Y, const int incY, float *A);
void cblas_dsymv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const double *A,
const int lda, const double *X, const int incX,
const double beta, double *Y, const int incY);
void cblas_dsbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const int K, const double alpha, const double *A,
const int lda, const double *X, const int incX,
const double beta, double *Y, const int incY);
void cblas_dspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const double *Ap,
const double *X, const int incX,
const double beta, double *Y, const int incY);
void cblas_dger(const enum CBLAS_ORDER order, const int M, const int N,
const double alpha, const double *X, const int incX,
const double *Y, const int incY, double *A, const int lda);
void cblas_dsyr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const double *X,
const int incX, double *A, const int lda);
void cblas_dspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const double *X,
const int incX, double *Ap);
void cblas_dsyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const double *X,
const int incX, const double *Y, const int incY, double *A,
const int lda);
void cblas_dspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const double *X,
const int incX, const double *Y, const int incY, double *A);
/*
* Routines with C and Z prefixes only
*/
void cblas_chemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const void *alpha, const void *A,
const int lda, const void *X, const int incX,
const void *beta, void *Y, const int incY);
void cblas_chbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const int K, const void *alpha, const void *A,
const int lda, const void *X, const int incX,
const void *beta, void *Y, const int incY);
void cblas_chpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const void *alpha, const void *Ap,
const void *X, const int incX,
const void *beta, void *Y, const int incY);
void cblas_cgeru(const enum CBLAS_ORDER order, const int M, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *A, const int lda);
void cblas_cgerc(const enum CBLAS_ORDER order, const int M, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *A, const int lda);
void cblas_cher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const void *X, const int incX,
void *A, const int lda);
void cblas_chpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const float alpha, const void *X,
const int incX, void *A);
void cblas_cher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *A, const int lda);
void cblas_chpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *Ap);
void cblas_zhemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const void *alpha, const void *A,
const int lda, const void *X, const int incX,
const void *beta, void *Y, const int incY);
void cblas_zhbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const int K, const void *alpha, const void *A,
const int lda, const void *X, const int incX,
const void *beta, void *Y, const int incY);
void cblas_zhpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const void *alpha, const void *Ap,
const void *X, const int incX,
const void *beta, void *Y, const int incY);
void cblas_zgeru(const enum CBLAS_ORDER order, const int M, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *A, const int lda);
void cblas_zgerc(const enum CBLAS_ORDER order, const int M, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *A, const int lda);
void cblas_zher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const void *X, const int incX,
void *A, const int lda);
void cblas_zhpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,
const int N, const double alpha, const void *X,
const int incX, void *A);
void cblas_zher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *A, const int lda);
void cblas_zhpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,
const void *alpha, const void *X, const int incX,
const void *Y, const int incY, void *Ap);
/*
* ===========================================================================
* Prototypes for level 3 BLAS
* ===========================================================================
*/
/*
* Routines with standard 4 prefixes (S, D, C, Z)
*/
void cblas_sgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_TRANSPOSE TransB, const int M, const int N,
const int K, const float alpha, const float *A,
const int lda, const float *B, const int ldb,
const float beta, float *C, const int ldc);
void cblas_ssymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const int M, const int N,
const float alpha, const float *A, const int lda,
const float *B, const int ldb, const float beta,
float *C, const int ldc);
void cblas_ssyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const float alpha, const float *A, const int lda,
const float beta, float *C, const int ldc);
void cblas_ssyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const float alpha, const float *A, const int lda,
const float *B, const int ldb, const float beta,
float *C, const int ldc);
void cblas_strmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const float alpha, const float *A, const int lda,
float *B, const int ldb);
void cblas_strsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const float alpha, const float *A, const int lda,
float *B, const int ldb);
void cblas_dgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_TRANSPOSE TransB, const int M, const int N,
const int K, const double alpha, const double *A,
const int lda, const double *B, const int ldb,
const double beta, double *C, const int ldc);
void cblas_dsymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const int M, const int N,
const double alpha, const double *A, const int lda,
const double *B, const int ldb, const double beta,
double *C, const int ldc);
void cblas_dsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const double alpha, const double *A, const int lda,
const double beta, double *C, const int ldc);
void cblas_dsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const double alpha, const double *A, const int lda,
const double *B, const int ldb, const double beta,
double *C, const int ldc);
void cblas_dtrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const double alpha, const double *A, const int lda,
double *B, const int ldb);
void cblas_dtrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const double alpha, const double *A, const int lda,
double *B, const int ldb);
void cblas_cgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_TRANSPOSE TransB, const int M, const int N,
const int K, const void *alpha, const void *A,
const int lda, const void *B, const int ldb,
const void *beta, void *C, const int ldc);
void cblas_csymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const int M, const int N,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const void *beta,
void *C, const int ldc);
void cblas_csyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const void *alpha, const void *A, const int lda,
const void *beta, void *C, const int ldc);
void cblas_csyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const void *beta,
void *C, const int ldc);
void cblas_ctrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const void *alpha, const void *A, const int lda,
void *B, const int ldb);
void cblas_ctrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const void *alpha, const void *A, const int lda,
void *B, const int ldb);
void cblas_zgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_TRANSPOSE TransB, const int M, const int N,
const int K, const void *alpha, const void *A,
const int lda, const void *B, const int ldb,
const void *beta, void *C, const int ldc);
void cblas_zsymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const int M, const int N,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const void *beta,
void *C, const int ldc);
void cblas_zsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const void *alpha, const void *A, const int lda,
const void *beta, void *C, const int ldc);
void cblas_zsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const void *beta,
void *C, const int ldc);
void cblas_ztrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const void *alpha, const void *A, const int lda,
void *B, const int ldb);
void cblas_ztrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,
const enum CBLAS_DIAG Diag, const int M, const int N,
const void *alpha, const void *A, const int lda,
void *B, const int ldb);
/*
* Routines with prefixes C and Z only
*/
void cblas_chemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const int M, const int N,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const void *beta,
void *C, const int ldc);
void cblas_cherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const float alpha, const void *A, const int lda,
const float beta, void *C, const int ldc);
void cblas_cher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const float beta,
void *C, const int ldc);
void cblas_zhemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,
const enum CBLAS_UPLO Uplo, const int M, const int N,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const void *beta,
void *C, const int ldc);
void cblas_zherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const double alpha, const void *A, const int lda,
const double beta, void *C, const int ldc);
void cblas_zher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,
const enum CBLAS_TRANSPOSE Trans, const int N, const int K,
const void *alpha, const void *A, const int lda,
const void *B, const int ldb, const double beta,
void *C, const int ldc);
void cblas_xerbla(int p, const char *rout, const char *form, ...);
__END_DECLS
"""
def blas_header_text():
    """Return C source text declaring the Fortran BLAS interface.

    Builds an ``extern "C"`` header with prototypes for the Fortran-style
    (trailing-underscore) BLAS level 1/2/3 routines, and returns it as a
    single string.  When no external BLAS library is configured
    (``config.blas.ldflags`` is empty), the C sources of a NumPy-based
    fallback implementation (loaded from the ``c_code`` directory next to
    this module) are appended so compiled ops still link.

    Raises
    ------
    IOError
        If the fallback C sources cannot be read.
    """
    blas_code = ""
    if not config.blas.ldflags:
        # Include the Numpy version implementation of [sd]gemm_.
        # No external BLAS: load the alternative implementation shipped as
        # C source files and instantiate the template for both precisions.
        current_filedir = dirname(__file__)
        blas_common_filepath = os.path.join(current_filedir, 'c_code', 'alt_blas_common.h')
        blas_template_filepath = os.path.join(current_filedir, 'c_code', 'alt_blas_template.c')
        common_code = ""
        sblas_code = ""
        dblas_code = ""
        with open(blas_common_filepath) as code:
            common_code = code.read()
        with open(blas_template_filepath) as code:
            template_code = code.read()
        # The template is parameterized over the float type: fill it in once
        # for single precision ("s" prefix) and once for double ("d" prefix).
        sblas_code = template_code % {"float_type": "float", "float_size": 4, "npy_float": "NPY_FLOAT32", "precision": "s"}
        dblas_code = template_code % {"float_type": "double", "float_size": 8, "npy_float": "NPY_FLOAT64", "precision": "d"}
        if not common_code or not template_code:
            raise IOError("Unable to load NumPy implementation of BLAS functions from C source files.")
        blas_code += common_code
        blas_code += sblas_code
        blas_code += dblas_code
    # NOTE(review): a few prototypes below appear twice (dswap_, cswap_,
    # zswap_) — redundant but harmless in C; could be cleaned upstream.
    header = """
extern "C"
{
void xerbla_(char*, void *);
/***********/
/* Level 1 */
/***********/
/* Single Precision */
void srot_(const int*, float *, const int*, float *, const int*, const float *, const float *);
void srotg_(float *,float *,float *,float *);
void srotm_( const int*, float *, const int*, float *, const int*, const float *);
void srotmg_(float *,float *,float *,const float *, float *);
void sswap_( const int*, float *, const int*, float *, const int*);
void scopy_( const int*, const float *, const int*, float *, const int*);
void saxpy_( const int*, const float *, const float *, const int*, float *, const int*);
float sdot_(const int*, const float *, const int*, const float *, const int*);
void sdot_sub_(const int*, const float *, const int*, const float *, const int*, float *);
void sdsdot_sub_( const int*, const float *, const float *, const int*, const float *, const int*, float *);
void sscal_( const int*, const float *, float *, const int*);
void snrm2_sub_( const int*, const float *, const int*, float *);
void sasum_sub_( const int*, const float *, const int*, float *);
void isamax_sub_( const int*, const float * , const int*, const int*);
/* Double Precision */
void drot_(const int*, double *, const int*, double *, const int*, const double *, const double *);
void drotg_(double *,double *,double *,double *);
void drotm_( const int*, double *, const int*, double *, const int*, const double *);
void drotmg_(double *,double *,double *,const double *, double *);
void dswap_( const int*, double *, const int*, double *, const int*);
void dcopy_( const int*, const double *, const int*, double *, const int*);
void daxpy_( const int*, const double *, const double *, const int*, double *, const int*);
void dswap_( const int*, double *, const int*, double *, const int*);
double ddot_(const int*, const double *, const int*, const double *, const int*);
void dsdot_sub_(const int*, const float *, const int*, const float *, const int*, double *);
void ddot_sub_( const int*, const double *, const int*, const double *, const int*, double *);
void dscal_( const int*, const double *, double *, const int*);
void dnrm2_sub_( const int*, const double *, const int*, double *);
void dasum_sub_( const int*, const double *, const int*, double *);
void idamax_sub_( const int*, const double * , const int*, const int*);
/* Single Complex Precision */
void cswap_( const int*, void *, const int*, void *, const int*);
void ccopy_( const int*, const void *, const int*, void *, const int*);
void caxpy_( const int*, const void *, const void *, const int*, void *, const int*);
void cswap_( const int*, void *, const int*, void *, const int*);
void cdotc_sub_( const int*, const void *, const int*, const void *, const int*, void *);
void cdotu_sub_( const int*, const void *, const int*, const void *, const int*, void *);
void cscal_( const int*, const void *, void *, const int*);
void icamax_sub_( const int*, const void *, const int*, const int*);
void csscal_( const int*, const float *, void *, const int*);
void scnrm2_sub_( const int*, const void *, const int*, float *);
void scasum_sub_( const int*, const void *, const int*, float *);
/* Double Complex Precision */
void zswap_( const int*, void *, const int*, void *, const int*);
void zcopy_( const int*, const void *, const int*, void *, const int*);
void zaxpy_( const int*, const void *, const void *, const int*, void *, const int*);
void zswap_( const int*, void *, const int*, void *, const int*);
void zdotc_sub_( const int*, const void *, const int*, const void *, const int*, void *);
void zdotu_sub_( const int*, const void *, const int*, const void *, const int*, void *);
void zdscal_( const int*, const double *, void *, const int*);
void zscal_( const int*, const void *, void *, const int*);
void dznrm2_sub_( const int*, const void *, const int*, double *);
void dzasum_sub_( const int*, const void *, const int*, double *);
void izamax_sub_( const int*, const void *, const int*, const int*);
/***********/
/* Level 2 */
/***********/
/* Single Precision */
void sgemv_(char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void sgbmv_(char*, const int*, const int*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void ssymv_(char*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void ssbmv_(char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void sspmv_(char*, const int*, const float *, const float *, const float *, const int*, const float *, float *, const int*);
void strmv_( char*, char*, char*, const int*, const float *, const int*, float *, const int*);
void stbmv_( char*, char*, char*, const int*, const int*, const float *, const int*, float *, const int*);
void strsv_( char*, char*, char*, const int*, const float *, const int*, float *, const int*);
void stbsv_( char*, char*, char*, const int*, const int*, const float *, const int*, float *, const int*);
void stpmv_( char*, char*, char*, const int*, const float *, float *, const int*);
void stpsv_( char*, char*, char*, const int*, const float *, float *, const int*);
void sger_( const int*, const int*, const float *, const float *, const int*, const float *, const int*, float *, const int*);
void ssyr_(char*, const int*, const float *, const float *, const int*, float *, const int*);
void sspr_(char*, const int*, const float *, const float *, const int*, float *);
void sspr2_(char*, const int*, const float *, const float *, const int*, const float *, const int*, float *);
void ssyr2_(char*, const int*, const float *, const float *, const int*, const float *, const int*, float *, const int*);
/* Double Precision */
void dgemv_(char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void dgbmv_(char*, const int*, const int*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void dsymv_(char*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void dsbmv_(char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void dspmv_(char*, const int*, const double *, const double *, const double *, const int*, const double *, double *, const int*);
void dtrmv_( char*, char*, char*, const int*, const double *, const int*, double *, const int*);
void dtbmv_( char*, char*, char*, const int*, const int*, const double *, const int*, double *, const int*);
void dtrsv_( char*, char*, char*, const int*, const double *, const int*, double *, const int*);
void dtbsv_( char*, char*, char*, const int*, const int*, const double *, const int*, double *, const int*);
void dtpmv_( char*, char*, char*, const int*, const double *, double *, const int*);
void dtpsv_( char*, char*, char*, const int*, const double *, double *, const int*);
void dger_( const int*, const int*, const double *, const double *, const int*, const double *, const int*, double *, const int*);
void dsyr_(char*, const int*, const double *, const double *, const int*, double *, const int*);
void dspr_(char*, const int*, const double *, const double *, const int*, double *);
void dspr2_(char*, const int*, const double *, const double *, const int*, const double *, const int*, double *);
void dsyr2_(char*, const int*, const double *, const double *, const int*, const double *, const int*, double *, const int*);
/* Single Complex Precision */
void cgemv_(char*, const int*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void cgbmv_(char*, const int*, const int*, const int*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void chemv_(char*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void chbmv_(char*, const int*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void chpmv_(char*, const int*, const void *, const void *, const void *, const int*, const void *, void *, const int*);
void ctrmv_( char*, char*, char*, const int*, const void *, const int*, void *, const int*);
void ctbmv_( char*, char*, char*, const int*, const int*, const void *, const int*, void *, const int*);
void ctpmv_( char*, char*, char*, const int*, const void *, void *, const int*);
void ctrsv_( char*, char*, char*, const int*, const void *, const int*, void *, const int*);
void ctbsv_( char*, char*, char*, const int*, const int*, const void *, const int*, void *, const int*);
void ctpsv_( char*, char*, char*, const int*, const void *, void *,const int*);
void cgerc_( const int*, const int*, const void *, const void *, const int*, const void *, const int*, void *, const int*);
void cgeru_( const int*, const int*, const void *, const void *, const int*, const void *, const int*, void *, const int*);
void cher_(char*, const int*, const float *, const void *, const int*, void *, const int*);
void cher2_(char*, const int*, const void *, const void *, const int*, const void *, const int*, void *, const int*);
void chpr_(char*, const int*, const float *, const void *, const int*, void *);
void chpr2_(char*, const int*, const float *, const void *, const int*, const void *, const int*, void *);
/* Double Complex Precision */
void zgemv_(char*, const int*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void zgbmv_(char*, const int*, const int*, const int*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void zhemv_(char*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void zhbmv_(char*, const int*, const int*, const void *, const void *, const int*, const void *, const int*, const void *, void *, const int*);
void zhpmv_(char*, const int*, const void *, const void *, const void *, const int*, const void *, void *, const int*);
void ztrmv_( char*, char*, char*, const int*, const void *, const int*, void *, const int*);
void ztbmv_( char*, char*, char*, const int*, const int*, const void *, const int*, void *, const int*);
void ztpmv_( char*, char*, char*, const int*, const void *, void *, const int*);
void ztrsv_( char*, char*, char*, const int*, const void *, const int*, void *, const int*);
void ztbsv_( char*, char*, char*, const int*, const int*, const void *, const int*, void *, const int*);
void ztpsv_( char*, char*, char*, const int*, const void *, void *,const int*);
void zgerc_( const int*, const int*, const void *, const void *, const int*, const void *, const int*, void *, const int*);
void zgeru_( const int*, const int*, const void *, const void *, const int*, const void *, const int*, void *, const int*);
void zher_(char*, const int*, const double *, const void *, const int*, void *, const int*);
void zher2_(char*, const int*, const void *, const void *, const int*, const void *, const int*, void *, const int*);
void zhpr_(char*, const int*, const double *, const void *, const int*, void *);
void zhpr2_(char*, const int*, const double *, const void *, const int*, const void *, const int*, void *);
/***********/
/* Level 3 */
/***********/
/* Single Precision */
void sgemm_(char*, char*, const int*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void ssymm_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void ssyrk_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, float *, const int*);
void ssyr2k_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void strmm_(char*, char*, char*, char*, const int*, const int*, const float *, const float *, const int*, float *, const int*);
void strsm_(char*, char*, char*, char*, const int*, const int*, const float *, const float *, const int*, float *, const int*);
/* Double Precision */
void dgemm_(char*, char*, const int*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void dsymm_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void dsyrk_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, double *, const int*);
void dsyr2k_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void dtrmm_(char*, char*, char*, char*, const int*, const int*, const double *, const double *, const int*, double *, const int*);
void dtrsm_(char*, char*, char*, char*, const int*, const int*, const double *, const double *, const int*, double *, const int*);
/* Single Complex Precision */
void cgemm_(char*, char*, const int*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void csymm_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void chemm_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void csyrk_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, float *, const int*);
void cherk_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, float *, const int*);
void csyr2k_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void cher2k_(char*, char*, const int*, const int*, const float *, const float *, const int*, const float *, const int*, const float *, float *, const int*);
void ctrmm_(char*, char*, char*, char*, const int*, const int*, const float *, const float *, const int*, float *, const int*);
void ctrsm_(char*, char*, char*, char*, const int*, const int*, const float *, const float *, const int*, float *, const int*);
/* Double Complex Precision */
void zgemm_(char*, char*, const int*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void zsymm_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void zhemm_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void zsyrk_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, double *, const int*);
void zherk_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, double *, const int*);
void zsyr2k_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void zher2k_(char*, char*, const int*, const int*, const double *, const double *, const int*, const double *, const int*, const double *, double *, const int*);
void ztrmm_(char*, char*, char*, char*, const int*, const int*, const double *, const double *, const int*, double *, const int*);
void ztrsm_(char*, char*, char*, char*, const int*, const int*, const double *, const double *, const int*, double *, const int*);
}
"""
    if detect_macos_sdot_bug():
        if detect_macos_sdot_bug.fix_works:
            # The system's Fortran sdot_ is buggy but cblas_sdot is fine:
            # shadow sdot_ with a wrapper that forwards to cblas_sdot.
            header += textwrap.dedent("""\
extern "C" float cblas_sdot(int, float*, int, float*, int);
static float sdot_(int* Nx, float* x, int* Sx, float* y, int* Sy)
{
return cblas_sdot(*Nx, x, *Sx, y, *Sy);
}
""")
        else:
            # Make sure the buggy version of sdot_ is never used
            header += textwrap.dedent("""\
static float sdot_(int* Nx, float* x, int* Sx, float* y, int* Sy)
{
fprintf(stderr,
"FATAL: The implementation of BLAS SDOT "
"routine in your system has a bug that "
"makes it return wrong results.\\n"
"Please contact theano-dev@groups.google.com.\\n"
"You can work around this bug by using a "
"different BLAS library, or disabling BLAS\\n");
assert(0);
}
""")
    return header + blas_code
# No external BLAS library was configured via config.blas.ldflags, so the
# NumPy C-API based fallback implementation will be used; warn the user once
# at import time.
if not config.blas.ldflags:
    _logger.warning('Using NumPy C-API based implementation for BLAS functions.')
def mkl_threads_text():
    """Return the C header declaring Intel MKL's thread-control API.

    The lowercase ``mkl_*`` macros alias the canonical ``MKL_*`` entry
    points so generated code can use either spelling.
    """
    return """
extern "C"
{
int MKL_Set_Num_Threads_Local(int);
#define mkl_set_num_threads_local MKL_Set_Num_Threads_Local
void MKL_Set_Num_Threads(int);
#define mkl_set_num_threads MKL_Set_Num_Threads
int MKL_Get_Max_Threads(void);
#define mkl_get_max_threads MKL_Get_Max_Threads
int MKL_Domain_Set_Num_Threads(int, int);
#define mkl_domain_set_num_threads MKL_Domain_Set_Num_Threads
int MKL_Domain_Get_Max_Threads(int);
#define mkl_domain_get_max_threads MKL_Domain_Get_Max_Threads
void MKL_Set_Dynamic(int);
#define mkl_set_dynamic MKL_Set_Dynamic
int MKL_Get_Dynamic(void);
#define mkl_get_dynamic MKL_Get_Dynamic
}
"""
def openblas_threads_text():
    """Return the C header declaring OpenBLAS's thread-control API.

    ``goto_set_num_threads`` is the legacy GotoBLAS name kept for
    compatibility with older OpenBLAS builds.
    """
    return """
extern "C"
{
void openblas_set_num_threads(int);
void goto_set_num_threads(int);
int openblas_get_num_threads(void);
}
"""
def blas_header_version():
    """Return the version tuple for the generated BLAS C header.

    The base header is version ``(9,)``; an extra component marks which
    variant of the macOS SDOT workaround was appended to the header
    (1 = working cblas_sdot wrapper, 2 = fatal stub).
    """
    if not detect_macos_sdot_bug():
        return (9,)
    return (9, 1) if detect_macos_sdot_bug.fix_works else (9, 2)
def ____gemm_code(check_ab, a_init, b_init):
mod = '%'
return """
const char * error_string = NULL;
int type_num = PyArray_DESCR(_x)->type_num;
int type_size = PyArray_DESCR(_x)->elsize; // in bytes
npy_intp* Nx = PyArray_DIMS(_x);
npy_intp* Ny = PyArray_DIMS(_y);
npy_intp* Nz = PyArray_DIMS(_z);
npy_intp* Sx = PyArray_STRIDES(_x);
npy_intp* Sy = PyArray_STRIDES(_y);
npy_intp* Sz = PyArray_STRIDES(_z);
size_t sx_0, sx_1, sy_0, sy_1, sz_0, sz_1;
int unit = 0;
if (PyArray_NDIM(_x) != 2) goto _dot_execute_fallback;
if (PyArray_NDIM(_y) != 2) goto _dot_execute_fallback;
if (PyArray_NDIM(_z) != 2) goto _dot_execute_fallback;
%(check_ab)s
if ((PyArray_DESCR(_x)->type_num != NPY_DOUBLE)
&& (PyArray_DESCR(_x)->type_num != NPY_FLOAT))
goto _dot_execute_fallback;
if ((PyArray_DESCR(_y)->type_num != NPY_DOUBLE)
&& (PyArray_DESCR(_y)->type_num != NPY_FLOAT))
goto _dot_execute_fallback;
if ((PyArray_DESCR(_y)->type_num != NPY_DOUBLE)
&& (PyArray_DESCR(_y)->type_num != NPY_FLOAT))
goto _dot_execute_fallback;
if ((PyArray_DESCR(_x)->type_num != PyArray_DESCR(_y)->type_num)
||(PyArray_DESCR(_x)->type_num != PyArray_DESCR(_z)->type_num))
goto _dot_execute_fallback;
if ((Nx[0] != Nz[0]) || (Nx[1] != Ny[0]) || (Ny[1] != Nz[1]))
{
error_string = "Input dimensions do not agree";
goto _dot_execute_fail;
}
if ((Sx[0] < 1) || (Sx[1] < 1) || (Sx[0] %(mod)s type_size) || (Sx[1] %(mod)s type_size)
|| (Sy[0] < 1) || (Sy[1] < 1) || (Sy[0] %(mod)s type_size) || (Sy[1] %(mod)s type_size)
|| (Sz[0] < 1) || (Sz[1] < 1) || (Sz[0] %(mod)s type_size) || (Sz[1] %(mod)s type_size))
{
goto _dot_execute_fallback;
}
/*
encode the stride structure of _x,_y,_z into a single integer
*/
unit |= ((Sx[1] == type_size) ? 0x0 : (Sx[0] == type_size) ? 0x1 : 0x2) << 0;
unit |= ((Sy[1] == type_size) ? 0x0 : (Sy[0] == type_size) ? 0x1 : 0x2) << 4;
unit |= ((Sz[1] == type_size) ? 0x0 : (Sz[0] == type_size) ? 0x1 : 0x2) << 8;
/* create appropriate strides for malformed matrices that are row or column
* vectors
*/
sx_0 = (Nx[0] > 1) ? Sx[0]/type_size : Nx[1];
sx_1 = (Nx[1] > 1) ? Sx[1]/type_size : Nx[0];
sy_0 = (Ny[0] > 1) ? Sy[0]/type_size : Ny[1];
sy_1 = (Ny[1] > 1) ? Sy[1]/type_size : Ny[0];
sz_0 = (Nz[0] > 1) ? Sz[0]/type_size : Nz[1];
sz_1 = (Nz[1] > 1) ? Sz[1]/type_size : Nz[0];
switch (type_num)
{
case NPY_FLOAT:
{
#define REAL float
float a = %(a_init)s;
float b = %(b_init)s;
float* x = (float*)PyArray_DATA(_x);
float* y = (float*)PyArray_DATA(_y);
float* z = (float*)PyArray_DATA(_z);
switch(unit)
{
case 0x000: cblas_sgemm(CblasRowMajor, CblasNoTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_0, b, z, sz_0); break;
case 0x001: cblas_sgemm(CblasRowMajor, CblasTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_0, b, z, sz_0); break;
case 0x010: cblas_sgemm(CblasRowMajor, CblasNoTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_1, b, z, sz_0); break;
case 0x011: cblas_sgemm(CblasRowMajor, CblasTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_1, b, z, sz_0); break;
case 0x100: cblas_sgemm(CblasColMajor, CblasTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_0, b, z, sz_1); break;
case 0x101: cblas_sgemm(CblasColMajor, CblasNoTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_0, b, z, sz_1); break;
case 0x110: cblas_sgemm(CblasColMajor, CblasTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_1, b, z, sz_1); break;
case 0x111: cblas_sgemm(CblasColMajor, CblasNoTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_1, b, z, sz_1); break;
default: goto _dot_execute_fallback;
};
#undef REAL
}
break;
case NPY_DOUBLE:
{
#define REAL double
double a = %(a_init)s;
double b = %(b_init)s;
double* x = (double*)PyArray_DATA(_x);
double* y = (double*)PyArray_DATA(_y);
double* z = (double*)PyArray_DATA(_z);
switch(unit)
{
case 0x000: cblas_dgemm(CblasRowMajor, CblasNoTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_0, b, z, sz_0); break;
case 0x001: cblas_dgemm(CblasRowMajor, CblasTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_0, b, z, sz_0); break;
case 0x010: cblas_dgemm(CblasRowMajor, CblasNoTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_1, b, z, sz_0); break;
case 0x011: cblas_dgemm(CblasRowMajor, CblasTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_1, b, z, sz_0); break;
case 0x100: cblas_dgemm(CblasColMajor, CblasTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_0, b, z, sz_1); break;
case 0x101: cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_0, b, z, sz_1); break;
case 0x110: cblas_dgemm(CblasColMajor, CblasTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_0, y, sy_1, b, z, sz_1); break;
case 0x111: cblas_dgemm(CblasColMajor, CblasNoTrans, CblasNoTrans, Nz[0], Nz[1], Nx[1], a, x, sx_1, y, sy_1, b, z, sz_1); break;
default: goto _dot_execute_fallback;
};
#undef REAL
}
break;
}
return 0; //success!
_dot_execute_fallback:
PyErr_SetString(PyExc_NotImplementedError,
"dot->execute() fallback");
return -1;
_dot_execute_fail:
if (error_string == NULL)
PyErr_SetString(PyExc_ValueError,
"dot->execute() cant run on these inputs");
return -1;
/* v 1 */
""" % locals()
| 57.084175 | 186 | 0.585614 | 9,272 | 67,816 | 4.158326 | 0.062446 | 0.208943 | 0.103175 | 0.047204 | 0.813959 | 0.792587 | 0.771553 | 0.75913 | 0.742556 | 0.72243 | 0 | 0.00866 | 0.308673 | 67,816 | 1,187 | 187 | 57.132266 | 0.813728 | 0.025997 | 0 | 0.408635 | 0 | 0.124498 | 0.944497 | 0.035289 | 0 | 0 | 0.001623 | 0 | 0.001004 | 1 | 0.007028 | false | 0 | 0.008032 | 0 | 0.035141 | 0.002008 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4c3f931ad7dce58834577ada6584790a4611685d | 16,247 | py | Python | L5NeuronSimulation/ShortPlasticity/synapses.py | JuliusvR/L5NeuronSimulation | 1fc68c7367c439e1f9c9b73a15a95609858ec720 | [
"MIT"
] | 2 | 2020-11-12T15:12:31.000Z | 2021-12-09T19:12:55.000Z | L5NeuronSimulation/ShortPlasticity/synapses.py | JuliusvR/L5NeuronSimulation | 1fc68c7367c439e1f9c9b73a15a95609858ec720 | [
"MIT"
] | 2 | 2021-05-20T21:36:12.000Z | 2021-08-29T15:32:35.000Z | L5NeuronSimulation/ShortPlasticity/synapses.py | JuliusvR/L5NeuronSimulation | 1fc68c7367c439e1f9c9b73a15a95609858ec720 | [
"MIT"
] | 6 | 2021-03-03T22:14:39.000Z | 2021-11-23T13:44:35.000Z | import glob
import json
import os
import re
from bmtk.simulator.bionet.pyfunction_cache import add_synapse_model
from neuron import h
import random
import numpy as np
# Global weight multiplier, referenced by the (currently commented-out)
# lognormal weight-sampling expressions in Pyr2Pyr/Int2Pyr.
scale = 10
#np.random.seed(3)
# Per-target-cell mean weights, keyed by cell nid; only populated by the
# commented-out per-cell sampling code in Pyr2Pyr/Int2Pyr.
weight_means = {"exc": {}, "inh": {}}
# Holds a reference to every hoc Random used for release probability so the
# generators stay alive (presumably to avoid garbage collection) while the
# synapses exist.
generators = []
def lognormal(m, s):
    """Draw one lognormal sample given the linear-space mean *m* and
    standard deviation *s*, floored at 1e-10.

    The (mu, sigma) parameters of the underlying normal are derived from
    *m* and *s* with the standard moment-matching conversion.
    """
    var_term = np.log((s / m) ** 2 + 1)
    mu = np.log(m) - 0.5 * var_term
    sigma = np.sqrt(var_term)
    return max(np.random.lognormal(mu, sigma, 1), 0.0000000001)
def Bg2Pyr(syn_params, sec_x, sec_id):
    """Create a bg2pyr synapse.

    :param syn_params: parameters of a synapse
    :param sec_x: normalized distance along the section
    :param sec_id: target section
    :return: NEURON synapse object
    """
    lsyn = h.bg2pyr(sec_x, sec=sec_id)
    # Copy any provided NMDA/AMPA parameters onto the hoc synapse object;
    # missing (or falsy) entries leave the mechanism defaults untouched.
    for attr in ('initW', 'taun1', 'taun2', 'gNMDAmax', 'enmda',
                 'taua1', 'taua2', 'gAMPAmax', 'eampa'):
        value = syn_params.get(attr)
        if value:
            setattr(lsyn, attr, float(value))
    return lsyn
def bg2pyr(syn_params, xs, secs):
    """Create a list of bg2pyr synapses.

    :param syn_params: parameters of a synapse
    :param xs: list of normalized distances along the section
    :param secs: target sections
    :return: list of NEURON synapse objects
    """
    # BUG FIX: this factory previously instantiated Pyr2Pyr synapses
    # (copy/paste error); it now builds the bg2pyr point process that its
    # docstring describes and that load() registers it for.
    return [Bg2Pyr(syn_params, x, sec) for x, sec in zip(xs, secs)]
def Pyr2Int(syn_params, sec_x, sec_id):
    """Create a pyr2int synapse.

    :param syn_params: parameters of a synapse
    :param sec_x: normalized distance along the section
    :param sec_id: target section
    :return: NEURON synapse object
    """
    lsyn = h.pyr2int(sec_x, sec=sec_id)

    def _copy(names):
        # Forward any provided (truthy) entries to the hoc synapse object.
        for name in names:
            value = syn_params.get(name)
            if value:
                setattr(lsyn, name, float(value))

    # Receptor kinetics: AMPA/NMDA rise/decay, duration, conductance, reversal.
    _copy(('AlphaTmax_ampa', 'Beta_ampa', 'Cdur_ampa', 'gbar_ampa',
           'Erev_ampa', 'AlphaTmax_nmda', 'Beta_nmda', 'Cdur_nmda',
           'gbar_nmda', 'Erev_nmda'))
    # The initial weight is jittered per synapse; Wmax/Wmin are specified
    # relative to the jittered initW, so order matters here.
    if syn_params.get('initW'):
        lsyn.initW = float(syn_params['initW']) * random.uniform(0.5, 1.0)
    if syn_params.get('Wmax'):
        lsyn.Wmax = float(syn_params['Wmax']) * lsyn.initW
    if syn_params.get('Wmin'):
        lsyn.Wmin = float(syn_params['Wmin']) * lsyn.initW
    # Plasticity thresholds, short-term dynamics and neuromodulation terms.
    _copy(('lambda1', 'lambda2', 'threshold1', 'threshold2', 'tauD1', 'd1',
           'tauD2', 'd2', 'tauF', 'f', 'bACH', 'aDA', 'bDA', 'wACH'))
    return lsyn
def pyr2int(syn_params, xs, secs):
    """Create a list of pyr2int synapses.

    :param syn_params: parameters of a synapse
    :param xs: list of normalized distances along the section
    :param secs: target sections
    :return: list of NEURON synapse objects
    """
    return [Pyr2Int(syn_params, x, sec) for x, sec in zip(xs, secs)]
def Int2Pyr(syn_params, sec_x, sec_id):
    """Create an int2pyr synapse.

    :param syn_params: parameters of a synapse
    :param sec_x: normalized distance along the section
    :param sec_id: target section
    :return: NEURON synapse object
    """
    lsyn = h.int2pyr(sec_x, sec=sec_id)

    # Give this synapse its own uniform(0, 1) stream for release probability
    # and keep a module-level reference so the hoc Random stays alive.
    rng = h.Random()
    rng.MCellRan4()
    rng.uniform(0, 1)
    lsyn.setRandObjRef(rng)
    generators.append(rng)

    # Path distance from the soma and the target section type. Currently
    # unused, but kept for the disabled location-dependent PV+/SOM+
    # depression presets (d1/tauD1). Note h.distance(sec=...) also sets the
    # global distance origin as a side effect.
    h.distance(sec=sec_id.cell().soma[0])
    dist = h.distance(sec_id(sec_x))
    fullsecname = sec_id.name()
    sec_type = fullsecname.split(".")[1][:4]

    def _copy(names):
        # Forward any provided (truthy) entries to the hoc synapse object.
        for name in names:
            value = syn_params.get(name)
            if value:
                setattr(lsyn, name, float(value))

    # Receptor kinetics: AMPA/NMDA rise/decay, duration, conductance, reversal.
    _copy(('AlphaTmax_ampa', 'Beta_ampa', 'Cdur_ampa', 'gbar_ampa',
           'Erev_ampa', 'AlphaTmax_nmda', 'Beta_nmda', 'Cdur_nmda',
           'gbar_nmda', 'Erev_nmda'))
    # The configured initial weight is overridden by a fixed value here;
    # Wmax/Wmin are specified relative to initW, so order matters.
    if syn_params.get('initW'):
        lsyn.initW = 12
    if syn_params.get('Wmax'):
        lsyn.Wmax = float(syn_params['Wmax']) * lsyn.initW
    if syn_params.get('Wmin'):
        lsyn.Wmin = float(syn_params['Wmin']) * lsyn.initW
    # Plasticity thresholds and short-term dynamics parameters.
    _copy(('lambda1', 'lambda2', 'threshold1', 'threshold2', 'tauD1', 'd1',
           'tauD2', 'd2', 'tauF', 'f'))
    return lsyn
def int2pyr(syn_params, xs, secs):
    """Create a list of int2pyr synapses.

    :param syn_params: parameters of a synapse
    :param xs: list of normalized distances along the section
    :param secs: target sections
    :return: list of NEURON synapse objects
    """
    np.random.seed(3)  # fixed seed, as in the original implementation
    return [Int2Pyr(syn_params, x, sec) for x, sec in zip(xs, secs)]
def Pyr2Pyr(syn_params, sec_x, sec_id):
    """Create a pyr2pyr synapse.

    :param syn_params: parameters of a synapse
    :param sec_x: normalized distance along the section
    :param sec_id: target section
    :return: NEURON synapse object
    """
    lsyn = h.pyr2pyr(sec_x, sec=sec_id)

    # Give this synapse its own uniform(0, 1) stream for release probability
    # and keep a module-level reference so the hoc Random stays alive.
    rng = h.Random()
    rng.MCellRan4()
    rng.uniform(0, 1)
    lsyn.setRandObjRef(rng)
    generators.append(rng)

    def _copy(names):
        # Forward any provided (truthy) entries to the hoc synapse object.
        for name in names:
            value = syn_params.get(name)
            if value:
                setattr(lsyn, name, float(value))

    # Receptor kinetics: AMPA/NMDA rise/decay, duration, conductance, reversal.
    _copy(('AlphaTmax_ampa', 'Beta_ampa', 'Cdur_ampa', 'gbar_ampa',
           'Erev_ampa', 'AlphaTmax_nmda', 'Beta_nmda', 'Cdur_nmda',
           'gbar_nmda', 'Erev_nmda'))
    # The configured initial weight and Wmax are overridden by fixed values
    # here; Wmin is specified relative to initW, so order matters.
    if syn_params.get('initW'):
        lsyn.initW = 0.5
    if syn_params.get('Wmax'):
        lsyn.Wmax = 8
    if syn_params.get('Wmin'):
        lsyn.Wmin = float(syn_params['Wmin']) * lsyn.initW
    # Plasticity thresholds, short-term dynamics and neuromodulation terms.
    _copy(('lambda1', 'lambda2', 'threshold1', 'threshold2', 'tauD1', 'd1',
           'tauD2', 'd2', 'tauF', 'f', 'bACH', 'aDA', 'bDA', 'wACH'))
    return lsyn
def pyr2pyr(syn_params, xs, secs):
    """Create a list of pyr2pyr synapses.

    :param syn_params: parameters of a synapse
    :param xs: list of normalized distances along the section
    :param secs: target sections
    :return: list of NEURON synapse objects
    """
    np.random.seed(3)  # fixed seed, as in the original implementation
    return [Pyr2Pyr(syn_params, x, sec) for x, sec in zip(xs, secs)]
def load():
    """Register every synapse factory with BMTK, both under its explicit
    model name and as a default (unnamed) synapse model."""
    for factory, name in ((Bg2Pyr, 'bg2pyr'),
                          (Pyr2Pyr, 'pyr2pyr'),
                          (Pyr2Int, 'pyr2int'),
                          (Int2Pyr, 'int2pyr')):
        add_synapse_model(factory, name, overwrite=False)
        add_synapse_model(factory, overwrite=False)
def syn_params_dicts(syn_dir='../biophys_components/synaptic_models'):
    """Load every synapse parameter file from *syn_dir*.

    :param syn_dir: directory containing the ``*.json`` synapse models
    :return: dict mapping each json file's basename to its parsed contents,
             e.g. ``{"filename.json": {"prop1": "val1", ...}, ...}``
    """
    data = {}
    for path in glob.glob(os.path.join(syn_dir, '*.json')):
        with open(path) as handle:
            data[os.path.basename(path)] = json.load(handle)
    return data
| 37.696056 | 147 | 0.620915 | 2,434 | 16,247 | 3.982744 | 0.089565 | 0.181968 | 0.128533 | 0.124201 | 0.825872 | 0.810708 | 0.775119 | 0.765422 | 0.721993 | 0.714566 | 0 | 0.050604 | 0.215486 | 16,247 | 430 | 148 | 37.783721 | 0.709948 | 0.326891 | 0 | 0.722656 | 0 | 0 | 0.115883 | 0.003523 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042969 | false | 0 | 0.03125 | 0 | 0.117188 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4c6c4d3eda68f087cbecadefb1b247fff7c3f62c | 308 | py | Python | src/sage/manifolds/all.py | bopopescu/sage | 2d495be78e0bdc7a0a635454290b27bb4f5f70f0 | [
"BSL-1.0"
] | 3 | 2019-07-15T13:48:24.000Z | 2019-11-08T12:31:43.000Z | src/sage/manifolds/all.py | bopopescu/sage | 2d495be78e0bdc7a0a635454290b27bb4f5f70f0 | [
"BSL-1.0"
] | null | null | null | src/sage/manifolds/all.py | bopopescu/sage | 2d495be78e0bdc7a0a635454290b27bb4f5f70f0 | [
"BSL-1.0"
] | 1 | 2020-07-23T10:22:38.000Z | 2020-07-23T10:22:38.000Z | from sage.misc.lazy_import import lazy_import
lazy_import('sage.manifolds.manifold', 'Manifold')
lazy_import('sage.manifolds.differentiable.real_line', 'OpenInterval')
lazy_import('sage.manifolds.differentiable.real_line', 'RealLine')
lazy_import('sage.manifolds.differentiable.euclidean', 'EuclideanSpace')
| 51.333333 | 72 | 0.827922 | 37 | 308 | 6.675676 | 0.378378 | 0.242915 | 0.226721 | 0.37247 | 0.51417 | 0.364372 | 0.364372 | 0 | 0 | 0 | 0 | 0 | 0.038961 | 308 | 5 | 73 | 61.6 | 0.834459 | 0 | 0 | 0 | 0 | 0 | 0.590909 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
d5b6297fd9956c49eb3cd4e8d07209bb12155391 | 6,693 | py | Python | moco/model.py | BostonCrayfish/mmsegmentation | e8b87242b877bfe0c32ea2630c2fd08977d7dd4b | [
"Apache-2.0"
] | null | null | null | moco/model.py | BostonCrayfish/mmsegmentation | e8b87242b877bfe0c32ea2630c2fd08977d7dd4b | [
"Apache-2.0"
] | null | null | null | moco/model.py | BostonCrayfish/mmsegmentation | e8b87242b877bfe0c32ea2630c2fd08977d7dd4b | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
'''
Sequential(
(0): ResNet(
(conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
(bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
(maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
(layer1): ResLayer(
(0): Bottleneck(
(conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
(downsample): Sequential(
(0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
(1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
)
)
(1): Bottleneck(
(conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
)
(2): Bottleneck(
(conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
)
)
(layer2): ResLayer(
(0): Bottleneck(
(conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
(downsample): Sequential(
(0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
(1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
)
)
(1): Bottleneck(
(conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
)
(2): Bottleneck(
(conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
)
(3): Bottleneck(
(conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
(0): AdaptiveAvgPool2d(output_size=1)
(1): ConvModule(
(conv): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(activate): ReLU(inplace=True)
)
)
(aspp_modules): ASPPModule(
(0): ConvModule(
(conv): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(activate): ReLU(inplace=True)
)
(1): ConvModule(
(conv): Conv2d(2048, 512, kernel_size=(3, 3), stride=(1, 1), padding=(12, 12), dilation=(12, 12), bias=False)
(bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(activate): ReLU(inplace=True)
)
(2): ConvModule(
(conv): Conv2d(2048, 512, kernel_size=(3, 3), stride=(1, 1), padding=(24, 24), dilation=(24, 24), bias=False)
(bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(activate): ReLU(inplace=True)
)
(3): ConvModule(
(conv): Conv2d(2048, 512, kernel_size=(3, 3), stride=(1, 1), padding=(36, 36), dilation=(36, 36), bias=False)
(bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(activate): ReLU(inplace=True)
)
)
(bottleneck): ConvModule(
(conv): Conv2d(2560, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(activate): ReLU(inplace=True)
)
)
)
'''
| 56.243697 | 117 | 0.614821 | 988 | 6,693 | 4.071862 | 0.064777 | 0.026846 | 0.05046 | 0.108128 | 0.920209 | 0.904797 | 0.899577 | 0.899577 | 0.89908 | 0.898335 | 0 | 0.127656 | 0.198267 | 6,693 | 118 | 118 | 56.720339 | 0.622065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
d5d2dc8936cc9e08ad9e7bb36e34b41a62766b66 | 129 | py | Python | notebooks/mass_converters.py | djfedos/djfedos-boostrap | 8a2fb1259c52134f6bcbda53821ab8f3b20e50e4 | [
"Apache-2.0"
] | null | null | null | notebooks/mass_converters.py | djfedos/djfedos-boostrap | 8a2fb1259c52134f6bcbda53821ab8f3b20e50e4 | [
"Apache-2.0"
] | 9 | 2021-11-03T18:57:45.000Z | 2022-03-26T06:29:38.000Z | notebooks/mass_converters.py | djfedos/djfedos-boostrap | 8a2fb1259c52134f6bcbda53821ab8f3b20e50e4 | [
"Apache-2.0"
] | null | null | null | def kg_to_lbs(mass):
mass /= 0.45
return round(mass, 2)
def lbs_to_kg(mass):
mass *= 0.45
return round(mass, 2) | 16.125 | 25 | 0.604651 | 24 | 129 | 3.083333 | 0.416667 | 0.216216 | 0.243243 | 0.297297 | 0.72973 | 0.72973 | 0.72973 | 0.72973 | 0 | 0 | 0 | 0.083333 | 0.255814 | 129 | 8 | 26 | 16.125 | 0.6875 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 10 |
d5e6534e24ce152ba18e8399605078cb74ec72a8 | 32,465 | py | Python | dymos/examples/brachistochrone/test/test_brachistochrone_rk4.py | Kenneth-T-Moore/dymos | 0ae11aab9cb69ac9dd1d784616d1dfe35a6e5b11 | [
"Apache-2.0"
] | null | null | null | dymos/examples/brachistochrone/test/test_brachistochrone_rk4.py | Kenneth-T-Moore/dymos | 0ae11aab9cb69ac9dd1d784616d1dfe35a6e5b11 | [
"Apache-2.0"
] | null | null | null | dymos/examples/brachistochrone/test/test_brachistochrone_rk4.py | Kenneth-T-Moore/dymos | 0ae11aab9cb69ac9dd1d784616d1dfe35a6e5b11 | [
"Apache-2.0"
] | null | null | null | import unittest
class TestBrachistochroneRK4Example(unittest.TestCase):
def test_brachistochrone_forward_shooting(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in ExplicitPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=10)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_backward_shooting(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(-2.0, -0.5))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=False, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in ExplicitPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=0)
phase.add_boundary_constraint('y', loc='final', equals=10)
phase.add_boundary_constraint('v', loc='final', equals=0)
# Minimize time at the end of the phase
phase.add_objective('time', loc='final', scaler=-10)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 1.8016
p['phase0.t_duration'] = -1.8016
p['phase0.states:x'] = 10
p['phase0.states:y'] = 5
p['phase0.states:v'] = 10
p['phase0.controls:theta'] = phase.interpolate(ys=[100.5, 5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], -1.8016, tolerance=1.0E-3)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 0,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 10,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_path_constrained_state(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in ExplicitPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_path_constraint('y', lower=5)
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=1)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.805, tolerance=1.0E-2)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_path_constrained_control(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in ExplicitPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_path_constraint('theta', lower=0.01, upper=110, units='deg')
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=1)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_path_constrained_control_rate(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in RungeKuttaPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_path_constraint('theta_rate', lower=-60, upper=60, units='deg/s')
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=1)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_path_constrained_ode_output(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in ExplicitPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_path_constraint('check', lower=-500, upper=500, shape=(1,), units='m/s')
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=1)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_boundary_constrained_control_rate(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in RungeKuttaPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_boundary_constraint('theta_rate2', loc='final', equals=0, units='deg/s**2')
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=1)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
assert_rel_error(self, p.get_val('phase0.timeseries.control_rates:theta_rate2')[-1, 0], 0,
tolerance=1.0E-6)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_boundary_constrained_design_parameter(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_polynomial_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
units='deg', order=2, lower=0.01, upper=179.9)
#
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in RungeKuttaPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_boundary_constraint('theta_rate2', loc='final', equals=0)
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=1)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.polynomial_controls:theta'][:, 0] = [0.01, 50, 100]
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
assert_rel_error(self,
p.get_val('phase0.timeseries.polynomial_control_rates:theta_rate2')[-1, 0],
0.0,
tolerance=1.0E-9)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_boundary_constrained_ode_output(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_polynomial_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
units='deg', order=1, lower=0.01, upper=179.9)
#
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in RungeKuttaPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_boundary_constraint('check', loc='final', lower=-500, upper=500,
shape=(1,), units='m/s')
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=1)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.polynomial_controls:theta'][:, 0] = [0.01, 100]
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
def test_brachistochrone_forward_shooting_path_constrained_time(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_rel_error
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
p = om.Problem(model=om.Group())
p.driver = om.ScipyOptimizeDriver()
phase = dm.Phase(ode_class=BrachistochroneODE,
transcription=dm.RungeKutta(num_segments=20))
p.model.add_subsystem('phase0', phase)
phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(0.5, 2.0))
phase.add_state('x', rate_source=BrachistochroneODE.states['x']['rate_source'],
units=BrachistochroneODE.states['x']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('y', rate_source=BrachistochroneODE.states['y']['rate_source'],
units=BrachistochroneODE.states['y']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_state('v', rate_source=BrachistochroneODE.states['v']['rate_source'],
targets=BrachistochroneODE.states['v']['targets'],
units=BrachistochroneODE.states['v']['units'],
fix_initial=True, fix_final=False, solve_segments=False)
phase.add_control('theta', targets=BrachistochroneODE.parameters['theta']['targets'],
continuity=True, rate_continuity=True,
units='deg', lower=0.01, upper=179.9)
phase.add_design_parameter('g', targets=BrachistochroneODE.parameters['g']['targets'],
units='m/s**2', opt=False, val=9.80665)
# Final state values can't be controlled with simple bounds in ExplicitPhase,
# so use nonlinear boundary constraints instead.
phase.add_boundary_constraint('x', loc='final', equals=10)
phase.add_boundary_constraint('y', loc='final', equals=5)
phase.add_path_constraint('time', lower=0.0, upper=2.0)
phase.add_path_constraint('time_phase', lower=0.0, upper=2.0)
# Minimize time at the end of the phase
phase.add_objective('time_phase', loc='final', scaler=10)
p.model.linear_solver = om.DirectSolver()
p.setup(check=True)
p['phase0.t_initial'] = 0.0
p['phase0.t_duration'] = 2.0
p['phase0.states:x'] = 0
p['phase0.states:y'] = 10
p['phase0.states:v'] = 0
p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5], nodes='control_input')
# Solve for the optimal trajectory
p.run_driver()
# Test the results
assert_rel_error(self, p['phase0.time'][-1], 1.8016, tolerance=1.0E-3)
# Generate the explicitly simulated trajectory
exp_out = phase.simulate()
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:x')[-1, 0], 10,
tolerance=1.0E-3)
assert_rel_error(self, exp_out.get_val('phase0.timeseries.states:y')[-1, 0], 5,
tolerance=1.0E-3)
| 44.411765 | 104 | 0.614539 | 3,919 | 32,465 | 4.924471 | 0.039296 | 0.037308 | 0.030468 | 0.029846 | 0.984196 | 0.980465 | 0.978393 | 0.973729 | 0.970154 | 0.970154 | 0 | 0.032545 | 0.25797 | 32,465 | 730 | 105 | 44.472603 | 0.768576 | 0.07907 | 0 | 0.896996 | 0 | 0 | 0.10967 | 0.028457 | 0 | 0 | 0 | 0 | 0.090129 | 1 | 0.021459 | false | 0 | 0.087983 | 0 | 0.111588 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d5eed770acab3a16cb74dce2e4f01996f48e00c1 | 131 | py | Python | src/neuralkg/__init__.py | zjukg/NeuralKG | 9fe97c29496ed08bc72c76e4dd71e72cbfbe5afa | [
"Apache-2.0"
] | 52 | 2022-01-19T09:08:07.000Z | 2022-03-31T07:02:17.000Z | src/neuralkg/__init__.py | zjukg/NeuralKG | 9fe97c29496ed08bc72c76e4dd71e72cbfbe5afa | [
"Apache-2.0"
] | 2 | 2022-03-08T20:47:38.000Z | 2022-03-18T09:52:33.000Z | src/neuralkg/__init__.py | zjukg/NeuralKG | 9fe97c29496ed08bc72c76e4dd71e72cbfbe5afa | [
"Apache-2.0"
] | 10 | 2022-02-28T07:54:14.000Z | 2022-03-23T14:06:33.000Z | from .data import *
from .eval_task import *
from .lit_model import *
from .loss import *
from .model import *
from .utils import * | 21.833333 | 24 | 0.732824 | 20 | 131 | 4.7 | 0.45 | 0.531915 | 0.319149 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175573 | 131 | 6 | 25 | 21.833333 | 0.87037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9108f20b4b0c27489a439153c0c1fbe701c63865 | 28,423 | py | Python | traffic_control/tests/test_additional_sign_real_api.py | City-of-Helsinki/city-infrastructure-platform | c14513a9e54405412085f1047f91ec58b263eac0 | [
"CC0-1.0"
] | 2 | 2020-11-23T22:08:58.000Z | 2022-03-02T13:13:20.000Z | traffic_control/tests/test_additional_sign_real_api.py | City-of-Helsinki/city-infrastructure-platform | c14513a9e54405412085f1047f91ec58b263eac0 | [
"CC0-1.0"
] | 170 | 2019-12-31T13:37:04.000Z | 2022-03-12T14:03:35.000Z | traffic_control/tests/test_additional_sign_real_api.py | City-of-Helsinki/city-infrastructure-platform | c14513a9e54405412085f1047f91ec58b263eac0 | [
"CC0-1.0"
] | 3 | 2020-05-08T05:58:02.000Z | 2022-03-15T16:07:25.000Z | import datetime
import pytest
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from rest_framework import status
from rest_framework_gis.fields import GeoJsonDict
from traffic_control.models import AdditionalSignContentReal, AdditionalSignReal
from .factories import (
add_additional_sign_real_operation,
get_additional_sign_content_real,
get_additional_sign_real,
get_api_client,
get_owner,
get_traffic_control_device_type,
get_traffic_sign_real,
get_user,
)
from .test_base_api_3d import test_point_2_3d
# AdditionalSignReal tests
# ===============================================
@pytest.mark.parametrize("geo_format", ("", "geojson"))
@pytest.mark.django_db
def test__additional_sign_real__list(geo_format):
client = get_api_client()
for owner_name in ["foo", "bar", "baz"]:
asr = get_additional_sign_real(owner=get_owner(name_fi=owner_name))
get_additional_sign_content_real(parent=asr)
response = client.get(
reverse("v1:additionalsignreal-list"), data={"geo_format": geo_format}
)
response_data = response.json()
assert response.status_code == status.HTTP_200_OK
assert response_data["count"] == 3
for result in response_data["results"]:
obj = AdditionalSignReal.objects.get(pk=result["id"])
assert result["content"][0]["id"] == str(obj.content.first().pk)
if geo_format == "geojson":
assert result["location"] == GeoJsonDict(obj.location.json)
else:
assert result["location"] == obj.location.ewkt
@pytest.mark.parametrize("geo_format", ("", "geojson"))
@pytest.mark.django_db
def test__additional_sign_real__detail(geo_format):
client = get_api_client()
asr = get_additional_sign_real()
ascr = get_additional_sign_content_real(parent=asr)
operation_1 = add_additional_sign_real_operation(
asr, operation_date=datetime.date(2020, 11, 5)
)
operation_2 = add_additional_sign_real_operation(
asr, operation_date=datetime.date(2020, 11, 15)
)
operation_3 = add_additional_sign_real_operation(
asr, operation_date=datetime.date(2020, 11, 10)
)
response = client.get(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}),
data={"geo_format": geo_format},
)
response_data = response.json()
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(asr.pk)
assert response_data["parent"] == str(asr.parent.pk)
assert response_data["content"][0]["id"] == str(ascr.pk)
# verify operations are ordered by operation_date
operation_ids = [operation["id"] for operation in response_data["operations"]]
assert operation_ids == [operation_1.id, operation_3.id, operation_2.id]
if geo_format == "geojson":
assert response_data["location"] == GeoJsonDict(asr.location.json)
else:
assert response_data["location"] == asr.location.ewkt
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__create_without_content(admin_user):
"""
Test that AdditionalSignReal API endpoint POST request doesn't raise
validation errors for missing content data and that the sign is created
successfully
"""
client = get_api_client(user=get_user(admin=admin_user))
traffic_sign_real = get_traffic_sign_real()
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
}
response = client.post(reverse("v1:additionalsignreal-list"), data=data)
response_data = response.json()
if admin_user:
assert response.status_code == status.HTTP_201_CREATED
assert AdditionalSignReal.objects.count() == 1
assert AdditionalSignContentReal.objects.count() == 0
asr = AdditionalSignReal.objects.first()
assert response_data["id"] == str(asr.pk)
assert response_data["parent"] == str(data["parent"])
assert response_data["owner"] == str(data["owner"])
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert AdditionalSignReal.objects.count() == 0
assert AdditionalSignContentReal.objects.count() == 0
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__create_with_content(admin_user):
"""
Test that AdditionalSignReal API endpoint POST request creates
AdditionalSignContent instances successfully
"""
client = get_api_client(user=get_user(admin=admin_user))
traffic_sign_real = get_traffic_sign_real()
dt = get_traffic_control_device_type()
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [
{"text": "Test content 1", "order": 1, "device_type": str(dt.pk)},
{"text": "Test content 2", "order": 2, "device_type": str(dt.pk)},
],
}
response = client.post(reverse("v1:additionalsignreal-list"), data=data)
response_data = response.json()
if admin_user:
assert response.status_code == status.HTTP_201_CREATED
assert AdditionalSignReal.objects.count() == 1
asr = AdditionalSignReal.objects.first()
assert response_data["id"] == str(asr.pk)
assert response_data["parent"] == str(data["parent"])
assert response_data["owner"] == str(data["owner"])
assert AdditionalSignContentReal.objects.count() == 2
ascr_1 = asr.content.order_by("order").first()
assert ascr_1.text == "Test content 1"
assert ascr_1.order == 1
assert ascr_1.device_type.pk == dt.pk
ascr_2 = asr.content.order_by("order").last()
assert ascr_2.text == "Test content 2"
assert ascr_2.order == 2
assert ascr_2.device_type.pk == dt.pk
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert AdditionalSignReal.objects.count() == 0
assert AdditionalSignContentReal.objects.count() == 0
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__create_with_content_id(admin_user):
"""
Test that AdditionalSignReal API endpoint POST request raises
an error if any of the content instances have a id defined.
Pre-existing content instances can not be assigned for newly
created additional signs.
"""
client = get_api_client(user=get_user(admin=admin_user))
traffic_sign_real = get_traffic_sign_real()
dt = get_traffic_control_device_type()
ascr = get_additional_sign_content_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [
{
"id": str(ascr.pk),
"text": "Test content",
"order": 1,
"device_type": str(dt.pk),
}
],
}
response = client.post(reverse("v1:additionalsignreal-list"), data=data)
response_data = response.json()
asr = AdditionalSignReal.objects.exclude(pk=ascr.parent.pk).first()
if admin_user:
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response_data == {
"content": [
{
"id": [
(
"Creating new additional sign with pre-existing "
"content instance is not allowed. Content objects "
'must not have "id" defined.'
)
]
}
]
}
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert not asr
assert AdditionalSignContentReal.objects.count() == 1
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__create_with_incomplete_data(admin_user):
"""
Test that AdditionalSignReal API endpoint POST request raises
validation error correctly if required data is missing.
"""
client = get_api_client(user=get_user(admin=admin_user))
traffic_sign_real = get_traffic_sign_real()
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [{"text": "Test content", "order": 1}],
}
response = client.post(reverse("v1:additionalsignreal-list"), data=data)
response_data = response.json()
if admin_user:
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response_data == {
"content": [{"device_type": [_("This field is required.")]}]
}
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert AdditionalSignReal.objects.count() == 0
assert AdditionalSignContentReal.objects.count() == 0
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__update_without_content(admin_user):
"""
Test that AdditionalSignReal API endpoint PUT request update
is successful when content is not defined. Old content should
be deleted.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
get_additional_sign_content_real(parent=asr)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner(name_en="New owner").pk,
}
assert AdditionalSignContentReal.objects.count() == 1
response = client.put(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
if admin_user:
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(asr.pk)
assert response_data["owner"] == str(data["owner"])
assert AdditionalSignContentReal.objects.count() == 0
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert AdditionalSignContentReal.objects.count() == 1
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__update_with_content(admin_user):
"""
Test that AdditionalSignReal API endpoint PUT request replaces
AdditionalSignContentReal instances when content does not have
id defined. A new content instance should be created.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
original_ascr = get_additional_sign_content_real(parent=asr)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [{"text": "New content", "order": 123, "device_type": str(dt.pk)}],
}
response = client.put(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
asr.refresh_from_db()
if admin_user:
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(asr.pk)
assert response_data["owner"] == str(data["owner"])
new_ascr = asr.content.first()
content = response_data["content"][0]
assert content["id"] == str(new_ascr.pk)
assert content["text"] == "New content"
assert content["order"] == 123
assert not AdditionalSignContentReal.objects.filter(
pk=original_ascr.pk
).exists()
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert asr.content.count() == 1
original_ascr.refresh_from_db()
assert original_ascr.parent == asr
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__update_with_content_id(admin_user):
"""
Test that AdditionalSignReal API endpoint PUT request updates
AdditionalSignContent instances successfully when id is defined.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
ascr = get_additional_sign_content_real(parent=asr)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [
{
"id": str(ascr.pk),
"text": "Updated content",
"order": 100,
"device_type": str(dt.pk),
}
],
}
response = client.put(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
asr.refresh_from_db()
ascr.refresh_from_db()
if admin_user:
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(asr.pk)
assert response_data["owner"] == str(data["owner"])
content = response_data["content"][0]
assert content["id"] == str(ascr.pk)
assert content["text"] == "Updated content"
assert content["order"] == 100
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert ascr.text != "Updated text"
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__update_with_unrelated_content_id(admin_user):
"""
Test that AdditionalSignReal API endpoint PUT request raises
validation error if content is not related to the parent
AdditionalSignReal.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
ascr = get_additional_sign_content_real(
parent=get_additional_sign_real(location=test_point_2_3d)
)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [
{
"id": str(ascr.pk),
"text": "Updated content",
"order": 100,
"device_type": str(dt.pk),
}
],
}
response = client.put(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
asr.refresh_from_db()
ascr.refresh_from_db()
if admin_user:
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response_data == {
"content": [
{
"id": [
(
"Updating content instances that do not belong to "
"this additional sign is not allowed."
)
]
}
]
}
assert ascr.parent != asr
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert ascr.text != "Updated text"
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__partial_update_without_content(admin_user):
"""
Test that AdditionalSignReal API endpoint PATCH request update
is successful when content is not defined. Old content should
not be deleted.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
get_additional_sign_content_real(parent=asr)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner(name_en="New owner").pk,
}
assert AdditionalSignContentReal.objects.count() == 1
response = client.patch(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
asr.refresh_from_db()
assert AdditionalSignContentReal.objects.count() == 1
assert asr.content.exists()
if admin_user:
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(asr.pk)
assert response_data["owner"] == str(data["owner"])
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert AdditionalSignContentReal.objects.count() == 1
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__partial_update_with_content(admin_user):
"""
Test that AdditionalSignReal API endpoint PATCH request replaces
AdditionalSignContentReal instances when content does not have
id defined. A new content instance should be created.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
original_ascr = get_additional_sign_content_real(parent=asr)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [{"text": "New content", "order": 123, "device_type": str(dt.pk)}],
}
response = client.patch(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
asr.refresh_from_db()
if admin_user:
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(asr.pk)
assert response_data["owner"] == str(data["owner"])
new_ascr = asr.content.first()
content = response_data["content"][0]
assert content["id"] == str(new_ascr.pk)
assert content["text"] == "New content"
assert content["order"] == 123
assert not AdditionalSignContentReal.objects.filter(
pk=original_ascr.pk
).exists()
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert asr.content.count() == 1
original_ascr.refresh_from_db()
assert original_ascr.parent == asr
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__partial_update_with_content_id(admin_user):
"""
Test that AdditionalSignReal API endpoint PATCH request updates
AdditionalSignContent instances successfully when id is defined.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
ascr = get_additional_sign_content_real(parent=asr)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [
{
"id": str(ascr.pk),
"text": "Updated content",
"order": 100,
"device_type": str(dt.pk),
}
],
}
response = client.patch(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
asr.refresh_from_db()
ascr.refresh_from_db()
if admin_user:
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(asr.pk)
assert response_data["owner"] == str(data["owner"])
content = response_data["content"][0]
assert content["id"] == str(ascr.pk)
assert content["text"] == "Updated content"
assert content["order"] == 100
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert ascr.text != "Updated text"
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__partial_update_with_unrelated_content_id(admin_user):
"""
Test that AdditionalSignReal API endpoint PATCH request raises
validation error if content is not related to the parent
AdditionalSignReal.
"""
client = get_api_client(user=get_user(admin=admin_user))
dt = get_traffic_control_device_type(code="A1234")
asr = get_additional_sign_real()
ascr = get_additional_sign_content_real(
parent=get_additional_sign_real(location=test_point_2_3d)
)
traffic_sign_real = get_traffic_sign_real(device_type=dt)
data = {
"parent": traffic_sign_real.pk,
"location": str(traffic_sign_real.location),
"owner": get_owner().pk,
"content": [
{
"id": str(ascr.pk),
"text": "Updated content",
"order": 100,
"device_type": str(dt.pk),
}
],
}
response = client.patch(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk}), data=data
)
response_data = response.json()
asr.refresh_from_db()
ascr.refresh_from_db()
if admin_user:
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response_data == {
"content": [
{
"id": [
(
"Updating content instances that do not belong to "
"this additional sign is not allowed."
)
]
}
]
}
assert ascr.parent != asr
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert asr.owner != data["owner"]
assert ascr.text != "Updated text"
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_real__delete(admin_user):
user = get_user(admin=admin_user)
client = get_api_client(user=user)
asr = get_additional_sign_real()
response = client.delete(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk})
)
if admin_user:
assert response.status_code == status.HTTP_204_NO_CONTENT
asr.refresh_from_db()
assert not asr.is_active
assert asr.deleted_by == user
assert asr.deleted_at
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
asr.refresh_from_db()
assert asr.is_active
assert not asr.deleted_by
assert not asr.deleted_at
@pytest.mark.django_db
def test__additional_sign_real__soft_deleted_get_404_response():
user = get_user()
client = get_api_client()
asr = get_additional_sign_real()
asr.soft_delete(user)
response = client.get(
reverse("v1:additionalsignreal-detail", kwargs={"pk": asr.pk})
)
assert response.status_code == status.HTTP_404_NOT_FOUND
# AdditionalSignContentReal tests
# ===============================================
@pytest.mark.django_db
def test__additional_sign_content_real__list():
client = get_api_client()
dt = get_traffic_control_device_type(code="H17.1")
for i in range(3):
get_additional_sign_content_real(order=i, device_type=dt)
response = client.get(reverse("v1:additionalsigncontentreal-list"))
response_data = response.json()
assert response.status_code == status.HTTP_200_OK
assert response_data["count"] == 3
for i in range(3):
result = response_data["results"][i]
assert result["order"] == i
assert result["device_type"] == str(dt.pk)
@pytest.mark.django_db
def test__additional_sign_content_real__detail():
client = get_api_client()
dt = get_traffic_control_device_type(code="H17.1")
ascr = get_additional_sign_content_real(device_type=dt)
response = client.get(
reverse("v1:additionalsigncontentreal-detail", kwargs={"pk": ascr.pk})
)
response_data = response.json()
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(ascr.pk)
assert response_data["parent"] == str(ascr.parent.pk)
assert response_data["order"] == 1
assert response_data["text"] == "Content"
assert response_data["device_type"] == str(dt.pk)
assert response_data["created_by"] == str(ascr.created_by.pk)
assert response_data["updated_by"] == str(ascr.updated_by.pk)
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_content_real__create(admin_user):
client = get_api_client(user=get_user(admin=admin_user))
asr = get_additional_sign_real()
dt = get_traffic_control_device_type(code="H17.1")
data = {
"parent": str(asr.pk),
"order": 1,
"text": "Content",
"device_type": str(dt.pk),
}
response = client.post(reverse("v1:additionalsigncontentreal-list"), data=data)
response_data = response.json()
if admin_user:
assert response.status_code == status.HTTP_201_CREATED
assert AdditionalSignContentReal.objects.count() == 1
assert response_data["id"] == str(AdditionalSignContentReal.objects.first().pk)
assert response_data["parent"] == data["parent"]
assert response_data["order"] == data["order"]
assert response_data["text"] == data["text"]
assert response_data["device_type"] == data["device_type"]
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert AdditionalSignContentReal.objects.count() == 0
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_content_real__update(admin_user):
client = get_api_client(user=get_user(admin=admin_user))
ascr = get_additional_sign_content_real()
dt = get_traffic_control_device_type(code="H17.1")
data = {
"parent": get_additional_sign_real(owner=get_owner(name_fi="New owner")).pk,
"text": "Updated content",
"order": 100,
"device_type": str(dt.pk),
}
response = client.put(
reverse("v1:additionalsigncontentreal-detail", kwargs={"pk": ascr.pk}),
data=data,
)
response_data = response.json()
if admin_user:
assert response.status_code == status.HTTP_200_OK
assert response_data["id"] == str(ascr.pk)
assert response_data["parent"] == str(data["parent"])
assert response_data["text"] == data["text"]
assert response_data["order"] == data["order"]
assert response_data["device_type"] == str(data["device_type"])
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
ascr.refresh_from_db()
assert ascr.parent.pk != data["parent"]
assert ascr.text != data["text"]
assert ascr.order != data["order"]
assert ascr.device_type.pk != data["device_type"]
@pytest.mark.parametrize("admin_user", (False, True))
@pytest.mark.django_db
def test__additional_sign_content_real__delete(admin_user):
user = get_user(admin=admin_user)
client = get_api_client(user=user)
ascr = get_additional_sign_content_real()
response = client.delete(
reverse("v1:additionalsigncontentreal-detail", kwargs={"pk": ascr.pk})
)
if admin_user:
assert response.status_code == status.HTTP_204_NO_CONTENT
assert not AdditionalSignContentReal.objects.filter(pk=ascr.pk).exists()
else:
assert response.status_code == status.HTTP_403_FORBIDDEN
assert AdditionalSignContentReal.objects.filter(pk=ascr.pk).exists()
| 36.024081 | 87 | 0.656264 | 3,433 | 28,423 | 5.15118 | 0.057676 | 0.038905 | 0.041563 | 0.050215 | 0.879439 | 0.846528 | 0.827245 | 0.816557 | 0.80734 | 0.775221 | 0 | 0.013601 | 0.226542 | 28,423 | 788 | 88 | 36.069797 | 0.790802 | 0.069029 | 0 | 0.709524 | 0 | 0 | 0.102741 | 0.023251 | 0 | 0 | 0 | 0 | 0.253968 | 1 | 0.033333 | false | 0 | 0.014286 | 0 | 0.047619 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
910b84052fe3c549145e31dcd2aeb7c6bf8f88f3 | 210 | py | Python | sbaas/analysis/analysis_stage02_quantification/__init__.py | SBRG/sbaas | 9df76bbffdd620cf8566744a2b0503935998fbe0 | [
"Apache-2.0"
] | 1 | 2017-05-13T04:35:08.000Z | 2017-05-13T04:35:08.000Z | sbaas/analysis/analysis_stage02_quantification/__init__.py | SBRG/sbaas | 9df76bbffdd620cf8566744a2b0503935998fbe0 | [
"Apache-2.0"
] | null | null | null | sbaas/analysis/analysis_stage02_quantification/__init__.py | SBRG/sbaas | 9df76bbffdd620cf8566744a2b0503935998fbe0 | [
"Apache-2.0"
] | 2 | 2017-02-23T19:32:38.000Z | 2020-01-14T19:13:05.000Z | from .stage02_quantification_query import stage02_quantification_query
from .stage02_quantification_execute import stage02_quantification_execute
from .stage02_quantification_io import stage02_quantification_io | 70 | 74 | 0.933333 | 24 | 210 | 7.666667 | 0.291667 | 0.684783 | 0.407609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060302 | 0.052381 | 210 | 3 | 75 | 70 | 0.864322 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e692e14adfa18b12d8b345166725a807a3c43286 | 1,281 | py | Python | cupytorch/autograd/grad_mode.py | KinglittleQ/cupytorch | fa81ee4a91f886d1855b198968e540b4fa0ca7d8 | [
"MIT"
] | 19 | 2022-01-16T14:10:26.000Z | 2022-03-09T12:34:55.000Z | cupytorch/autograd/grad_mode.py | KinglittleQ/cupytorch | fa81ee4a91f886d1855b198968e540b4fa0ca7d8 | [
"MIT"
] | null | null | null | cupytorch/autograd/grad_mode.py | KinglittleQ/cupytorch | fa81ee4a91f886d1855b198968e540b4fa0ca7d8 | [
"MIT"
] | 2 | 2022-02-16T12:22:05.000Z | 2022-02-21T07:30:24.000Z | from functools import wraps
from typing import Any
grad_enabled = True
def set_grad_enabled(val: bool) -> None:
global grad_enabled
grad_enabled = val
def get_grad_enabled() -> bool:
return grad_enabled
class no_grad:
def __enter__(self) -> None:
self.prev = grad_enabled
set_grad_enabled(False)
def __exit__(self, exc_type: Any, exc_value: Any, exc_tb: Any) -> None:
set_grad_enabled(self.prev)
def __call__(self, func):
@wraps(func)
def no_grad_func(*args, **kwargs):
self.prev = grad_enabled
set_grad_enabled(False)
ret = func(*args, **kwargs)
set_grad_enabled(self.prev)
return ret
return no_grad_func
class enable_grad:
def __enter__(self) -> None:
self.prev = grad_enabled
set_grad_enabled(True)
def __exit__(self, exc_type: Any, exc_value: Any, exc_tb: Any) -> None:
set_grad_enabled(self.prev)
def __call__(self, func):
@wraps(func)
def no_grad_func(*args, **kwargs):
self.prev = grad_enabled
set_grad_enabled(False)
ret = func(*args, **kwargs)
set_grad_enabled(self.prev)
return ret
return no_grad_func
| 22.875 | 75 | 0.613583 | 168 | 1,281 | 4.27381 | 0.202381 | 0.275766 | 0.175487 | 0.10585 | 0.753482 | 0.753482 | 0.753482 | 0.753482 | 0.746518 | 0.746518 | 0 | 0 | 0.295082 | 1,281 | 55 | 76 | 23.290909 | 0.795127 | 0 | 0 | 0.710526 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.263158 | false | 0 | 0.052632 | 0.026316 | 0.5 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e6d8e9e253abd2798911418043268e4a3e4f2608 | 15,693 | py | Python | tests/ut/python/dataset/test_datasets_penn_treebank.py | PowerOlive/mindspore | bda20724a94113cedd12c3ed9083141012da1f15 | [
"Apache-2.0"
] | 1 | 2022-03-05T02:59:21.000Z | 2022-03-05T02:59:21.000Z | tests/ut/python/dataset/test_datasets_penn_treebank.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | null | null | null | tests/ut/python/dataset/test_datasets_penn_treebank.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import pytest
import mindspore.dataset as ds
from mindspore import log as logger
from util import config_get_set_num_parallel_workers, config_get_set_seed
# Directory holding the PennTreebank test fixtures (train/test/valid files).
FILE_DIR = '../data/dataset/testPennTreebank'
def test_penn_treebank_dataset_one_file():
    """
    Feature: PennTreebankDataset.
    Description: Read the 'test' split, which corresponds to a single fixture file.
    Expectation: Exactly 3 rows are produced and each row is logged successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='test')
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        logger.info("{}".format(row["text"]))
        num_rows += 1
    assert num_rows == 3
def test_penn_treebank_dataset_train():
    """
    Feature: PennTreebankDataset.
    Description: Read the 'train' split, which corresponds to a single fixture file.
    Expectation: Exactly 3 rows are produced and each row is logged successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='train')
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        logger.info("{}".format(row["text"]))
        num_rows += 1
    assert num_rows == 3
def test_penn_treebank_dataset_valid():
    """
    Feature: PennTreebankDataset.
    Description: Read the 'valid' split, which corresponds to a single fixture file.
    Expectation: Exactly 3 rows are produced and each row is logged successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='valid')
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        logger.info("{}".format(row["text"]))
        num_rows += 1
    assert num_rows == 3
def test_penn_treebank_dataset_all_file():
    """
    Feature: PennTreebankDataset.
    Description: Read all splits at once with usage='all'.
    Expectation: All 9 rows (3 per split) are produced and logged successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='all')
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        logger.info("{}".format(row["text"]))
        num_rows += 1
    assert num_rows == 9
def test_penn_treebank_dataset_num_samples_none():
    """
    Feature: PennTreebankDataset.
    Description: Read all splits without passing num_samples (defaults to None).
    Expectation: Every available row (9 in total) is returned.
    """
    # num_samples is intentionally omitted so the dataset yields everything.
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='all')
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        logger.info("{}".format(row["text"]))
        num_rows += 1
    assert num_rows == 9
def test_penn_treebank_dataset_shuffle_false4():
    """
    Feature: PennTreebankDataset.
    Description: Read all splits with shuffle=False using 4 parallel workers.
    Expectation: Rows arrive in the deterministic worker-interleaved order.
    """
    original_num_parallel_workers = config_get_set_num_parallel_workers(4)
    original_seed = config_get_set_seed(987)
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='all', shuffle=False)
    expected_lines = [
        " no it was black friday ",
        " does the bank charge a fee for setting up the account ",
        " just ahead of them there was a huge fissure ",
        " clash twits poetry formulate flip loyalty splash ",
        " <unk> the wardrobe was very small in our room ",
        " <unk> <unk> the proportion of female workers in this company <unk> <unk> ",
        " you pay less for the supermaket's own brands ",
        " black white grapes ",
        " everyone in our football team is fuming ",
    ]
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        text = row["text"].item().decode("utf8")
        assert text == expected_lines[num_rows]
        num_rows += 1
    assert num_rows == 9
    # Restore the configuration captured above.
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
    ds.config.set_seed(original_seed)
def test_penn_treebank_dataset_shuffle_false1():
    """
    Feature: PennTreebankDataset.
    Description: Read all splits with shuffle=False using a single worker.
    Expectation: Rows arrive in the deterministic single-worker order.
    """
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)
    original_seed = config_get_set_seed(987)
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='all', shuffle=False)
    expected_lines = [
        " no it was black friday ",
        " clash twits poetry formulate flip loyalty splash ",
        " you pay less for the supermaket's own brands ",
        " does the bank charge a fee for setting up the account ",
        " <unk> the wardrobe was very small in our room ",
        " black white grapes ",
        " just ahead of them there was a huge fissure ",
        " <unk> <unk> the proportion of female workers in this company <unk> <unk> ",
        " everyone in our football team is fuming ",
    ]
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        text = row["text"].item().decode("utf8")
        assert text == expected_lines[num_rows]
        num_rows += 1
    assert num_rows == 9
    # Restore the configuration captured above.
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
    ds.config.set_seed(original_seed)
def test_penn_treebank_dataset_shuffle_files4():
    """
    Feature: PennTreebankDataset.
    Description: Read all splits with shuffle=Shuffle.FILES using 4 parallel workers.
    Expectation: File order is shuffled by the fixed seed; rows match the expected order.
    """
    original_num_parallel_workers = config_get_set_num_parallel_workers(4)
    original_seed = config_get_set_seed(135)
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='all', shuffle=ds.Shuffle.FILES)
    expected_lines = [
        " just ahead of them there was a huge fissure ",
        " does the bank charge a fee for setting up the account ",
        " no it was black friday ",
        " <unk> <unk> the proportion of female workers in this company <unk> <unk> ",
        " <unk> the wardrobe was very small in our room ",
        " clash twits poetry formulate flip loyalty splash ",
        " everyone in our football team is fuming ",
        " black white grapes ",
        " you pay less for the supermaket's own brands ",
    ]
    num_rows = 0
    for row in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
        text = row["text"].item().decode("utf8")
        assert text == expected_lines[num_rows]
        num_rows += 1
    assert num_rows == 9
    # Restore the configuration captured above.
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
    ds.config.set_seed(original_seed)
def test_penn_treebank_dataset_shuffle_files1():
    """
    Feature: Test PennTreebank Dataset.
    Description: read data with shuffle=Shuffle.FILES, 1 parallel worker, seed 135.
    Expectation: the data is processed successfully.
    """
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)
    original_seed = config_get_set_seed(135)
    data = ds.PennTreebankDataset(FILE_DIR, usage='all', shuffle=ds.Shuffle.FILES)
    expected = [" just ahead of them there was a huge fissure ",
                " <unk> <unk> the proportion of female workers in this company <unk> <unk> ",
                " everyone in our football team is fuming ",
                " does the bank charge a fee for setting up the account ",
                " <unk> the wardrobe was very small in our room ",
                " black white grapes ",
                " no it was black friday ",
                " clash twits poetry formulate flip loyalty splash ",
                " you pay less for the supermaket's own brands "]
    seen = 0
    for row in data.create_dict_iterator(num_epochs=1, output_numpy=True):
        assert row["text"].item().decode("utf8") == expected[seen]
        seen += 1
    assert seen == 9
    # Restore configuration
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
    ds.config.set_seed(original_seed)
def test_penn_treebank_dataset_shuffle_global4():
    """
    Feature: Test PennTreebank Dataset.
    Description: read data with shuffle=Shuffle.GLOBAL, 4 parallel workers, seed 246.
    Expectation: the data is processed successfully.
    """
    original_num_parallel_workers = config_get_set_num_parallel_workers(4)
    original_seed = config_get_set_seed(246)
    data = ds.PennTreebankDataset(FILE_DIR, usage='all', shuffle=ds.Shuffle.GLOBAL)
    expected = [" everyone in our football team is fuming ",
                " does the bank charge a fee for setting up the account ",
                " clash twits poetry formulate flip loyalty splash ",
                " no it was black friday ",
                " just ahead of them there was a huge fissure ",
                " <unk> <unk> the proportion of female workers in this company <unk> <unk> ",
                " you pay less for the supermaket's own brands ",
                " <unk> the wardrobe was very small in our room ",
                " black white grapes "]
    seen = 0
    for row in data.create_dict_iterator(num_epochs=1, output_numpy=True):
        assert row["text"].item().decode("utf8") == expected[seen]
        seen += 1
    assert seen == 9
    # Restore configuration
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
    ds.config.set_seed(original_seed)
def test_penn_treebank_dataset_shuffle_global1():
    """
    Feature: Test PennTreebank Dataset.
    Description: read data with shuffle=Shuffle.GLOBAL, 1 parallel worker, seed 246.
    Expectation: the data is processed successfully.
    """
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)
    original_seed = config_get_set_seed(246)
    data = ds.PennTreebankDataset(FILE_DIR, usage='all', shuffle=ds.Shuffle.GLOBAL)
    expected = [" everyone in our football team is fuming ",
                " does the bank charge a fee for setting up the account ",
                " clash twits poetry formulate flip loyalty splash ",
                " <unk> the wardrobe was very small in our room ",
                " black white grapes ",
                " you pay less for the supermaket's own brands ",
                " <unk> <unk> the proportion of female workers in this company <unk> <unk> ",
                " no it was black friday ",
                " just ahead of them there was a huge fissure "]
    seen = 0
    for row in data.create_dict_iterator(num_epochs=1, output_numpy=True):
        assert row["text"].item().decode("utf8") == expected[seen]
        seen += 1
    assert seen == 9
    # Restore configuration
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
    ds.config.set_seed(original_seed)
def test_penn_treebank_dataset_num_samples():
    """
    Feature: Test PennTreebank Dataset.
    Description: Test num_samples.
    Expectation: the data is processed successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='all', num_samples=2)
    rows = sum(1 for _ in dataset.create_dict_iterator(num_epochs=1, output_numpy=True))
    assert rows == 2
def test_penn_treebank_dataset_distribution():
    """
    Feature: Test PennTreebank Dataset.
    Description: read data sharded across 2 shards, checking shard 1's row count.
    Expectation: the data is processed successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='all', num_shards=2, shard_id=1)
    rows = sum(1 for _ in dataset.create_dict_iterator(num_epochs=1, output_numpy=True))
    assert rows == 5
def test_penn_treebank_dataset_repeat():
    """
    Feature: Test PennTreebank Dataset.
    Description: Test repeat.
    Expectation: the data is processed successfully.
    """
    data = ds.PennTreebankDataset(FILE_DIR, usage='test', shuffle=False)
    data = data.repeat(3)
    # One pass over the un-shuffled test split, repeated three times.
    one_epoch = [" no it was black friday ",
                 " clash twits poetry formulate flip loyalty splash ",
                 " you pay less for the supermaket's own brands "]
    expected = one_epoch * 3
    seen = 0
    for row in data.create_dict_iterator(num_epochs=1, output_numpy=True):
        assert row["text"].item().decode("utf8") == expected[seen]
        seen += 1
    assert seen == 9
def test_penn_treebank_dataset_get_datasetsize():
    """
    Feature: Test PennTreebank Dataset.
    Description: Test get_dataset_size.
    Expectation: the data is processed successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='test')
    assert dataset.get_dataset_size() == 3
def test_penn_treebank_dataset_to_device():
    """
    Feature: Test PennTreebank Dataset.
    Description: Test to_device.
    Expectation: the data is processed successfully.
    """
    dataset = ds.PennTreebankDataset(FILE_DIR, usage='test')
    device_queue = dataset.to_device()
    device_queue.send()
def test_penn_treebank_dataset_exceptions():
    """
    Feature: Test PennTreebank Dataset.
    Description: Test exceptions.
    Expectation: Exception thrown to be caught
    """
    # Negative num_samples must be rejected up front.
    with pytest.raises(ValueError) as err:
        _ = ds.PennTreebankDataset(FILE_DIR, usage='test', num_samples=-1)
    assert "num_samples exceeds the boundary" in str(err.value)
    # Non-existent and empty dataset paths must be rejected.
    with pytest.raises(ValueError) as err:
        _ = ds.PennTreebankDataset("does/not/exist/no.txt")
    assert str(err.value)
    with pytest.raises(ValueError) as err:
        _ = ds.PennTreebankDataset("")
    assert str(err.value)

    def raise_func(item):
        raise Exception("Error occur!")

    # An exception raised inside a map() PyFunc surfaces as a RuntimeError.
    with pytest.raises(RuntimeError) as err:
        data = ds.PennTreebankDataset(FILE_DIR)
        data = data.map(operations=raise_func, input_columns=["text"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
    assert "map operation: [PyFunc] failed. The corresponding data files" in str(err.value)
if __name__ == "__main__":
    # Allow running the whole PennTreebank dataset test suite directly as a
    # script (without pytest); each call below is one independent test case.
    test_penn_treebank_dataset_one_file()
    test_penn_treebank_dataset_train()
    test_penn_treebank_dataset_valid()
    test_penn_treebank_dataset_all_file()
    test_penn_treebank_dataset_num_samples_none()
    test_penn_treebank_dataset_shuffle_false4()
    test_penn_treebank_dataset_shuffle_false1()
    test_penn_treebank_dataset_shuffle_files4()
    test_penn_treebank_dataset_shuffle_files1()
    test_penn_treebank_dataset_shuffle_global4()
    test_penn_treebank_dataset_shuffle_global1()
    test_penn_treebank_dataset_num_samples()
    test_penn_treebank_dataset_distribution()
    test_penn_treebank_dataset_repeat()
    test_penn_treebank_dataset_get_datasetsize()
    test_penn_treebank_dataset_to_device()
    test_penn_treebank_dataset_exceptions()
| 40.761039 | 99 | 0.661441 | 2,014 | 15,693 | 4.950844 | 0.11718 | 0.027279 | 0.054558 | 0.078427 | 0.883462 | 0.846154 | 0.763314 | 0.74476 | 0.74476 | 0.731923 | 0 | 0.009671 | 0.248837 | 15,693 | 385 | 100 | 40.761039 | 0.836189 | 0.196521 | 0 | 0.720833 | 0 | 0 | 0.2613 | 0.004486 | 0 | 0 | 0 | 0 | 0.108333 | 1 | 0.075 | false | 0.004167 | 0.016667 | 0 | 0.091667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e6f315a69c841120068565f79b58684c6b3ba2b5 | 99,398 | py | Python | msgraph/cli/command_modules/mail/azext_mail/generated/custom.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | null | null | null | msgraph/cli/command_modules/mail/azext_mail/generated/custom.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | 22 | 2022-03-29T22:54:37.000Z | 2022-03-29T22:55:27.000Z | msgraph/cli/command_modules/mail/azext_mail/generated/custom.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
def mail_user_create_mail_folder(client,
                                 user_id,
                                 id_=None,
                                 child_folder_count=None,
                                 display_name=None,
                                 parent_folder_id=None,
                                 total_item_count=None,
                                 unread_item_count=None,
                                 child_folders=None,
                                 message_rules=None,
                                 messages=None,
                                 multi_value_extended_properties=None,
                                 single_value_extended_properties=None):
    """Create a new top-level mail folder for the given user.

    Assembles the mailFolder payload from the optional keyword arguments
    (unset fields are sent as None) and delegates to the service client.
    """
    payload = {
        'id': id_,
        'child_folder_count': child_folder_count,
        'display_name': display_name,
        'parent_folder_id': parent_folder_id,
        'total_item_count': total_item_count,
        'unread_item_count': unread_item_count,
        'child_folders': child_folders,
        'message_rules': message_rules,
        'messages': messages,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
    }
    return client.create_mail_folders(user_id=user_id, body=payload)
def mail_user_create_message(client,
                             user_id,
                             body,
                             id_=None,
                             categories=None,
                             change_key=None,
                             created_date_time=None,
                             last_modified_date_time=None,
                             bcc_recipients=None,
                             body_preview=None,
                             cc_recipients=None,
                             conversation_id=None,
                             conversation_index=None,
                             has_attachments=None,
                             importance=None,
                             inference_classification=None,
                             internet_message_headers=None,
                             internet_message_id=None,
                             is_delivery_receipt_requested=None,
                             is_draft=None,
                             is_read=None,
                             is_read_receipt_requested=None,
                             parent_folder_id=None,
                             received_date_time=None,
                             reply_to=None,
                             sent_date_time=None,
                             subject=None,
                             to_recipients=None,
                             unique_body=None,
                             web_link=None,
                             attachments=None,
                             extensions=None,
                             multi_value_extended_properties=None,
                             single_value_extended_properties=None,
                             email_address=None,
                             microsoft_graph_email_address=None,
                             completed_date_time=None,
                             due_date_time=None,
                             flag_status=None,
                             start_date_time=None):
    """Create a message in the user's mailbox.

    Bug fix: the generated code reassigned the required ``body`` parameter to
    a fresh dict and then executed ``body['body'] = body``, storing a
    self-referential dict and silently discarding the caller-supplied message
    body. The request payload is now built in a separate local (``payload``)
    so the ``body`` argument is forwarded intact.
    """
    payload = {
        'id': id_,
        'categories': categories,
        'change_key': change_key,
        'created_date_time': created_date_time,
        'last_modified_date_time': last_modified_date_time,
        'bcc_recipients': bcc_recipients,
        'body': body,  # caller-supplied message body, previously clobbered
        'body_preview': body_preview,
        'cc_recipients': cc_recipients,
        'conversation_id': conversation_id,
        'conversation_index': conversation_index,
        'has_attachments': has_attachments,
        'importance': importance,
        'inference_classification': inference_classification,
        'internet_message_headers': internet_message_headers,
        'internet_message_id': internet_message_id,
        'is_delivery_receipt_requested': is_delivery_receipt_requested,
        'is_draft': is_draft,
        'is_read': is_read,
        'is_read_receipt_requested': is_read_receipt_requested,
        'parent_folder_id': parent_folder_id,
        'received_date_time': received_date_time,
        'reply_to': reply_to,
        'sent_date_time': sent_date_time,
        'subject': subject,
        'to_recipients': to_recipients,
        'unique_body': unique_body,
        'web_link': web_link,
        'attachments': attachments,
        'extensions': extensions,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
    }
    # Nested recipient and follow-up-flag sub-objects.
    payload['sender'] = {'email_address': email_address}
    payload['from_property'] = {'email_address': microsoft_graph_email_address}
    payload['flag'] = {
        'completed_date_time': completed_date_time,
        'due_date_time': due_date_time,
        'flag_status': flag_status,
        'start_date_time': start_date_time,
    }
    return client.create_messages(user_id=user_id,
                                  body=payload)
def mail_user_delete_inference_classification(client, user_id, if_match=None):
    """Delete the user's inferenceClassification; ``if_match`` is forwarded unchanged."""
    return client.delete_inference_classification(user_id=user_id,
                                                 if_match=if_match)
def mail_user_delete_mail_folder(client, user_id, mail_folder_id, if_match=None):
    """Delete one of the user's mail folders; ``if_match`` is forwarded unchanged."""
    return client.delete_mail_folders(user_id=user_id,
                                      mail_folder_id=mail_folder_id,
                                      if_match=if_match)
def mail_user_delete_message(client, user_id, message_id, if_match=None):
    """Delete one of the user's messages; ``if_match`` is forwarded unchanged."""
    return client.delete_messages(user_id=user_id,
                                  message_id=message_id,
                                  if_match=if_match)
def mail_user_list_mail_folder(client, user_id, orderby=None, select=None, expand=None):
    """List the user's mail folders, forwarding the OData query options."""
    return client.list_mail_folders(user_id=user_id,
                                    orderby=orderby,
                                    select=select,
                                    expand=expand)
def mail_user_list_message(client, user_id, orderby=None, select=None, expand=None):
    """List the user's messages, forwarding the OData query options."""
    return client.list_messages(user_id=user_id,
                                orderby=orderby,
                                select=select,
                                expand=expand)
def mail_user_show_inference_classification(client, user_id, select=None, expand=None):
    """Fetch the user's inferenceClassification, forwarding the OData query options."""
    return client.get_inference_classification(user_id=user_id,
                                               select=select,
                                               expand=expand)
def mail_user_show_mail_folder(client, user_id, mail_folder_id, select=None, expand=None):
    """Fetch a single mail folder, forwarding the OData query options."""
    return client.get_mail_folders(user_id=user_id,
                                   mail_folder_id=mail_folder_id,
                                   select=select,
                                   expand=expand)
def mail_user_show_message(client, user_id, message_id, select=None, expand=None):
    """Fetch a single message, forwarding the OData query options."""
    return client.get_messages(user_id=user_id,
                               message_id=message_id,
                               select=select,
                               expand=expand)
def mail_user_update_inference_classification(client, user_id, id_=None, overrides=None):
    """Update the user's inferenceClassification from the given fields."""
    payload = {'id': id_, 'overrides': overrides}
    return client.update_inference_classification(user_id=user_id, body=payload)
def mail_user_update_mail_folder(client,
                                 user_id,
                                 mail_folder_id,
                                 id_=None,
                                 child_folder_count=None,
                                 display_name=None,
                                 parent_folder_id=None,
                                 total_item_count=None,
                                 unread_item_count=None,
                                 child_folders=None,
                                 message_rules=None,
                                 messages=None,
                                 multi_value_extended_properties=None,
                                 single_value_extended_properties=None):
    """Update an existing mail folder from the given optional fields."""
    payload = {
        'id': id_,
        'child_folder_count': child_folder_count,
        'display_name': display_name,
        'parent_folder_id': parent_folder_id,
        'total_item_count': total_item_count,
        'unread_item_count': unread_item_count,
        'child_folders': child_folders,
        'message_rules': message_rules,
        'messages': messages,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
    }
    return client.update_mail_folders(user_id=user_id,
                                      mail_folder_id=mail_folder_id,
                                      body=payload)
def mail_user_update_message(client,
                             user_id,
                             message_id,
                             body,
                             id_=None,
                             categories=None,
                             change_key=None,
                             created_date_time=None,
                             last_modified_date_time=None,
                             bcc_recipients=None,
                             body_preview=None,
                             cc_recipients=None,
                             conversation_id=None,
                             conversation_index=None,
                             has_attachments=None,
                             importance=None,
                             inference_classification=None,
                             internet_message_headers=None,
                             internet_message_id=None,
                             is_delivery_receipt_requested=None,
                             is_draft=None,
                             is_read=None,
                             is_read_receipt_requested=None,
                             parent_folder_id=None,
                             received_date_time=None,
                             reply_to=None,
                             sent_date_time=None,
                             subject=None,
                             to_recipients=None,
                             unique_body=None,
                             web_link=None,
                             attachments=None,
                             extensions=None,
                             multi_value_extended_properties=None,
                             single_value_extended_properties=None,
                             email_address=None,
                             microsoft_graph_email_address=None,
                             completed_date_time=None,
                             due_date_time=None,
                             flag_status=None,
                             start_date_time=None):
    """Update an existing message in the user's mailbox.

    Bug fix: the generated code reassigned the required ``body`` parameter to
    a fresh dict and then executed ``body['body'] = body``, storing a
    self-referential dict and silently discarding the caller-supplied message
    body. The request payload is now built in a separate local (``payload``)
    so the ``body`` argument is forwarded intact.
    """
    payload = {
        'id': id_,
        'categories': categories,
        'change_key': change_key,
        'created_date_time': created_date_time,
        'last_modified_date_time': last_modified_date_time,
        'bcc_recipients': bcc_recipients,
        'body': body,  # caller-supplied message body, previously clobbered
        'body_preview': body_preview,
        'cc_recipients': cc_recipients,
        'conversation_id': conversation_id,
        'conversation_index': conversation_index,
        'has_attachments': has_attachments,
        'importance': importance,
        'inference_classification': inference_classification,
        'internet_message_headers': internet_message_headers,
        'internet_message_id': internet_message_id,
        'is_delivery_receipt_requested': is_delivery_receipt_requested,
        'is_draft': is_draft,
        'is_read': is_read,
        'is_read_receipt_requested': is_read_receipt_requested,
        'parent_folder_id': parent_folder_id,
        'received_date_time': received_date_time,
        'reply_to': reply_to,
        'sent_date_time': sent_date_time,
        'subject': subject,
        'to_recipients': to_recipients,
        'unique_body': unique_body,
        'web_link': web_link,
        'attachments': attachments,
        'extensions': extensions,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
    }
    # Nested recipient and follow-up-flag sub-objects.
    payload['sender'] = {'email_address': email_address}
    payload['from_property'] = {'email_address': microsoft_graph_email_address}
    payload['flag'] = {
        'completed_date_time': completed_date_time,
        'due_date_time': due_date_time,
        'flag_status': flag_status,
        'start_date_time': start_date_time,
    }
    return client.update_messages(user_id=user_id,
                                  message_id=message_id,
                                  body=payload)
def mail_user_inference_classification_create_override(client,
                                                       user_id,
                                                       id_=None,
                                                       classify_as=None,
                                                       sender_email_address=None):
    """Create an inferenceClassification override for the given user."""
    payload = {
        'id': id_,
        'classify_as': classify_as,
        'sender_email_address': sender_email_address,
    }
    return client.create_overrides(user_id=user_id, body=payload)
def mail_user_inference_classification_delete_override(client,
                                                       user_id,
                                                       inference_classification_override_id,
                                                       if_match=None):
    """Delete an inferenceClassification override; ``if_match`` is forwarded unchanged."""
    return client.delete_overrides(
        user_id=user_id,
        inference_classification_override_id=inference_classification_override_id,
        if_match=if_match)
def mail_user_inference_classification_list_override(client,
                                                     user_id,
                                                     orderby=None,
                                                     select=None,
                                                     expand=None):
    """List the user's inferenceClassification overrides, forwarding OData options."""
    return client.list_overrides(user_id=user_id,
                                 orderby=orderby,
                                 select=select,
                                 expand=expand)
def mail_user_inference_classification_show_override(client,
                                                     user_id,
                                                     inference_classification_override_id,
                                                     select=None,
                                                     expand=None):
    """Fetch one inferenceClassification override, forwarding OData options."""
    return client.get_overrides(
        user_id=user_id,
        inference_classification_override_id=inference_classification_override_id,
        select=select,
        expand=expand)
def mail_user_inference_classification_update_override(client,
                                                       user_id,
                                                       inference_classification_override_id,
                                                       id_=None,
                                                       classify_as=None,
                                                       sender_email_address=None):
    """Update an inferenceClassification override from the given fields."""
    payload = {
        'id': id_,
        'classify_as': classify_as,
        'sender_email_address': sender_email_address,
    }
    return client.update_overrides(
        user_id=user_id,
        inference_classification_override_id=inference_classification_override_id,
        body=payload)
def mail_user_mail_folder_create_child_folder(client,
                                              user_id,
                                              mail_folder_id,
                                              id_=None,
                                              child_folder_count=None,
                                              display_name=None,
                                              parent_folder_id=None,
                                              total_item_count=None,
                                              unread_item_count=None,
                                              child_folders=None,
                                              message_rules=None,
                                              messages=None,
                                              multi_value_extended_properties=None,
                                              single_value_extended_properties=None):
    """Create a child folder under an existing mail folder."""
    payload = {
        'id': id_,
        'child_folder_count': child_folder_count,
        'display_name': display_name,
        'parent_folder_id': parent_folder_id,
        'total_item_count': total_item_count,
        'unread_item_count': unread_item_count,
        'child_folders': child_folders,
        'message_rules': message_rules,
        'messages': messages,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
    }
    return client.create_child_folders(user_id=user_id,
                                       mail_folder_id=mail_folder_id,
                                       body=payload)
def mail_user_mail_folder_create_message(client,
                                         user_id,
                                         mail_folder_id,
                                         body,
                                         id_=None,
                                         categories=None,
                                         change_key=None,
                                         created_date_time=None,
                                         last_modified_date_time=None,
                                         bcc_recipients=None,
                                         body_preview=None,
                                         cc_recipients=None,
                                         conversation_id=None,
                                         conversation_index=None,
                                         has_attachments=None,
                                         importance=None,
                                         inference_classification=None,
                                         internet_message_headers=None,
                                         internet_message_id=None,
                                         is_delivery_receipt_requested=None,
                                         is_draft=None,
                                         is_read=None,
                                         is_read_receipt_requested=None,
                                         parent_folder_id=None,
                                         received_date_time=None,
                                         reply_to=None,
                                         sent_date_time=None,
                                         subject=None,
                                         to_recipients=None,
                                         unique_body=None,
                                         web_link=None,
                                         attachments=None,
                                         extensions=None,
                                         multi_value_extended_properties=None,
                                         single_value_extended_properties=None,
                                         email_address=None,
                                         microsoft_graph_email_address=None,
                                         completed_date_time=None,
                                         due_date_time=None,
                                         flag_status=None,
                                         start_date_time=None):
    """Create a message inside a specific mail folder.

    Bug fix: the generated code reassigned the required ``body`` parameter to
    a fresh dict and then executed ``body['body'] = body``, storing a
    self-referential dict and silently discarding the caller-supplied message
    body. The request payload is now built in a separate local (``payload``)
    so the ``body`` argument is forwarded intact.
    """
    payload = {
        'id': id_,
        'categories': categories,
        'change_key': change_key,
        'created_date_time': created_date_time,
        'last_modified_date_time': last_modified_date_time,
        'bcc_recipients': bcc_recipients,
        'body': body,  # caller-supplied message body, previously clobbered
        'body_preview': body_preview,
        'cc_recipients': cc_recipients,
        'conversation_id': conversation_id,
        'conversation_index': conversation_index,
        'has_attachments': has_attachments,
        'importance': importance,
        'inference_classification': inference_classification,
        'internet_message_headers': internet_message_headers,
        'internet_message_id': internet_message_id,
        'is_delivery_receipt_requested': is_delivery_receipt_requested,
        'is_draft': is_draft,
        'is_read': is_read,
        'is_read_receipt_requested': is_read_receipt_requested,
        'parent_folder_id': parent_folder_id,
        'received_date_time': received_date_time,
        'reply_to': reply_to,
        'sent_date_time': sent_date_time,
        'subject': subject,
        'to_recipients': to_recipients,
        'unique_body': unique_body,
        'web_link': web_link,
        'attachments': attachments,
        'extensions': extensions,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
    }
    # Nested recipient and follow-up-flag sub-objects.
    payload['sender'] = {'email_address': email_address}
    payload['from_property'] = {'email_address': microsoft_graph_email_address}
    payload['flag'] = {
        'completed_date_time': completed_date_time,
        'due_date_time': due_date_time,
        'flag_status': flag_status,
        'start_date_time': start_date_time,
    }
    return client.create_messages(user_id=user_id,
                                  mail_folder_id=mail_folder_id,
                                  body=payload)
def mail_user_mail_folder_create_message_rule(client,
                                              user_id,
                                              mail_folder_id,
                                              id_=None,
                                              display_name=None,
                                              has_error=None,
                                              is_enabled=None,
                                              is_read_only=None,
                                              sequence=None,
                                              body_contains=None,
                                              body_or_subject_contains=None,
                                              categories=None,
                                              from_addresses=None,
                                              has_attachments=None,
                                              header_contains=None,
                                              importance=None,
                                              exceptions_is_approval_request=None,
                                              exceptions_is_automatic_forward=None,
                                              exceptions_is_automatic_reply=None,
                                              exceptions_is_encrypted=None,
                                              exceptions_is_meeting_request=None,
                                              exceptions_is_meeting_response=None,
                                              exceptions_is_non_delivery_report=None,
                                              exceptions_is_permission_controlled=None,
                                              exceptions_is_read_receipt=None,
                                              exceptions_is_signed=None,
                                              exceptions_is_voicemail=None,
                                              message_action_flag=None,
                                              not_sent_to_me=None,
                                              recipient_contains=None,
                                              sender_contains=None,
                                              sensitivity=None,
                                              sent_cc_me=None,
                                              sent_only_to_me=None,
                                              sent_to_addresses=None,
                                              sent_to_me=None,
                                              sent_to_or_cc_me=None,
                                              subject_contains=None,
                                              within_size_range=None,
                                              microsoft_graph_message_rule_predicates_body_contains=None,
                                              microsoft_graph_message_rule_predicates_body_or_subject_contains_body_or_subject_contains=None,
                                              microsoft_graph_message_rule_predicates_categories=None,
                                              microsoft_graph_message_rule_predicates_from_addresses=None,
                                              boolean_has_attachments=None,
                                              microsoft_graph_message_rule_predicates_header_contains=None,
                                              microsoft_graph_importance=None,
                                              is_approval_request=None,
                                              is_automatic_forward=None,
                                              is_automatic_reply=None,
                                              is_encrypted=None,
                                              is_meeting_request=None,
                                              is_meeting_response=None,
                                              is_non_delivery_report=None,
                                              is_permission_controlled=None,
                                              is_read_receipt=None,
                                              is_signed=None,
                                              is_voicemail=None,
                                              microsoft_graph_message_action_flag_message_action_flag=None,
                                              boolean_not_sent_to_me=None,
                                              microsoft_graph_message_rule_predicates_recipient_contains=None,
                                              microsoft_graph_message_rule_predicates_sender_contains=None,
                                              microsoft_graph_sensitivity=None,
                                              boolean_sent_cc_me=None,
                                              boolean_sent_only_to_me=None,
                                              microsoft_graph_message_rule_predicates_sent_to_addresses_sent_to_addresses=None,
                                              boolean_sent_to_me=None,
                                              boolean_sent_to_or_cc_me=None,
                                              microsoft_graph_message_rule_predicates_subject_contains=None,
                                              microsoft_graph_size_range_within_size_range=None,
                                              assign_categories=None,
                                              copy_to_folder=None,
                                              delete=None,
                                              forward_as_attachment_to=None,
                                              forward_to=None,
                                              mark_as_read=None,
                                              mark_importance=None,
                                              move_to_folder=None,
                                              permanent_delete=None,
                                              redirect_to=None,
                                              stop_processing_rules=None):
    """Create a messageRule on a mail folder.

    Auto-generated mapper: the flat CLI keyword arguments are folded into a
    nested messageRule payload with three sub-objects — ``exceptions``
    (plainly-named predicate args), ``conditions`` (the long
    ``microsoft_graph_*``/``boolean_*`` prefixed args), and ``actions`` —
    then forwarded to the service client. Unset arguments are sent as None.
    """
    body = {}
    # Top-level rule properties.
    body['id'] = id_
    body['display_name'] = display_name
    body['has_error'] = has_error
    body['is_enabled'] = is_enabled
    body['is_read_only'] = is_read_only
    body['sequence'] = sequence
    # Predicates that EXEMPT a message from the rule.
    body['exceptions'] = {}
    body['exceptions']['body_contains'] = body_contains
    body['exceptions']['body_or_subject_contains'] = body_or_subject_contains
    body['exceptions']['categories'] = categories
    body['exceptions']['from_addresses'] = from_addresses
    body['exceptions']['has_attachments'] = has_attachments
    body['exceptions']['header_contains'] = header_contains
    body['exceptions']['importance'] = importance
    body['exceptions']['is_approval_request'] = exceptions_is_approval_request
    body['exceptions']['is_automatic_forward'] = exceptions_is_automatic_forward
    body['exceptions']['is_automatic_reply'] = exceptions_is_automatic_reply
    body['exceptions']['is_encrypted'] = exceptions_is_encrypted
    body['exceptions']['is_meeting_request'] = exceptions_is_meeting_request
    body['exceptions']['is_meeting_response'] = exceptions_is_meeting_response
    body['exceptions']['is_non_delivery_report'] = exceptions_is_non_delivery_report
    body['exceptions']['is_permission_controlled'] = exceptions_is_permission_controlled
    body['exceptions']['is_read_receipt'] = exceptions_is_read_receipt
    body['exceptions']['is_signed'] = exceptions_is_signed
    body['exceptions']['is_voicemail'] = exceptions_is_voicemail
    body['exceptions']['message_action_flag'] = message_action_flag
    body['exceptions']['not_sent_to_me'] = not_sent_to_me
    body['exceptions']['recipient_contains'] = recipient_contains
    body['exceptions']['sender_contains'] = sender_contains
    body['exceptions']['sensitivity'] = sensitivity
    body['exceptions']['sent_cc_me'] = sent_cc_me
    body['exceptions']['sent_only_to_me'] = sent_only_to_me
    body['exceptions']['sent_to_addresses'] = sent_to_addresses
    body['exceptions']['sent_to_me'] = sent_to_me
    body['exceptions']['sent_to_or_cc_me'] = sent_to_or_cc_me
    body['exceptions']['subject_contains'] = subject_contains
    body['exceptions']['within_size_range'] = within_size_range
    # Predicates that TRIGGER the rule (prefixed argument names).
    body['conditions'] = {}
    body['conditions']['body_contains'] = microsoft_graph_message_rule_predicates_body_contains
    body['conditions']['body_or_subject_contains'] = microsoft_graph_message_rule_predicates_body_or_subject_contains_body_or_subject_contains
    body['conditions']['categories'] = microsoft_graph_message_rule_predicates_categories
    body['conditions']['from_addresses'] = microsoft_graph_message_rule_predicates_from_addresses
    body['conditions']['has_attachments'] = boolean_has_attachments
    body['conditions']['header_contains'] = microsoft_graph_message_rule_predicates_header_contains
    body['conditions']['importance'] = microsoft_graph_importance
    body['conditions']['is_approval_request'] = is_approval_request
    body['conditions']['is_automatic_forward'] = is_automatic_forward
    body['conditions']['is_automatic_reply'] = is_automatic_reply
    body['conditions']['is_encrypted'] = is_encrypted
    body['conditions']['is_meeting_request'] = is_meeting_request
    body['conditions']['is_meeting_response'] = is_meeting_response
    body['conditions']['is_non_delivery_report'] = is_non_delivery_report
    body['conditions']['is_permission_controlled'] = is_permission_controlled
    body['conditions']['is_read_receipt'] = is_read_receipt
    body['conditions']['is_signed'] = is_signed
    body['conditions']['is_voicemail'] = is_voicemail
    body['conditions']['message_action_flag'] = microsoft_graph_message_action_flag_message_action_flag
    body['conditions']['not_sent_to_me'] = boolean_not_sent_to_me
    body['conditions']['recipient_contains'] = microsoft_graph_message_rule_predicates_recipient_contains
    body['conditions']['sender_contains'] = microsoft_graph_message_rule_predicates_sender_contains
    body['conditions']['sensitivity'] = microsoft_graph_sensitivity
    body['conditions']['sent_cc_me'] = boolean_sent_cc_me
    body['conditions']['sent_only_to_me'] = boolean_sent_only_to_me
    body['conditions']['sent_to_addresses'] = microsoft_graph_message_rule_predicates_sent_to_addresses_sent_to_addresses
    body['conditions']['sent_to_me'] = boolean_sent_to_me
    body['conditions']['sent_to_or_cc_me'] = boolean_sent_to_or_cc_me
    body['conditions']['subject_contains'] = microsoft_graph_message_rule_predicates_subject_contains
    body['conditions']['within_size_range'] = microsoft_graph_size_range_within_size_range
    # Actions applied when the rule matches.
    body['actions'] = {}
    body['actions']['assign_categories'] = assign_categories
    body['actions']['copy_to_folder'] = copy_to_folder
    body['actions']['delete'] = delete
    body['actions']['forward_as_attachment_to'] = forward_as_attachment_to
    body['actions']['forward_to'] = forward_to
    body['actions']['mark_as_read'] = mark_as_read
    body['actions']['mark_importance'] = mark_importance
    body['actions']['move_to_folder'] = move_to_folder
    body['actions']['permanent_delete'] = permanent_delete
    body['actions']['redirect_to'] = redirect_to
    body['actions']['stop_processing_rules'] = stop_processing_rules
    return client.create_message_rules(user_id=user_id,
                                       mail_folder_id=mail_folder_id,
                                       body=body)
def mail_user_mail_folder_create_multi_value_extended_property(client,
                                                               user_id,
                                                               mail_folder_id,
                                                               id_=None,
                                                               value=None):
    """Create a multi-value extended property on a mail folder."""
    payload = {'id': id_, 'value': value}
    return client.create_multi_value_extended_properties(user_id=user_id,
                                                         mail_folder_id=mail_folder_id,
                                                         body=payload)
def mail_user_mail_folder_create_single_value_extended_property(client,
                                                                user_id,
                                                                mail_folder_id,
                                                                id_=None,
                                                                value=None):
    """Create a single-value extended property on a mail folder."""
    payload = {'id': id_, 'value': value}
    return client.create_single_value_extended_properties(user_id=user_id,
                                                          mail_folder_id=mail_folder_id,
                                                          body=payload)
def mail_user_mail_folder_delete_child_folder(client,
                                              user_id,
                                              mail_folder_id,
                                              mail_folder_id1,
                                              if_match=None):
    """Delete a child folder (``mail_folder_id1``) of the given mail folder."""
    return client.delete_child_folders(user_id=user_id,
                                       mail_folder_id=mail_folder_id,
                                       mail_folder_id1=mail_folder_id1,
                                       if_match=if_match)
def mail_user_mail_folder_delete_message(client,
                                         user_id,
                                         mail_folder_id,
                                         message_id,
                                         if_match=None):
    """Delete a message inside a specific mail folder."""
    return client.delete_messages(user_id=user_id,
                                  mail_folder_id=mail_folder_id,
                                  message_id=message_id,
                                  if_match=if_match)
def mail_user_mail_folder_delete_message_rule(client,
                                              user_id,
                                              mail_folder_id,
                                              message_rule_id,
                                              if_match=None):
    """Delete a message rule from a mail folder."""
    return client.delete_message_rules(user_id=user_id,
                                       mail_folder_id=mail_folder_id,
                                       message_rule_id=message_rule_id,
                                       if_match=if_match)
def mail_user_mail_folder_delete_multi_value_extended_property(client,
                                                               user_id,
                                                               mail_folder_id,
                                                               multi_value_legacy_extended_property_id,
                                                               if_match=None):
    """Delete a multi-value extended property from a mail folder."""
    return client.delete_multi_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        if_match=if_match)
def mail_user_mail_folder_delete_single_value_extended_property(client,
user_id,
mail_folder_id,
single_value_legacy_extended_property_id,
if_match=None):
return client.delete_single_value_extended_properties(user_id=user_id,
mail_folder_id=mail_folder_id,
single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
if_match=if_match)
def mail_user_mail_folder_list_child_folder(client,
                                            user_id,
                                            mail_folder_id,
                                            orderby=None,
                                            select=None,
                                            expand=None):
    """List the child folders of the given mail folder (OData query options pass through)."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_child_folders(user_id=user_id,
                                     mail_folder_id=mail_folder_id,
                                     **query)


def mail_user_mail_folder_list_message(client,
                                       user_id,
                                       mail_folder_id,
                                       orderby=None,
                                       select=None,
                                       expand=None):
    """List the messages in the given mail folder (OData query options pass through)."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_messages(user_id=user_id,
                                mail_folder_id=mail_folder_id,
                                **query)


def mail_user_mail_folder_list_message_rule(client,
                                            user_id,
                                            mail_folder_id,
                                            orderby=None,
                                            select=None,
                                            expand=None):
    """List the message rules of the given mail folder (OData query options pass through)."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_message_rules(user_id=user_id,
                                     mail_folder_id=mail_folder_id,
                                     **query)


def mail_user_mail_folder_list_multi_value_extended_property(client,
                                                             user_id,
                                                             mail_folder_id,
                                                             orderby=None,
                                                             select=None,
                                                             expand=None):
    """List the multi-value extended properties of the given mail folder."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_multi_value_extended_properties(user_id=user_id,
                                                       mail_folder_id=mail_folder_id,
                                                       **query)


def mail_user_mail_folder_list_single_value_extended_property(client,
                                                              user_id,
                                                              mail_folder_id,
                                                              orderby=None,
                                                              select=None,
                                                              expand=None):
    """List the single-value extended properties of the given mail folder."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_single_value_extended_properties(user_id=user_id,
                                                        mail_folder_id=mail_folder_id,
                                                        **query)
def mail_user_mail_folder_show_child_folder(client,
                                            user_id,
                                            mail_folder_id,
                                            mail_folder_id1,
                                            select=None,
                                            expand=None):
    """Get a single child folder of the given mail folder."""
    return client.get_child_folders(user_id=user_id,
                                    mail_folder_id=mail_folder_id,
                                    mail_folder_id1=mail_folder_id1,
                                    **dict(select=select, expand=expand))


def mail_user_mail_folder_show_message(client,
                                       user_id,
                                       mail_folder_id,
                                       message_id,
                                       select=None,
                                       expand=None):
    """Get a single message from the given mail folder."""
    return client.get_messages(user_id=user_id,
                               mail_folder_id=mail_folder_id,
                               message_id=message_id,
                               **dict(select=select, expand=expand))


def mail_user_mail_folder_show_message_rule(client,
                                            user_id,
                                            mail_folder_id,
                                            message_rule_id,
                                            select=None,
                                            expand=None):
    """Get a single message rule from the given mail folder."""
    return client.get_message_rules(user_id=user_id,
                                    mail_folder_id=mail_folder_id,
                                    message_rule_id=message_rule_id,
                                    **dict(select=select, expand=expand))


def mail_user_mail_folder_show_multi_value_extended_property(client,
                                                             user_id,
                                                             mail_folder_id,
                                                             multi_value_legacy_extended_property_id,
                                                             select=None,
                                                             expand=None):
    """Get a single multi-value extended property of the given mail folder."""
    return client.get_multi_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        select=select,
        expand=expand)


def mail_user_mail_folder_show_single_value_extended_property(client,
                                                              user_id,
                                                              mail_folder_id,
                                                              single_value_legacy_extended_property_id,
                                                              select=None,
                                                              expand=None):
    """Get a single single-value extended property of the given mail folder."""
    return client.get_single_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        select=select,
        expand=expand)
def mail_user_mail_folder_update_child_folder(client,
                                              user_id,
                                              mail_folder_id,
                                              mail_folder_id1,
                                              id_=None,
                                              child_folder_count=None,
                                              display_name=None,
                                              parent_folder_id=None,
                                              total_item_count=None,
                                              unread_item_count=None,
                                              child_folders=None,
                                              message_rules=None,
                                              messages=None,
                                              multi_value_extended_properties=None,
                                              single_value_extended_properties=None):
    """Update a child folder of the given mail folder.

    Assembles the mailFolder body from the flattened arguments (unset ones
    are passed through as None) and delegates to ``client.update_child_folders``.
    """
    payload = {
        'id': id_,
        'child_folder_count': child_folder_count,
        'display_name': display_name,
        'parent_folder_id': parent_folder_id,
        'total_item_count': total_item_count,
        'unread_item_count': unread_item_count,
        'child_folders': child_folders,
        'message_rules': message_rules,
        'messages': messages,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
    }
    return client.update_child_folders(user_id=user_id,
                                       mail_folder_id=mail_folder_id,
                                       mail_folder_id1=mail_folder_id1,
                                       body=payload)
def mail_user_mail_folder_update_message(client,
                                         user_id,
                                         mail_folder_id,
                                         message_id,
                                         body,
                                         id_=None,
                                         categories=None,
                                         change_key=None,
                                         created_date_time=None,
                                         last_modified_date_time=None,
                                         bcc_recipients=None,
                                         body_preview=None,
                                         cc_recipients=None,
                                         conversation_id=None,
                                         conversation_index=None,
                                         has_attachments=None,
                                         importance=None,
                                         inference_classification=None,
                                         internet_message_headers=None,
                                         internet_message_id=None,
                                         is_delivery_receipt_requested=None,
                                         is_draft=None,
                                         is_read=None,
                                         is_read_receipt_requested=None,
                                         parent_folder_id=None,
                                         received_date_time=None,
                                         reply_to=None,
                                         sent_date_time=None,
                                         subject=None,
                                         to_recipients=None,
                                         unique_body=None,
                                         web_link=None,
                                         attachments=None,
                                         extensions=None,
                                         multi_value_extended_properties=None,
                                         single_value_extended_properties=None,
                                         email_address=None,
                                         microsoft_graph_email_address=None,
                                         completed_date_time=None,
                                         due_date_time=None,
                                         flag_status=None,
                                         start_date_time=None):
    """Update a message in a user's mail folder.

    ``body`` is the message body (itemBody) argument; the remaining flattened
    arguments populate the rest of the message payload. ``email_address`` /
    ``microsoft_graph_email_address`` fill the flattened ``sender`` /
    ``from_property`` sub-objects, and the four ``*_date_time``/``flag_status``
    trailing arguments fill the ``flag`` sub-object.
    """
    # BUG FIX: the generated code rebound the required ``body`` parameter to a
    # fresh dict and then executed ``body['body'] = body``, which silently
    # dropped the caller-supplied message body and made the payload contain
    # itself (a self-referential dict that recurses forever on serialization).
    # Alias the parameter before reusing the name.
    message_body = body
    body = {
        'id': id_,
        'categories': categories,
        'change_key': change_key,
        'created_date_time': created_date_time,
        'last_modified_date_time': last_modified_date_time,
        'bcc_recipients': bcc_recipients,
        'body': message_body,
        'body_preview': body_preview,
        'cc_recipients': cc_recipients,
        'conversation_id': conversation_id,
        'conversation_index': conversation_index,
        'has_attachments': has_attachments,
        'importance': importance,
        'inference_classification': inference_classification,
        'internet_message_headers': internet_message_headers,
        'internet_message_id': internet_message_id,
        'is_delivery_receipt_requested': is_delivery_receipt_requested,
        'is_draft': is_draft,
        'is_read': is_read,
        'is_read_receipt_requested': is_read_receipt_requested,
        'parent_folder_id': parent_folder_id,
        'received_date_time': received_date_time,
        'reply_to': reply_to,
        'sent_date_time': sent_date_time,
        'subject': subject,
        'to_recipients': to_recipients,
        'unique_body': unique_body,
        'web_link': web_link,
        'attachments': attachments,
        'extensions': extensions,
        'multi_value_extended_properties': multi_value_extended_properties,
        'single_value_extended_properties': single_value_extended_properties,
        # Flattened single-field sub-objects.
        'sender': {'email_address': email_address},
        'from_property': {'email_address': microsoft_graph_email_address},
        'flag': {
            'completed_date_time': completed_date_time,
            'due_date_time': due_date_time,
            'flag_status': flag_status,
            'start_date_time': start_date_time,
        },
    }
    return client.update_messages(user_id=user_id,
                                  mail_folder_id=mail_folder_id,
                                  message_id=message_id,
                                  body=body)
def mail_user_mail_folder_update_message_rule(client,
                                              user_id,
                                              mail_folder_id,
                                              message_rule_id,
                                              id_=None,
                                              display_name=None,
                                              has_error=None,
                                              is_enabled=None,
                                              is_read_only=None,
                                              sequence=None,
                                              body_contains=None,
                                              body_or_subject_contains=None,
                                              categories=None,
                                              from_addresses=None,
                                              has_attachments=None,
                                              header_contains=None,
                                              importance=None,
                                              exceptions_is_approval_request=None,
                                              exceptions_is_automatic_forward=None,
                                              exceptions_is_automatic_reply=None,
                                              exceptions_is_encrypted=None,
                                              exceptions_is_meeting_request=None,
                                              exceptions_is_meeting_response=None,
                                              exceptions_is_non_delivery_report=None,
                                              exceptions_is_permission_controlled=None,
                                              exceptions_is_read_receipt=None,
                                              exceptions_is_signed=None,
                                              exceptions_is_voicemail=None,
                                              message_action_flag=None,
                                              not_sent_to_me=None,
                                              recipient_contains=None,
                                              sender_contains=None,
                                              sensitivity=None,
                                              sent_cc_me=None,
                                              sent_only_to_me=None,
                                              sent_to_addresses=None,
                                              sent_to_me=None,
                                              sent_to_or_cc_me=None,
                                              subject_contains=None,
                                              within_size_range=None,
                                              microsoft_graph_message_rule_predicates_body_contains=None,
                                              microsoft_graph_message_rule_predicates_body_or_subject_contains_body_or_subject_contains=None,
                                              microsoft_graph_message_rule_predicates_categories=None,
                                              microsoft_graph_message_rule_predicates_from_addresses=None,
                                              boolean_has_attachments=None,
                                              microsoft_graph_message_rule_predicates_header_contains=None,
                                              microsoft_graph_importance=None,
                                              is_approval_request=None,
                                              is_automatic_forward=None,
                                              is_automatic_reply=None,
                                              is_encrypted=None,
                                              is_meeting_request=None,
                                              is_meeting_response=None,
                                              is_non_delivery_report=None,
                                              is_permission_controlled=None,
                                              is_read_receipt=None,
                                              is_signed=None,
                                              is_voicemail=None,
                                              microsoft_graph_message_action_flag_message_action_flag=None,
                                              boolean_not_sent_to_me=None,
                                              microsoft_graph_message_rule_predicates_recipient_contains=None,
                                              microsoft_graph_message_rule_predicates_sender_contains=None,
                                              microsoft_graph_sensitivity=None,
                                              boolean_sent_cc_me=None,
                                              boolean_sent_only_to_me=None,
                                              microsoft_graph_message_rule_predicates_sent_to_addresses_sent_to_addresses=None,
                                              boolean_sent_to_me=None,
                                              boolean_sent_to_or_cc_me=None,
                                              microsoft_graph_message_rule_predicates_subject_contains=None,
                                              microsoft_graph_size_range_within_size_range=None,
                                              assign_categories=None,
                                              copy_to_folder=None,
                                              delete=None,
                                              forward_as_attachment_to=None,
                                              forward_to=None,
                                              mark_as_read=None,
                                              mark_importance=None,
                                              move_to_folder=None,
                                              permanent_delete=None,
                                              redirect_to=None,
                                              stop_processing_rules=None):
    """Update a message rule of the given mail folder.

    The flattened arguments map onto three sub-objects of the messageRule
    payload: the plain predicate names fill ``exceptions``, the
    ``microsoft_graph_*``/``boolean_*``/``is_*`` names fill ``conditions``,
    and the action names fill ``actions``.
    """
    # Exception predicates (plainly named flattened arguments).
    exceptions = {
        'body_contains': body_contains,
        'body_or_subject_contains': body_or_subject_contains,
        'categories': categories,
        'from_addresses': from_addresses,
        'has_attachments': has_attachments,
        'header_contains': header_contains,
        'importance': importance,
        'is_approval_request': exceptions_is_approval_request,
        'is_automatic_forward': exceptions_is_automatic_forward,
        'is_automatic_reply': exceptions_is_automatic_reply,
        'is_encrypted': exceptions_is_encrypted,
        'is_meeting_request': exceptions_is_meeting_request,
        'is_meeting_response': exceptions_is_meeting_response,
        'is_non_delivery_report': exceptions_is_non_delivery_report,
        'is_permission_controlled': exceptions_is_permission_controlled,
        'is_read_receipt': exceptions_is_read_receipt,
        'is_signed': exceptions_is_signed,
        'is_voicemail': exceptions_is_voicemail,
        'message_action_flag': message_action_flag,
        'not_sent_to_me': not_sent_to_me,
        'recipient_contains': recipient_contains,
        'sender_contains': sender_contains,
        'sensitivity': sensitivity,
        'sent_cc_me': sent_cc_me,
        'sent_only_to_me': sent_only_to_me,
        'sent_to_addresses': sent_to_addresses,
        'sent_to_me': sent_to_me,
        'sent_to_or_cc_me': sent_to_or_cc_me,
        'subject_contains': subject_contains,
        'within_size_range': within_size_range,
    }
    # Condition predicates (disambiguated flattened argument names).
    conditions = {
        'body_contains': microsoft_graph_message_rule_predicates_body_contains,
        'body_or_subject_contains': microsoft_graph_message_rule_predicates_body_or_subject_contains_body_or_subject_contains,
        'categories': microsoft_graph_message_rule_predicates_categories,
        'from_addresses': microsoft_graph_message_rule_predicates_from_addresses,
        'has_attachments': boolean_has_attachments,
        'header_contains': microsoft_graph_message_rule_predicates_header_contains,
        'importance': microsoft_graph_importance,
        'is_approval_request': is_approval_request,
        'is_automatic_forward': is_automatic_forward,
        'is_automatic_reply': is_automatic_reply,
        'is_encrypted': is_encrypted,
        'is_meeting_request': is_meeting_request,
        'is_meeting_response': is_meeting_response,
        'is_non_delivery_report': is_non_delivery_report,
        'is_permission_controlled': is_permission_controlled,
        'is_read_receipt': is_read_receipt,
        'is_signed': is_signed,
        'is_voicemail': is_voicemail,
        'message_action_flag': microsoft_graph_message_action_flag_message_action_flag,
        'not_sent_to_me': boolean_not_sent_to_me,
        'recipient_contains': microsoft_graph_message_rule_predicates_recipient_contains,
        'sender_contains': microsoft_graph_message_rule_predicates_sender_contains,
        'sensitivity': microsoft_graph_sensitivity,
        'sent_cc_me': boolean_sent_cc_me,
        'sent_only_to_me': boolean_sent_only_to_me,
        'sent_to_addresses': microsoft_graph_message_rule_predicates_sent_to_addresses_sent_to_addresses,
        'sent_to_me': boolean_sent_to_me,
        'sent_to_or_cc_me': boolean_sent_to_or_cc_me,
        'subject_contains': microsoft_graph_message_rule_predicates_subject_contains,
        'within_size_range': microsoft_graph_size_range_within_size_range,
    }
    actions = {
        'assign_categories': assign_categories,
        'copy_to_folder': copy_to_folder,
        'delete': delete,
        'forward_as_attachment_to': forward_as_attachment_to,
        'forward_to': forward_to,
        'mark_as_read': mark_as_read,
        'mark_importance': mark_importance,
        'move_to_folder': move_to_folder,
        'permanent_delete': permanent_delete,
        'redirect_to': redirect_to,
        'stop_processing_rules': stop_processing_rules,
    }
    payload = {
        'id': id_,
        'display_name': display_name,
        'has_error': has_error,
        'is_enabled': is_enabled,
        'is_read_only': is_read_only,
        'sequence': sequence,
        'exceptions': exceptions,
        'conditions': conditions,
        'actions': actions,
    }
    return client.update_message_rules(user_id=user_id,
                                       mail_folder_id=mail_folder_id,
                                       message_rule_id=message_rule_id,
                                       body=payload)
def mail_user_mail_folder_update_multi_value_extended_property(client,
                                                               user_id,
                                                               mail_folder_id,
                                                               multi_value_legacy_extended_property_id,
                                                               id_=None,
                                                               value=None):
    """Update a multi-value extended property of the given mail folder."""
    payload = {'id': id_, 'value': value}
    return client.update_multi_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        body=payload)


def mail_user_mail_folder_update_single_value_extended_property(client,
                                                                user_id,
                                                                mail_folder_id,
                                                                single_value_legacy_extended_property_id,
                                                                id_=None,
                                                                value=None):
    """Update a single-value extended property of the given mail folder."""
    payload = {'id': id_, 'value': value}
    return client.update_single_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        body=payload)
def mail_user_mail_folder_message_create_attachment(client,
                                                    user_id,
                                                    mail_folder_id,
                                                    message_id,
                                                    content_type,
                                                    id_=None,
                                                    is_inline=None,
                                                    last_modified_date_time=None,
                                                    name=None,
                                                    size=None):
    """Add an attachment to a message in the given mail folder."""
    payload = {
        'id': id_,
        'content_type': content_type,
        'is_inline': is_inline,
        'last_modified_date_time': last_modified_date_time,
        'name': name,
        'size': size,
    }
    return client.create_attachments(user_id=user_id,
                                     mail_folder_id=mail_folder_id,
                                     message_id=message_id,
                                     body=payload)


def mail_user_mail_folder_message_create_extension(client,
                                                   user_id,
                                                   mail_folder_id,
                                                   message_id,
                                                   id_=None):
    """Add an open extension to a message in the given mail folder."""
    return client.create_extensions(user_id=user_id,
                                    mail_folder_id=mail_folder_id,
                                    message_id=message_id,
                                    body={'id': id_})


def mail_user_mail_folder_message_create_multi_value_extended_property(client,
                                                                       user_id,
                                                                       mail_folder_id,
                                                                       message_id,
                                                                       id_=None,
                                                                       value=None):
    """Create a multi-value extended property on a message in the given mail folder."""
    return client.create_multi_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        body={'id': id_, 'value': value})


def mail_user_mail_folder_message_create_single_value_extended_property(client,
                                                                        user_id,
                                                                        mail_folder_id,
                                                                        message_id,
                                                                        id_=None,
                                                                        value=None):
    """Create a single-value extended property on a message in the given mail folder."""
    return client.create_single_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        body={'id': id_, 'value': value})
def mail_user_mail_folder_message_delete_attachment(client,
                                                    user_id,
                                                    mail_folder_id,
                                                    message_id,
                                                    attachment_id,
                                                    if_match=None):
    """Delete an attachment from a message in the given mail folder."""
    request = dict(user_id=user_id,
                   mail_folder_id=mail_folder_id,
                   message_id=message_id,
                   attachment_id=attachment_id,
                   if_match=if_match)
    return client.delete_attachments(**request)


def mail_user_mail_folder_message_delete_extension(client,
                                                   user_id,
                                                   mail_folder_id,
                                                   message_id,
                                                   extension_id,
                                                   if_match=None):
    """Delete an open extension from a message in the given mail folder."""
    request = dict(user_id=user_id,
                   mail_folder_id=mail_folder_id,
                   message_id=message_id,
                   extension_id=extension_id,
                   if_match=if_match)
    return client.delete_extensions(**request)


def mail_user_mail_folder_message_delete_multi_value_extended_property(client,
                                                                       user_id,
                                                                       mail_folder_id,
                                                                       message_id,
                                                                       multi_value_legacy_extended_property_id,
                                                                       if_match=None):
    """Delete a multi-value extended property from a message in the given mail folder."""
    request = dict(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        if_match=if_match)
    return client.delete_multi_value_extended_properties(**request)


def mail_user_mail_folder_message_delete_single_value_extended_property(client,
                                                                        user_id,
                                                                        mail_folder_id,
                                                                        message_id,
                                                                        single_value_legacy_extended_property_id,
                                                                        if_match=None):
    """Delete a single-value extended property from a message in the given mail folder."""
    request = dict(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        if_match=if_match)
    return client.delete_single_value_extended_properties(**request)
def mail_user_mail_folder_message_list_attachment(client,
                                                  user_id,
                                                  mail_folder_id,
                                                  message_id,
                                                  orderby=None,
                                                  select=None,
                                                  expand=None):
    """List the attachments of a message in the given mail folder."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_attachments(user_id=user_id,
                                   mail_folder_id=mail_folder_id,
                                   message_id=message_id,
                                   **query)


def mail_user_mail_folder_message_list_extension(client,
                                                 user_id,
                                                 mail_folder_id,
                                                 message_id,
                                                 orderby=None,
                                                 select=None,
                                                 expand=None):
    """List the open extensions of a message in the given mail folder."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_extensions(user_id=user_id,
                                  mail_folder_id=mail_folder_id,
                                  message_id=message_id,
                                  **query)


def mail_user_mail_folder_message_list_multi_value_extended_property(client,
                                                                     user_id,
                                                                     mail_folder_id,
                                                                     message_id,
                                                                     orderby=None,
                                                                     select=None,
                                                                     expand=None):
    """List the multi-value extended properties of a message in the given mail folder."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_multi_value_extended_properties(user_id=user_id,
                                                       mail_folder_id=mail_folder_id,
                                                       message_id=message_id,
                                                       **query)


def mail_user_mail_folder_message_list_single_value_extended_property(client,
                                                                      user_id,
                                                                      mail_folder_id,
                                                                      message_id,
                                                                      orderby=None,
                                                                      select=None,
                                                                      expand=None):
    """List the single-value extended properties of a message in the given mail folder."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_single_value_extended_properties(user_id=user_id,
                                                        mail_folder_id=mail_folder_id,
                                                        message_id=message_id,
                                                        **query)
def mail_user_mail_folder_message_show_attachment(client,
                                                  user_id,
                                                  mail_folder_id,
                                                  message_id,
                                                  attachment_id,
                                                  select=None,
                                                  expand=None):
    """Get a single attachment of a message in the given mail folder."""
    return client.get_attachments(user_id=user_id,
                                  mail_folder_id=mail_folder_id,
                                  message_id=message_id,
                                  attachment_id=attachment_id,
                                  **dict(select=select, expand=expand))


def mail_user_mail_folder_message_show_extension(client,
                                                 user_id,
                                                 mail_folder_id,
                                                 message_id,
                                                 extension_id,
                                                 select=None,
                                                 expand=None):
    """Get a single open extension of a message in the given mail folder."""
    return client.get_extensions(user_id=user_id,
                                 mail_folder_id=mail_folder_id,
                                 message_id=message_id,
                                 extension_id=extension_id,
                                 **dict(select=select, expand=expand))


def mail_user_mail_folder_message_show_multi_value_extended_property(client,
                                                                     user_id,
                                                                     mail_folder_id,
                                                                     message_id,
                                                                     multi_value_legacy_extended_property_id,
                                                                     select=None,
                                                                     expand=None):
    """Get a single multi-value extended property of a message in the given mail folder."""
    return client.get_multi_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        select=select,
        expand=expand)


def mail_user_mail_folder_message_show_single_value_extended_property(client,
                                                                      user_id,
                                                                      mail_folder_id,
                                                                      message_id,
                                                                      single_value_legacy_extended_property_id,
                                                                      select=None,
                                                                      expand=None):
    """Get a single single-value extended property of a message in the given mail folder."""
    return client.get_single_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        select=select,
        expand=expand)
def mail_user_mail_folder_message_update_attachment(client,
                                                    user_id,
                                                    mail_folder_id,
                                                    message_id,
                                                    attachment_id,
                                                    content_type,
                                                    id_=None,
                                                    is_inline=None,
                                                    last_modified_date_time=None,
                                                    name=None,
                                                    size=None):
    """Update an attachment of a message in the given mail folder."""
    payload = {
        'id': id_,
        'content_type': content_type,
        'is_inline': is_inline,
        'last_modified_date_time': last_modified_date_time,
        'name': name,
        'size': size,
    }
    return client.update_attachments(user_id=user_id,
                                     mail_folder_id=mail_folder_id,
                                     message_id=message_id,
                                     attachment_id=attachment_id,
                                     body=payload)


def mail_user_mail_folder_message_update_extension(client,
                                                   user_id,
                                                   mail_folder_id,
                                                   message_id,
                                                   extension_id,
                                                   id_=None):
    """Update an open extension of a message in the given mail folder."""
    return client.update_extensions(user_id=user_id,
                                    mail_folder_id=mail_folder_id,
                                    message_id=message_id,
                                    extension_id=extension_id,
                                    body={'id': id_})


def mail_user_mail_folder_message_update_multi_value_extended_property(client,
                                                                       user_id,
                                                                       mail_folder_id,
                                                                       message_id,
                                                                       multi_value_legacy_extended_property_id,
                                                                       id_=None,
                                                                       value=None):
    """Update a multi-value extended property of a message in the given mail folder."""
    return client.update_multi_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        body={'id': id_, 'value': value})


def mail_user_mail_folder_message_update_single_value_extended_property(client,
                                                                        user_id,
                                                                        mail_folder_id,
                                                                        message_id,
                                                                        single_value_legacy_extended_property_id,
                                                                        id_=None,
                                                                        value=None):
    """Update a single-value extended property of a message in the given mail folder."""
    return client.update_single_value_extended_properties(
        user_id=user_id,
        mail_folder_id=mail_folder_id,
        message_id=message_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        body={'id': id_, 'value': value})
def mail_user_message_create_attachment(client,
                                        user_id,
                                        message_id,
                                        content_type,
                                        id_=None,
                                        is_inline=None,
                                        last_modified_date_time=None,
                                        name=None,
                                        size=None):
    """Add an attachment to a user's message."""
    payload = {
        'id': id_,
        'content_type': content_type,
        'is_inline': is_inline,
        'last_modified_date_time': last_modified_date_time,
        'name': name,
        'size': size,
    }
    return client.create_attachments(user_id=user_id,
                                     message_id=message_id,
                                     body=payload)


def mail_user_message_create_extension(client,
                                       user_id,
                                       message_id,
                                       id_=None):
    """Add an open extension to a user's message."""
    return client.create_extensions(user_id=user_id,
                                    message_id=message_id,
                                    body={'id': id_})


def mail_user_message_create_multi_value_extended_property(client,
                                                           user_id,
                                                           message_id,
                                                           id_=None,
                                                           value=None):
    """Create a multi-value extended property on a user's message."""
    return client.create_multi_value_extended_properties(
        user_id=user_id,
        message_id=message_id,
        body={'id': id_, 'value': value})


def mail_user_message_create_single_value_extended_property(client,
                                                            user_id,
                                                            message_id,
                                                            id_=None,
                                                            value=None):
    """Create a single-value extended property on a user's message."""
    return client.create_single_value_extended_properties(
        user_id=user_id,
        message_id=message_id,
        body={'id': id_, 'value': value})
def mail_user_message_delete_attachment(client,
                                        user_id,
                                        message_id,
                                        attachment_id,
                                        if_match=None):
    """Delete an attachment from a user's message (``if_match`` is the ETag)."""
    request = dict(user_id=user_id,
                   message_id=message_id,
                   attachment_id=attachment_id,
                   if_match=if_match)
    return client.delete_attachments(**request)


def mail_user_message_delete_extension(client,
                                       user_id,
                                       message_id,
                                       extension_id,
                                       if_match=None):
    """Delete an open extension from a user's message (``if_match`` is the ETag)."""
    request = dict(user_id=user_id,
                   message_id=message_id,
                   extension_id=extension_id,
                   if_match=if_match)
    return client.delete_extensions(**request)


def mail_user_message_delete_multi_value_extended_property(client,
                                                           user_id,
                                                           message_id,
                                                           multi_value_legacy_extended_property_id,
                                                           if_match=None):
    """Delete a multi-value extended property from a user's message."""
    request = dict(
        user_id=user_id,
        message_id=message_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        if_match=if_match)
    return client.delete_multi_value_extended_properties(**request)


def mail_user_message_delete_single_value_extended_property(client,
                                                            user_id,
                                                            message_id,
                                                            single_value_legacy_extended_property_id,
                                                            if_match=None):
    """Delete a single-value extended property from a user's message."""
    request = dict(
        user_id=user_id,
        message_id=message_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        if_match=if_match)
    return client.delete_single_value_extended_properties(**request)
def mail_user_message_list_attachment(client,
                                      user_id,
                                      message_id,
                                      orderby=None,
                                      select=None,
                                      expand=None):
    """List the attachments of a user's message (OData query options pass through)."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_attachments(user_id=user_id,
                                   message_id=message_id,
                                   **query)


def mail_user_message_list_extension(client,
                                     user_id,
                                     message_id,
                                     orderby=None,
                                     select=None,
                                     expand=None):
    """List the open extensions of a user's message (OData query options pass through)."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_extensions(user_id=user_id,
                                  message_id=message_id,
                                  **query)


def mail_user_message_list_multi_value_extended_property(client,
                                                         user_id,
                                                         message_id,
                                                         orderby=None,
                                                         select=None,
                                                         expand=None):
    """List the multi-value extended properties of a user's message."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_multi_value_extended_properties(user_id=user_id,
                                                       message_id=message_id,
                                                       **query)


def mail_user_message_list_single_value_extended_property(client,
                                                          user_id,
                                                          message_id,
                                                          orderby=None,
                                                          select=None,
                                                          expand=None):
    """List the single-value extended properties of a user's message."""
    query = dict(orderby=orderby, select=select, expand=expand)
    return client.list_single_value_extended_properties(user_id=user_id,
                                                        message_id=message_id,
                                                        **query)
def mail_user_message_show_attachment(client,
                                      user_id,
                                      message_id,
                                      attachment_id,
                                      select=None,
                                      expand=None):
    """Get a single attachment of a user's message."""
    return client.get_attachments(user_id=user_id,
                                  message_id=message_id,
                                  attachment_id=attachment_id,
                                  **dict(select=select, expand=expand))


def mail_user_message_show_extension(client,
                                     user_id,
                                     message_id,
                                     extension_id,
                                     select=None,
                                     expand=None):
    """Get a single open extension of a user's message."""
    return client.get_extensions(user_id=user_id,
                                 message_id=message_id,
                                 extension_id=extension_id,
                                 **dict(select=select, expand=expand))


def mail_user_message_show_multi_value_extended_property(client,
                                                         user_id,
                                                         message_id,
                                                         multi_value_legacy_extended_property_id,
                                                         select=None,
                                                         expand=None):
    """Get a single multi-value extended property of a user's message."""
    return client.get_multi_value_extended_properties(
        user_id=user_id,
        message_id=message_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        select=select,
        expand=expand)


def mail_user_message_show_single_value_extended_property(client,
                                                          user_id,
                                                          message_id,
                                                          single_value_legacy_extended_property_id,
                                                          select=None,
                                                          expand=None):
    """Get a single single-value extended property of a user's message."""
    return client.get_single_value_extended_properties(
        user_id=user_id,
        message_id=message_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        select=select,
        expand=expand)
def mail_user_message_update_attachment(client,
                                        user_id,
                                        message_id,
                                        attachment_id,
                                        content_type,
                                        id_=None,
                                        is_inline=None,
                                        last_modified_date_time=None,
                                        name=None,
                                        size=None):
    """Update an attachment of a user's message."""
    payload = {
        'id': id_,
        'content_type': content_type,
        'is_inline': is_inline,
        'last_modified_date_time': last_modified_date_time,
        'name': name,
        'size': size,
    }
    return client.update_attachments(user_id=user_id,
                                     message_id=message_id,
                                     attachment_id=attachment_id,
                                     body=payload)


def mail_user_message_update_extension(client,
                                       user_id,
                                       message_id,
                                       extension_id,
                                       id_=None):
    """Update an open extension of a user's message."""
    return client.update_extensions(user_id=user_id,
                                    message_id=message_id,
                                    extension_id=extension_id,
                                    body={'id': id_})


def mail_user_message_update_multi_value_extended_property(client,
                                                           user_id,
                                                           message_id,
                                                           multi_value_legacy_extended_property_id,
                                                           id_=None,
                                                           value=None):
    """Update a multi-value extended property of a user's message."""
    return client.update_multi_value_extended_properties(
        user_id=user_id,
        message_id=message_id,
        multi_value_legacy_extended_property_id=multi_value_legacy_extended_property_id,
        body={'id': id_, 'value': value})


def mail_user_message_update_single_value_extended_property(client,
                                                            user_id,
                                                            message_id,
                                                            single_value_legacy_extended_property_id,
                                                            id_=None,
                                                            value=None):
    """Update a single-value extended property of a user's message."""
    return client.update_single_value_extended_properties(
        user_id=user_id,
        message_id=message_id,
        single_value_legacy_extended_property_id=single_value_legacy_extended_property_id,
        body={'id': id_, 'value': value})
| 55.37493 | 142 | 0.438731 | 7,682 | 99,398 | 5.180942 | 0.023431 | 0.037538 | 0.045226 | 0.050653 | 0.989221 | 0.983643 | 0.976332 | 0.97098 | 0.955201 | 0.939623 | 0 | 0.000183 | 0.506399 | 99,398 | 1,794 | 143 | 55.405797 | 0.811018 | 0.00503 | 0 | 0.93321 | 0 | 0 | 0.07361 | 0.014825 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05133 | false | 0 | 0.012369 | 0.030921 | 0.115028 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e6fc64acf6914ad9a67d6d1db4fc5a83a220ce4a | 75,771 | py | Python | dojo/unittests/test_deduplication_logic.py | joebasirico/django-DefectDojo | da4ab0033329cee4108630e2b7e54a2d073932b6 | [
"BSD-3-Clause"
] | 2 | 2021-09-19T23:19:12.000Z | 2022-03-26T10:20:49.000Z | dojo/unittests/test_deduplication_logic.py | joebasirico/django-DefectDojo | da4ab0033329cee4108630e2b7e54a2d073932b6 | [
"BSD-3-Clause"
] | 167 | 2021-03-15T13:49:54.000Z | 2022-03-31T09:10:30.000Z | dojo/unittests/test_deduplication_logic.py | Security-Phoenix-demo/django-DefectDojo | 645778824ed1db71405343f2d2a6e05a4156c899 | [
"BSD-3-Clause"
] | 4 | 2016-09-19T17:33:39.000Z | 2018-12-10T07:55:45.000Z | from django.test import TestCase
from dojo.models import Finding, User, Product, Endpoint, Endpoint_Status, Test, Engagement
from dojo.models import System_Settings
from crum import impersonate
import logging
logger = logging.getLogger(__name__)
deduplicationLogger = logging.getLogger("dojo.specific-loggers.deduplication")
# things to consider:
# - cross scanner deduplication is still flaky: if some scanners don't provide severity but another does, the hash_code will be different so no deduplication happens.
# so I couldn't create any good tests
# - hash_code is only calculated once and never changed. should we add a feature to run dedupe when somebody modifies a finding? bulk edit action to trigger dedupe?
# - deduplication is using the default ordering for findings, so most of the time this means a new finding will be marked as duplicate of the most recent existing finding
# that matches the criteria. I think it would be better to consider the oldest existing findings first? Otherwise we have the chance that an old finding becomes
# marked as duplicate of a newer one at some point.
# - legacy: if file_path and line or both empty and there are no endpoints, no dedupe will happen. Is this desirable or a BUG?
# - DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE should:
# - try to match on unique_id first before falling back to hash_code. Currently it just takes the first finding it can find
# that matches either the hash_code or unique id.
# - If the unique_id does NOT match, the finding is still considered for dedupe if the hash_code matches. We may need to forbid as the unique_id should be leading for the same test_type
# false positive history observations:
# - doesn't respect dedupe_on_engagement
# - if endpoints are mismatching, it falls back to comparing just the title + test_type or cwe + test_type. this leads to false positive false positives (pun intended)
# - I think this feature should be redesigned and use the dedupe algo to find "identical/similar findings" to copy false_p status from
# test data summary
# product 1: Python How-to
# engagement 2: April monthly engagement (dedupe_inside: True)
# test 13: ZAP Scan (algo=hash_code, dynamic=True)
# no findings
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# product 2: Security How-to
# engagement 1: 1st Quarter Engagement (dedupe_inside: True)
# test 3: ZAP Scan (algo=hash_code, dynamic=True)
# findings:
# 2 : "High Impact Test Fin": High : act: True : ver: True : mit: False: dup: False: dup_id: None: hash_code: 8cba854f0b70f8a25064952402fbe30c728c0017b83c245d786366956044e0bf: eps: 0: notes: []: uid: None
# 3 : "High Impact Test Fin": High : act: True : ver: True : mit: False: dup: True : dup_id: 2 : hash_code: 8cba854f0b70f8a25064952402fbe30c728c0017b83c245d786366956044e0bf: eps: 0: notes: []: uid: None
# 4 : "High Impact Test Fin": High : act: True : ver: True : mit: False: dup: True : dup_id: 2 : hash_code: 8cba854f0b70f8a25064952402fbe30c728c0017b83c245d786366956044e0bf: eps: 0: notes: []: uid: None
# 5 : "High Impact Test Fin": High : act: True : ver: True : mit: False: dup: True : dup_id: 2 : hash_code: 8cba854f0b70f8a25064952402fbe30c728c0017b83c245d786366956044e0bf: eps: 0: notes: []: uid: None
# 6 : "High Impact Test Fin": High : act: True : ver: True : mit: False: dup: True : dup_id: 2 : hash_code: 8cba854f0b70f8a25064952402fbe30c728c0017b83c245d786366956044e0bf: eps: 0: notes: []: uid: None
# 7 : "DUMMY FINDING ": High : act: False: ver: False: mit: False: dup: False: dup_id: None: hash_code: c89d25e445b088ba339908f68e15e3177b78d22f3039d1bfea51c4be251bf4e0: eps: 0: notes: [1]: uid: None
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# test 14: ZAP Scan (algo=hash_code, dynamic=True)
# no findings
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# engagement 4: April monthly engagement (dedupe_inside: True)
# test 4: ZAP Scan (algo=hash_code, dynamic=True)
# no findings
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# engagement 5: April monthly engagement (dedupe_inside: True)
# test 55: Checkmarx Scan detailed (algo=unique_id_from_tool, dynamic=False)
# findings:
# 124 : "Low Impact Test Find": Low : act: True : ver: True : mit: False: dup: False: dup_id: None: hash_code: 5f272ec29b29d56ca08eba26435bdb225ae4956812c10fce872a6143b73474ba: eps: 0: notes: []: uid: 12345
# 125 : "Low Impact Test Find": Low : act: True : ver: True : mit: False: dup: True : dup_id: None: hash_code: 5f272ec29b29d56ca08eba26435bdb225ae4956812c10fce872a6143b73474ba: eps: 0: notes: []: uid: 12345
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# test 66: Checkmarx Scan detailed (algo=unique_id_from_tool, dynamic=False)
# no findings
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# test 77: Veracode Scan (algo=unique_id_from_tool_or_hash_code, dynamic=False)
# findings:
# 224 : "UID Impact Test Find": Low : act: True : ver: True : mit: False: dup: False: dup_id: None: hash_code: 3c5e9d1ec77aea19dd2bbf3aa51f585fc0da876174be8cac885966db1271f147: eps: 0: notes: []: uid: 6789
# 225 : "UID Impact Test Find": Low : act: True : ver: True : mit: False: dup: True : dup_id: 224 : hash_code: 3c5e9d1ec77aea19dd2bbf3aa51f585fc0da876174be8cac885966db1271f147: eps: 0: notes: []: uid: 6789
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# test 88: Veracode Scan (algo=unique_id_from_tool_or_hash_code, dynamic=False)
# no findings
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# engagement 6: April monthly engagement (dedupe_inside: True)
# engagement 3: weekly engagement (dedupe_inside: True)
# test 33: Generic Findings Import (algo=legacy, dynamic=False)
# findings:
# 22 : "Low Impact Test Find": Low : act: True : ver: True : mit: False: dup: False: dup_id: None: hash_code: 5f272ec29b29d56ca08eba26435bdb225ae4956812c10fce872a6143b73474ba: eps: 0: notes: []: uid: None
# 23 : "Low Impact Test Find": Low : act: True : ver: True : mit: False: dup: True : dup_id: 22 : hash_code: 5f272ec29b29d56ca08eba26435bdb225ae4956812c10fce872a6143b73474ba: eps: 0: notes: []: uid: None
# 24 : "Low Impact Test Find": Low : act: True : ver: True : mit: False: dup: True : dup_id: 22 : hash_code: 5f272ec29b29d56ca08eba26435bdb225ae4956812c10fce872a6143b73474ba: eps: 0: notes: []: uid: None
# endpoints
# 2: ftp://localhost/
# 1: http://127.0.0.1/endpoint/420/edit/
# 3: ssh:127.0.1
# endpoint statuses
# 1: dojo.Endpoint.None dojo.Finding.None 1 2020-07-01 00:00:00+00:00 2020-07-01 17:45:39.791907+00:00 False None None False False False ftp://localhost/ High Impact Test Finding
# product 3: Security Podcast
class TestDuplicationLogic(TestCase):
fixtures = ['dojo_testdata.json']
def run(self, result=None):
    """Run each test impersonating the admin user with celery execution blocked.

    Unit tests run without a request user, which would push actions like
    dedupe into the celery process; celery cannot see the in-memory sqlite
    test database, so we impersonate an admin whose contact info has
    block_execution set, forcing synchronous execution.
    """
    admin = User.objects.get(username='admin')
    admin.usercontactinfo.block_execution = True
    admin.save()
    with impersonate(admin):
        super().run(result)
def setUp(self):
    """Enable deduplication before every test and log the data summary."""
    logger.debug('enabling deduplication')
    self.enable_dedupe()
    self.log_summary()
def tearDown(self):
    """Re-enable deduplication (some tests disable it) and log the summary."""
    # some tests disable dedupe, always reenable for the next test
    self.enable_dedupe()
    self.log_summary()
# self.log_summary(test=33)
# self.log_summary(product=2)
# all engagements in the test data have deduplication_on_engagement set to true
# legacy algo: findings 22, 23, 24 in test 33 are scan_type Generic Findings Import which uses the legacy algo
def test_identical_legacy(self):
    """An exact copy of finding 24 (already a dup of 22) must dedupe again."""
    new_finding, source = self.copy_and_reset_finding(id=24)
    new_finding.save(dedupe_option=True)
    # the copy should point at the same original that 24 was a duplicate of
    self.assert_finding(new_finding, not_pk=24, duplicate=True, duplicate_finding_id=source.duplicate_finding.id, hash_code=source.hash_code)
def test_identical_ordering_legacy(self):
    """Check which of two identical candidate findings a new copy is marked against."""
    finding_22 = Finding.objects.get(id=22)
    # reset the duplicate status of 23 so both 22 and 23 are dedupe candidates
    # expect: duplicate of 23 as 23 is older (date field on finding)
    # but feature or BUG? it gets marked as duplicate of 22 because 22 comes
    # earlier in the findings list (default ordering is by date desc)
    finding_23 = Finding.objects.get(id=23)
    finding_23.duplicate = False
    finding_23.duplicate_finding = None
    finding_23.active = True
    finding_23.save(dedupe_option=False)
    self.assert_finding(finding_23, duplicate=False, hash_code=finding_22.hash_code)
    # now clone 22 and observe which candidate wins
    clone, finding_22 = self.copy_and_reset_finding(id=22)
    clone.save()
    self.assert_finding(clone, not_pk=22, duplicate=True, duplicate_finding_id=finding_22.id, hash_code=finding_22.hash_code)
    # self.assert_finding(clone, not_pk=22, duplicate=True, duplicate_finding_id=finding_23.id, hash_code=finding_22.hash_code)
def test_identical_except_title_legacy(self):
    """A copy with a different title must NOT dedupe (title is part of hash_code)."""
    # NOTE(review): this copies finding 4 (a hash_code/ZAP finding) although the
    # surrounding legacy tests work on findings 22-24 — confirm this is intended.
    new_finding, source = self.copy_and_reset_finding(id=4)
    new_finding.title = 'the best title'
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=24, duplicate=False, not_hash_code=source.hash_code)
def test_identical_except_description_legacy(self):
    """A copy with a different description must NOT dedupe under legacy.

    Legacy treats the description as leading for the hash_code.
    """
    new_finding, source = self.copy_and_reset_finding(id=24)
    new_finding.description = 'useless finding'
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=24, duplicate=False, not_hash_code=source.hash_code)
def test_identical_except_line_legacy(self):
    """A copy of finding 24 with a different line number must NOT dedupe."""
    new_finding, source = self.copy_and_reset_finding(id=24)
    new_finding.line = 666
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=24, duplicate=False, not_hash_code=source.hash_code)
def test_identical_except_filepath_legacy(self):
    """A copy of finding 24 with a different file_path must NOT dedupe.

    The file path takes part in legacy matching, so changing it prevents the
    duplicate marking and yields a different hash_code.
    """
    finding_new, finding_24 = self.copy_and_reset_finding(id=24)
    finding_new.file_path = '/dev/null'
    # removed an unused fetch of finding 22 (dead local, needless DB query)
    finding_new.save(dedupe_option=True)
    self.assert_finding(finding_new, not_pk=24, duplicate=False, not_hash_code=finding_24.hash_code)
def test_dedupe_inside_engagement_legacy(self):
    """A copy in a new engagement must NOT dedupe while dedupe is scoped per engagement."""
    new_finding, finding_22 = self.copy_and_reset_finding(id=22)
    # place the copy in a fresh engagement + test of the same product
    fresh_test, fresh_engagement = self.create_new_test_and_engagment_from_finding(finding_22)
    new_finding.test = fresh_test
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=22, duplicate=False, hash_code=finding_22.hash_code)
def test_dedupe_not_inside_engagement_legacy(self):
    """A copy in a new engagement DOES dedupe once engagement scoping is off."""
    new_finding, finding_22 = self.copy_and_reset_finding(id=22)
    # dedupe_inside_engagement must be disabled before cloning the engagement
    self.set_dedupe_inside_engagement(False)
    fresh_test, fresh_engagement = self.create_new_test_and_engagment_from_finding(finding_22)
    new_finding.test = fresh_test
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=22, duplicate=True, duplicate_finding_id=22, hash_code=finding_22.hash_code)
# legacy: if file_path and line or both empty and there are no endpoints, no dedupe will happen. Is this desirable or a BUG?
def test_identical_no_filepath_no_line_no_endpoints_legacy(self):
    """Without file_path, line and endpoints, legacy performs no dedupe at all."""
    new_finding, finding_22 = self.copy_and_reset_finding(id=22)
    new_finding.file_path = None
    new_finding.line = None
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=22, duplicate=False)
def test_identical_legacy_with_identical_endpoints_static(self):
    """A static copy sharing all endpoints of the original is marked as duplicate."""
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24, static=True, dynamic=False)  # has myhost.com, myhost2.com
    finding_new.save()
    # clone it and attach the exact same endpoints; expect a duplicate
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost.com', 'myhost2.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=True, duplicate_finding_id=finding_new.id, hash_code=finding_new.hash_code, not_hash_code=finding_24.hash_code)
def test_identical_legacy_extra_endpoints_static(self):
    """A static copy with a superset of the original endpoints still dedupes.

    Static findings only need to contain all endpoints of the existing
    finding; extras are no problem. hash_code is not affected by endpoints.
    """
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24, static=True, dynamic=False)  # has myhost.com, myhost2.com
    finding_new.save()
    # clone with one endpoint more than the original
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost.com', 'myhost2.com', 'myhost3.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=True, duplicate_finding_id=finding_new.id, hash_code=finding_new.hash_code, not_hash_code=finding_24.hash_code)
def test_identical_legacy_different_endpoints_static(self):
    """A static copy missing one of the original endpoints must NOT dedupe.

    Static findings must contain ALL endpoints of the existing finding;
    hash_code is not affected by endpoints.
    """
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24, static=True, dynamic=False)  # has myhost.com, myhost2.com
    finding_new.save()
    # clone with a partially different endpoint set
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost4.com', 'myhost2.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=False, hash_code=finding_new.hash_code, not_hash_code=finding_24.hash_code)
def test_identical_legacy_no_endpoints_static(self):
    """A static copy with no endpoints (and no filepath/line) must NOT dedupe."""
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24, static=True, dynamic=False)  # has myhost.com, myhost2.com
    finding_new.save()
    # clone without attaching any endpoints
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    clone.save()
    # no endpoints and no filepath/line on the clone -> no duplicate marking
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=False, hash_code=finding_new.hash_code, not_hash_code=finding_24.hash_code)
def test_identical_legacy_with_identical_endpoints_dynamic(self):
    """A copy with identical endpoints is marked as duplicate.

    NOTE(review): despite the ``_dynamic`` suffix this calls
    copy_and_reset_finding_add_endpoints with static=True, dynamic=False —
    confirm whether that is intended or a copy-paste from the static variant.
    """
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24, static=True, dynamic=False)  # has myhost.com, myhost2.com
    finding_new.save()
    # clone with the exact same endpoints; expect a duplicate
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost.com', 'myhost2.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=True, duplicate_finding_id=finding_new.id, hash_code=finding_new.hash_code, not_hash_code=finding_24.hash_code)
def test_identical_legacy_extra_endpoints_dynamic(self):
    """A dynamic copy with one extra endpoint must NOT dedupe.

    For dynamic findings the endpoints take part in the hash_code, so the
    extra endpoint changes the hash and prevents the duplicate marking.
    """
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24)
    finding_new.save()
    # clone with one endpoint more than the original
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost.com', 'myhost2.com', 'myhost3.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=False, not_hash_code=finding_new.hash_code)
def test_identical_legacy_different_endpoints_dynamic(self):
    """A dynamic copy with a different endpoint set must NOT dedupe.

    This mirrors the import flow in serializers.py:
      - save the finding first with dedupe disabled
      - attach the endpoints
      - save again with dedupe on (default) so the hash_code gets computed
    Endpoints need to be 100% equal (host+port) for dynamic findings, and
    they take part in the hash_code.
    """
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24)
    finding_new.save()
    # clone with a partially different endpoint set
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost4.com', 'myhost2.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=False, not_hash_code=finding_new.hash_code)
def test_identical_legacy_no_endpoints_dynamic(self):
    """A dynamic copy with no endpoints must NOT dedupe (hash differs)."""
    finding_new, finding_24 = self.copy_and_reset_finding_add_endpoints(id=24)
    finding_new.save()
    # clone without attaching any endpoints
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    clone.save()
    # endpoints take part in the hash_code, so the clone's hash differs
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=False, not_hash_code=finding_new.hash_code)
# hash_code based algorithm tests
# existing findings in test 3 are from ZAP scanner, which uses hash_code algorithm with ['title', 'cwe', 'endpoints', 'severity']
def test_identical_hash_code(self):
    """An exact copy of finding 4 (already a dup of 2) is deduped under hash_code."""
    new_finding, source = self.copy_and_reset_finding(id=4)
    new_finding.save(dedupe_option=True)
    # the copy should point at the same original that 4 was a duplicate of
    self.assert_finding(new_finding, not_pk=4, duplicate=True, duplicate_finding_id=source.duplicate_finding.id, hash_code=source.hash_code)
def test_identical_ordering_hash_code(self):
    """Check which of two identical candidate findings a new copy is marked against."""
    finding_2 = Finding.objects.get(id=2)
    # reset the duplicate status of 3 so both 2 and 3 are dedupe candidates
    # expect: duplicate of 3 as 3 is older (date field on finding)
    # but feature or BUG? it gets marked as duplicate of 2 because 2 comes
    # earlier in the findings list (default ordering is by date desc)
    finding_3 = Finding.objects.get(id=3)
    finding_3.duplicate = False
    finding_3.duplicate_finding = None
    finding_3.active = True
    finding_3.save(dedupe_option=False)
    self.assert_finding(finding_3, duplicate=False, hash_code=finding_2.hash_code)
    # now clone 2 and observe which candidate wins
    clone, finding_2 = self.copy_and_reset_finding(id=2)
    clone.save()
    self.assert_finding(clone, not_pk=2, duplicate=True, duplicate_finding_id=finding_2.id, hash_code=finding_2.hash_code)
    # self.assert_finding(clone, not_pk=2, duplicate=True, duplicate_finding_id=finding_3.id, hash_code=finding_2.hash_code)
def test_identical_except_title_hash_code(self):
    """A copy with a different title must NOT dedupe (title is part of hash_code)."""
    new_finding, source = self.copy_and_reset_finding(id=4)
    new_finding.title = 'the best title'
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=4, duplicate=False, not_hash_code=source.hash_code)
def test_identical_except_description_hash_code(self):
    """A copy with a different description still dedupes under ZAP hash_code.

    ZAP's hash_code fields are title, cwe, endpoints and severity — the
    description is not part of the hash.
    """
    new_finding, source = self.copy_and_reset_finding(id=4)
    new_finding.description = 'useless finding'
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=4, duplicate=True, duplicate_finding_id=source.duplicate_finding.id, hash_code=source.hash_code)
# TODO not useful with ZAP?
def test_identical_except_line_hash_code(self):
    """A copy with a different line still dedupes (line is not in ZAP's hash fields)."""
    new_finding, source = self.copy_and_reset_finding(id=4)
    new_finding.line = 666
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=4, duplicate=True, duplicate_finding_id=source.duplicate_finding.id, hash_code=source.hash_code)
# TODO not useful with ZAP?
def test_identical_except_filepath_hash_code(self):
    """A copy with a different file_path still dedupes (path is not in ZAP's hash fields)."""
    new_finding, source = self.copy_and_reset_finding(id=4)
    new_finding.file_path = '/dev/null'
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=4, duplicate=True, duplicate_finding_id=source.duplicate_finding.id, hash_code=source.hash_code)
def test_dedupe_inside_engagement_hash_code(self):
    """A copy moved to another engagement must NOT dedupe while scoped per engagement."""
    new_finding, finding_2 = self.copy_and_reset_finding(id=2)
    # test 4 lives in a different engagement of the same product
    new_finding.test = Test.objects.get(id=4)
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=2, duplicate=False, hash_code=finding_2.hash_code)
def test_dedupe_not_inside_engagement_hash_code(self):
    """A copy in another engagement dedupes once engagement scoping is disabled."""
    self.set_dedupe_inside_engagement(False)
    new_finding, finding_2 = self.copy_and_reset_finding(id=2)
    # test 4 lives in a different engagement of the same product
    new_finding.test = Test.objects.get(id=4)
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=2, duplicate=True, duplicate_finding_id=2, hash_code=finding_2.hash_code)
# hash_code: if file_path and line or both empty and there are no endpoints, dedupe should happen (as opposed to legacy dedupe)
def test_identical_no_filepath_no_line_no_endpoints_hash_code(self):
    """Unlike legacy, hash_code dedupes even without filepath/line/endpoints."""
    new_finding, finding_2 = self.copy_and_reset_finding(id=2)
    new_finding.file_path = None
    new_finding.line = None
    new_finding.save(dedupe_option=True)
    self.assert_finding(new_finding, not_pk=2, duplicate=True, duplicate_finding_id=2, hash_code=finding_2.hash_code)
def test_identical_hash_code_with_identical_endpoints(self):
    """A copy with the exact same endpoints is a duplicate (endpoints are hashed)."""
    finding_new, finding_4 = self.copy_and_reset_finding_add_endpoints(id=4)  # has myhost.com, myhost2.com
    finding_new.save()
    # clone with the same endpoint set; expect a duplicate
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost.com', 'myhost2.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    # expect: marked as duplicate of the first copy (the assertion below
    # checks duplicate=True; the original comment here claimed otherwise)
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=True, duplicate_finding_id=finding_new.id, hash_code=finding_new.hash_code, not_hash_code=finding_4.hash_code)
def test_identical_hash_code_with_different_endpoints(self):
    """A copy with a different endpoint set must NOT dedupe under hash_code."""
    finding_new, finding_4 = self.copy_and_reset_finding_add_endpoints(id=4)
    # save with dedupe on so the hash_code includes the endpoints
    finding_new.save()
    # clone with a partially different, larger endpoint set
    clone, finding_new = self.copy_and_reset_finding(id=finding_new.id)
    clone.save(dedupe_option=False)
    for host in ('myhost4.com', 'myhost2.com', 'myhost3.com'):
        endpoint = Endpoint(product=clone.test.engagement.product, finding=clone, host=host, protocol="https")
        endpoint.save()
        clone.endpoints.add(endpoint)
    clone.save()
    # expect: not a duplicate; the hash_code changed with the endpoints
    self.assert_finding(clone, not_pk=finding_new.pk, duplicate=False, not_hash_code=finding_4.hash_code)
# # unique_id algo uses id from tool. hash_code is still calculated, according to legacy field config Checkmarx detailed scan
def test_identical_unique_id(self):
    """An exact copy carrying the same unique_id_from_tool is a duplicate."""
    new_finding, finding_124 = self.copy_and_reset_finding(id=124)
    new_finding.save()
    self.assert_finding(new_finding, not_pk=124, duplicate=True, duplicate_finding_id=124, hash_code=finding_124.hash_code)
def test_different_unique_id_unique_id(self):
    """A copy with another unique_id must NOT dedupe even though the hash matches."""
    new_finding, finding_124 = self.copy_and_reset_finding(id=124)
    new_finding.unique_id_from_tool = '9999'
    new_finding.save()
    self.assert_finding(new_finding, not_pk=124, duplicate=False, hash_code=finding_124.hash_code)
def test_identical_ordering_unique_id(self):
    """A copy of 125 gets marked against 124 (first in default ordering)."""
    new_finding, finding_125 = self.copy_and_reset_finding(id=125)
    new_finding.save()
    # duplicate of 124 as it comes first in the list, even though it is newer
    # than 125 — feature or BUG?
    self.assert_finding(new_finding, not_pk=124, duplicate=True, duplicate_finding_id=124, hash_code=finding_125.hash_code)
def test_title_description_line_filepath_different_unique_id(self):
    """Matching is on unique_id only: a copy with changed fields still dedupes."""
    new_finding, finding_124 = self.copy_and_reset_finding(id=124)
    new_finding.title = 'another title'
    new_finding.cve = 'CVE-2020-12345'
    new_finding.cwe = '456'
    new_finding.description = 'useless finding'
    new_finding.save()
    # still a duplicate despite a different hash_code
    self.assert_finding(new_finding, not_pk=124, duplicate=True, duplicate_finding_id=124, not_hash_code=finding_124.hash_code)
def test_title_description_line_filepath_different_and_id_different_unique_id(self):
    """A copy with changed fields AND another unique_id must NOT dedupe."""
    new_finding, finding_124 = self.copy_and_reset_finding(id=124)
    new_finding.title = 'another title'
    new_finding.cve = 'CVE-2020-12345'
    new_finding.cwe = '456'
    new_finding.description = 'useless finding'
    new_finding.unique_id_from_tool = '9999'
    new_finding.save()
    # no unique_id match, and the hash_code differs because fields changed
    self.assert_finding(new_finding, not_pk=124, duplicate=False, not_hash_code=finding_124.hash_code)
def test_dedupe_not_inside_engagement_unique_id(self):
    """With dedupe_inside_engagement enabled, a uid match in another engagement must not dedupe."""
    # create identical copy
    finding_new, finding_124 = self.copy_and_reset_finding(id=124)
    # first setup some finding with same unique_id in different engagement, but same test_type
    finding_22 = Finding.objects.get(id=22)
    finding_22.test.test_type = finding_124.test.test_type
    finding_22.test.save()
    finding_22.unique_id_from_tool = '888'
    finding_22.save(dedupe_option=False)
    finding_new.unique_id_from_tool = '888'
    finding_new.save()
    # expect not duplicate as dedupe_inside_engagement is True
    self.assert_finding(finding_new, not_pk=124, duplicate=False, hash_code=finding_124.hash_code)
def test_dedupe_inside_engagement_unique_id(self):
    """A uid match in a different test of the SAME engagement must dedupe when dedupe_inside_engagement is on."""
    # create identical copy
    finding_new, finding_124 = self.copy_and_reset_finding(id=124)
    # move the copy into a different test (same test_type) inside the same engagement
    finding_new.test = Test.objects.get(id=66)
    finding_new.save()
    # expect duplicate as dedupe_inside_engagement is True and the other test is in the same engagement
    # (stray print() debug statements removed; assert_finding covers the checked state)
    self.assert_finding(finding_new, not_pk=124, duplicate=True, duplicate_finding_id=124, hash_code=finding_124.hash_code)
def test_dedupe_inside_engagement_unique_id2(self):
    """With dedupe_inside_engagement disabled, a uid match in another engagement does dedupe."""
    # create identical copy
    finding_new, finding_124 = self.copy_and_reset_finding(id=124)
    # first setup some finding with same unique_id in different engagement, but same test_type
    self.set_dedupe_inside_engagement(False)
    finding_22 = Finding.objects.get(id=22)
    finding_22.test.test_type = finding_124.test.test_type
    finding_22.test.save()
    finding_22.unique_id_from_tool = '888'
    finding_22.save(dedupe_option=False)
    finding_new.unique_id_from_tool = '888'
    finding_new.save()
    # expect duplicate as dedupe_inside_engagement is false
    self.assert_finding(finding_new, not_pk=124, duplicate=True, duplicate_finding_id=finding_22.id, hash_code=finding_124.hash_code)
def test_dedupe_same_id_different_test_type_unique_id(self):
    """A matching uid on a finding of a different test_type must not dedupe."""
    # create identical copy
    finding_new, finding_124 = self.copy_and_reset_finding(id=124)
    # first setup some finding from a different test_type, but with the same unique_id_from_tool
    finding_22 = Finding.objects.get(id=22)
    finding_22.unique_id_from_tool = '888'
    finding_new.unique_id_from_tool = '888'
    # and we need to look in another engagement this time for finding_22
    self.set_dedupe_inside_engagement(False)
    finding_22.save(dedupe_option=False)
    finding_new.save()
    # expect not duplicate as the matching finding is from another test_type, hash_code is the same as original
    self.assert_finding(finding_new, not_pk=124, duplicate=False, hash_code=finding_124.hash_code)
def test_identical_different_endpoints_unique_id(self):
    """Endpoints must not affect dedupe (or hash_code) for the unique_id algorithm."""
    # create identical copy
    finding_new, finding_124 = self.copy_and_reset_finding(id=124)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.save()
    # expect duplicate, as endpoints shouldn't affect dedupe and hash_code due to unique_id
    self.assert_finding(finding_new, not_pk=124, duplicate=True, duplicate_finding_id=124, hash_code=finding_124.hash_code)
# algo unique_id_or_hash_code Veracode scan
def test_identical_unique_id_or_hash_code(self):
    """An identical copy dedupes under the unique_id_or_hash_code algorithm (uid matches)."""
    # create identical copy
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.save()
    # expect duplicate as uid matches
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code)
# existing BUG? finding gets matched on hash code, while there is also an existing finding with matching unique_id_from_tool
def test_identical_unique_id_or_hash_code_bug(self):
    """Documents existing behavior: hash_code match wins over uid match when both exist."""
    # create identical copy
    finding_124 = Finding.objects.get(id=124)
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.title = finding_124.title  # use title from 124 to get matching hashcode
    finding_new.save()
    # it should match finding 224 as uid matches, but dd currently matches against 124 as that has the same hashcode and is earlier in the list of findings
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=124, hash_code=finding_124.hash_code)
def test_different_unique_id_unique_id_or_hash_code(self):
    """uid mismatch still dedupes via hash_code; changing both uid and hash_code does not."""
    # create identical copy
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.unique_id_from_tool = '9999'
    finding_new.save()
    # expect duplicate, uid mismatch, but same hash_code
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=finding_224.id, hash_code=finding_224.hash_code)
    # but if we change title and thus hash_code, it should no longer match
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.unique_id_from_tool = '9999'
    finding_new.title = 'no no no no no no'
    finding_new.save()
    # expect NOT duplicate: uid mismatch and different hash_code
    self.assert_finding(finding_new, not_pk=224, duplicate=False, not_hash_code=finding_224.hash_code)
def test_identical_ordering_unique_id_or_hash_code(self):
    """An identical copy of 225 dedupes against the first matching finding in the list (224)."""
    # create identical copy
    finding_new, finding_225 = self.copy_and_reset_finding(id=225)
    finding_new.save()
    # expect duplicate, but of 224 as that is first in the list, but it's newer than 225. feature or BUG?
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_225.hash_code)
def test_title_description_line_filepath_different_unique_id_or_hash_code(self):
    """Changed content fields still dedupe via the uid half of unique_id_or_hash_code."""
    # create identical copy, change some fields
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.title = 'another title'
    finding_new.cve = 'CVE-2020-12345'
    finding_new.cwe = '456'
    finding_new.description = 'useless finding'
    finding_new.save()
    # expect duplicate as we only match on unique id, hash_code also different
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, not_hash_code=finding_224.hash_code)
def test_title_description_line_filepath_different_and_id_different_unique_id_or_hash_code(self):
    """Changing both the uid and the hash_code-relevant fields prevents dedupe."""
    # create identical copy, change some fields
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.title = 'another title'
    finding_new.cve = 'CVE-2020-12345'
    finding_new.cwe = '456'
    finding_new.description = 'useless finding'
    finding_new.unique_id_from_tool = '9999'
    finding_new.save()
    # expect not duplicate as we match on unique id, hash_code also different because fields changed
    self.assert_finding(finding_new, not_pk=224, duplicate=False, not_hash_code=finding_224.hash_code)
def test_dedupe_not_inside_engagement_same_hash_unique_id_or_hash_code(self):
    """Documents existing behavior: hash_code match inside the engagement wins over uid match outside it."""
    # create identical copy
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    # first setup some finding with same unique_id in different engagement, but same test_type, same hash
    finding_22 = Finding.objects.get(id=22)
    finding_22.test.test_type = finding_224.test.test_type
    finding_22.test.save()
    finding_22.unique_id_from_tool = '888'
    finding_22.save(dedupe_option=False)
    finding_new.unique_id_from_tool = '888'
    finding_new.save()
    # should become duplicate of finding 22 because of the uid match, but existing BUG makes it duplicate of 224 due to hashcode match
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code)
def test_dedupe_not_inside_engagement_same_hash_unique_id_or_hash_code2(self):
    """Documents existing behavior: a same-engagement hash_code match dedupes even with a different uid."""
    # create identical copy
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    # first setup some finding with same unique_id in different engagement, different test_type, same hash_code
    finding_22 = Finding.objects.get(id=22)
    finding_22.test.test_type = finding_224.test.test_type
    finding_22.test.save()
    finding_22.unique_id_from_tool = '333'
    finding_22.save(dedupe_option=False)
    finding_new.hash_code = finding_22.hash_code  # sneaky copy of hash_code to be able to test this case in combination with the bug in previous test case above
    finding_new.unique_id_from_tool = '333'
    finding_new.save()
    # expect not duplicate as dedupe_inside_engagement is True and 22 is in another engagement
    # but existing BUG? it is marked as duplicate of 124 which has the same hash and same engagement, but different unique_id_from_tool at same test_type
    self.assert_finding(finding_new, not_pk=22, duplicate=True, duplicate_finding_id=124, hash_code=finding_22.hash_code)
def test_dedupe_inside_engagement_unique_id_or_hash_code(self):
    """A match in a different test of the same engagement dedupes when dedupe_inside_engagement is on."""
    # create identical copy
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    # first setup some finding with same unique_id in same engagement, but different test (same test_type)
    finding_new.test = Test.objects.get(id=66)
    finding_new.save()
    # expect duplicate as dedupe_inside_engagement is True and the other test is in the same engagement
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code)
def test_dedupe_inside_engagement_unique_id_or_hash_code2(self):
    """With dedupe_inside_engagement disabled, a uid match in another engagement dedupes."""
    # create identical copy
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    # first setup some finding with same unique_id in different engagement, but same test_type
    self.set_dedupe_inside_engagement(False)
    finding_22 = Finding.objects.get(id=22)
    finding_22.test.test_type = finding_224.test.test_type
    finding_22.test.save()
    finding_22.unique_id_from_tool = '888'
    finding_22.save(dedupe_option=False)
    finding_new.unique_id_from_tool = '888'
    finding_new.title = 'hack to work around bug that matches on hash_code first'  # arrange different hash_code
    finding_new.save()
    # expect duplicate as dedupe_inside_engagement is false
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=finding_22.id, not_hash_code=finding_22.hash_code)
def test_dedupe_same_id_different_test_type_unique_id_or_hash_code(self):
    """uid match across test_types does not dedupe; an identical hash_code still does."""
    # create identical copy
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    # first setup some finding from a different test_type, but with the same unique_id_from_tool
    finding_22 = Finding.objects.get(id=22)
    finding_22.unique_id_from_tool = '888'
    finding_new.unique_id_from_tool = '888'
    # and we need to look in another engagement this time for finding_22
    self.set_dedupe_inside_engagement(False)
    finding_22.save(dedupe_option=False)
    finding_new.title = 'title to change hash_code'
    finding_new.save()
    # expect not duplicate as the matching finding is from another test_type, hash_code is also different
    self.assert_finding(finding_new, not_pk=224, duplicate=False, not_hash_code=finding_224.hash_code)
    # same scenario but with identical hash_code as 224 leads to being marked as duplicate of 224
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    # first setup some finding from a different test_type, but with the same unique_id_from_tool
    finding_22 = Finding.objects.get(id=22)
    finding_22.unique_id_from_tool = '888'
    finding_new.unique_id_from_tool = '888'
    # and we need to look in another engagement this time for finding_22
    self.set_dedupe_inside_engagement(False)
    finding_22.save(dedupe_option=False)
    finding_new.save()
    # expect duplicate of 224: the uid match is in another test_type, but the unchanged hash_code matches 224
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code)
def test_identical_different_endpoints_unique_id_or_hash_code(self):
    """Endpoints only affect hash_code (and thus dedupe) when dynamic_finding is True and the uid differs."""
    # create identical copy, so unique id is the same
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.save()
    # expect duplicate, as endpoints shouldn't affect dedupe and hash_code due to unique_id
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code)
    # same scenario, now with different uid. and different endpoints, but hash will be different due to the endpoints because we set dynamic_finding to True
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.unique_id_from_tool = 1
    finding_new.dynamic_finding = True
    finding_new.save()
    # different uid. and different endpoints, so different hash, so no duplicate
    self.assert_finding(finding_new, not_pk=224, duplicate=False, not_hash_code=finding_224.hash_code)
    # same scenario, now with different uid. and different endpoints, but hash will not be affected by endpoints because dynamic_finding is set to False
    finding_new, finding_224 = self.copy_and_reset_finding(id=224)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.unique_id_from_tool = 1
    finding_new.dynamic_finding = False
    finding_new.save()
    # different uid. and different endpoints, but hash will not be affected by endpoints because dynamic_finding is set to False
    self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code)
# sync false positive history tests
def test_false_positive_history_with_dedupe_no_endpoints_identical(self):
    """Identical copy: dedupe marks it duplicate AND false positive history copies false_p."""
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.false_p = False
    finding_new.save()
    # dedupe is enabled, hash_code matches, so new finding marked as duplicate AND copies false positive True from original
    # feature or BUG? finding already marked as duplicate, should it also be marked as false positive?
    # should we do the same for out_of_scope? risk accepted?
    # should this be part of the dedupe process? or separate as in false_p history?
    self.assert_finding(finding_new, not_pk=22, duplicate=True, duplicate_finding_id=finding_22.id, hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
def test_false_positive_history_with_dedupe_no_endpoints_title_matches_but_not_hash_code(self):
    """Different cwe -> different hash_code: no dedupe and no false positive carry-over."""
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.cwe = 432
    finding_new.false_p = False
    finding_new.save()
    # dedupe is enabled, hash_code doesn't match, so new finding not marked as duplicate and also not recognized by false positive history
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, False)
def test_false_positive_history_with_dedupe_no_endpoints_cwe_matches_but_not_hash_code(self):
    """Different title -> different hash_code: no dedupe and no false positive carry-over."""
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.title = 'same same but different'
    finding_new.false_p = False
    finding_new.save()
    # dedupe is enabled, hash_code doesn't match, so new finding not marked as duplicate and also not recognized by false positive history
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, False)
def test_false_positive_history_without_dedupe_no_endpoints_identical(self):
    """With dedupe disabled, a hash_code match still triggers the false positive history copy."""
    self.enable_dedupe(enable=False)
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.false_p = False
    finding_new.save()
    # dedupe is disabled, hash_code matches, so marked as false positive
    self.assert_finding(finding_new, not_pk=22, duplicate=False, hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
def test_false_positive_history_without_dedupe_no_endpoints_title_matches_but_not_hash_code(self):
    """With dedupe disabled and a different cwe (hash mismatch), false_p is not copied."""
    self.enable_dedupe(enable=False)
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.cwe = 432
    finding_new.false_p = False
    finding_new.save()
    # dedupe is disabled, hash_code doesn't match, so not marked as false positive
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, False)
def test_false_positive_history_without_dedupe_no_endpoints_cwe_matches_but_not_hash_code(self):
    """With dedupe disabled and a different title (hash mismatch), false_p is not copied."""
    self.enable_dedupe(enable=False)
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.title = 'same same but different'
    finding_new.false_p = False
    finding_new.save()
    # hash_code doesn't match, so new finding not marked as duplicate and also not recognized by false positive history
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, False)
# false positive history with endpoints
def test_false_positive_history_with_dedupe_with_endpoints_identical(self):
    """false_p is copied even when endpoints differ and dedupe says not-a-duplicate."""
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    ep1 = Endpoint(product=finding_22.test.engagement.product, finding=finding_22, host="myhostxxx.com", protocol="https")
    ep1.save()
    finding_22.endpoints.add(ep1)
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.false_p = False
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.save(false_history=True)
    # dedupe is enabled, hash_code mismatch due to endpoints, so new finding not marked as duplicate AND copies false positive True from original even with mismatching endpoints
    # feature or BUG? false positive status is copied when dedupe says it's not a dupe and endpoints are mismatching
    self.assert_finding(finding_new, not_pk=22, duplicate=False, hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
def test_false_positive_history_with_dedupe_with_endpoints_title_matches_but_not_hash_code(self):
    """Title match alone triggers the false positive history copy despite a hash mismatch."""
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    finding_22.false_p = True
    ep1 = Endpoint(product=finding_22.test.engagement.product, finding=finding_22, host="myhostxxx.com", protocol="https")
    ep1.save()
    finding_22.endpoints.add(ep1)
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.false_p = False
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.cwe = 432
    finding_new.save(false_history=True)
    # dedupe is enabled, hash_code doesn't match, so new finding not marked as duplicate but it IS recognized by false positive history because of the title matching
    # feature or BUG? false positive status is copied when dedupe says it's not a dupe and endpoints are mismatching
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
def test_false_positive_history_with_dedupe_with_endpoints_cwe_matches_but_not_hash_code(self):
    """CWE match alone triggers the false positive history copy despite a hash mismatch."""
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    ep1 = Endpoint(product=finding_22.test.engagement.product, finding=finding_22, host="myhostxxx.com", protocol="https")
    ep1.save()
    finding_22.endpoints.add(ep1)
    finding_22.false_p = True
    finding_22.cwe = 123  # test data has no CWE here
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.title = 'same same but different'
    finding_new.false_p = False
    finding_new.save(false_history=True)
    # dedupe is enabled, hash_code doesn't match, so new finding not marked as duplicate but it IS recognized by false positive history because of the cwe matching
    # feature or BUG? false positive status is copied when dedupe says it's not a dupe and endpoints are mismatching
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
def test_false_positive_history_without_dedupe_with_endpoints_identical(self):
    """With dedupe disabled, a hash_code match marks the copy as false positive (endpoints differ)."""
    self.enable_dedupe(enable=False)
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    ep1 = Endpoint(product=finding_22.test.engagement.product, finding=finding_22, host="myhostxxx.com", protocol="https")
    ep1.save()
    finding_22.endpoints.add(ep1)
    finding_22.false_p = True
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.false_p = False
    finding_new.save(false_history=True)
    # dedupe is disabled, hash_code matches, so marked as false positive
    self.assert_finding(finding_new, not_pk=22, duplicate=False, hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
def test_false_positive_history_without_dedupe_with_endpoints_title_matches_but_not_hash_code(self):
    """Dedupe disabled, hash mismatch: title match still triggers the false positive copy."""
    self.enable_dedupe(enable=False)
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    ep1 = Endpoint(product=finding_22.test.engagement.product, finding=finding_22, host="myhostxxx.com", protocol="https")
    ep1.save()
    finding_22.endpoints.add(ep1)
    finding_22.false_p = True
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.cwe = 432
    finding_new.false_p = False
    finding_new.save(false_history=True)
    # dedupe is disabled, hash_code doesn't match, but it IS recognized by false positive history because of the title matching
    # feature or BUG? false positive status is copied when dedupe says it's not a dupe and endpoints are mismatching
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
def test_false_positive_history_without_dedupe_with_endpoints_cwe_matches_but_not_hash_code(self):
    """Dedupe disabled, hash mismatch: CWE match still triggers the false positive copy."""
    self.enable_dedupe(enable=False)
    self.enable_false_positive_history()
    finding_22 = Finding.objects.get(id=22)
    ep1 = Endpoint(product=finding_22.test.engagement.product, finding=finding_22, host="myhostxxx.com", protocol="https")
    ep1.save()
    finding_22.endpoints.add(ep1)
    finding_22.cwe = 123  # test data has no CWE here
    finding_22.false_p = True
    finding_22.save(dedupe_option=False)
    # create a copy of 22
    finding_new, finding_22 = self.copy_and_reset_finding(id=22)
    finding_new.save(dedupe_option=False)
    ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https")
    ep1.save()
    finding_new.endpoints.add(ep1)
    finding_new.title = 'same same but different'
    finding_new.false_p = False
    finding_new.save(false_history=True)
    # dedupe is disabled, hash_code doesn't match, so new finding not marked as duplicate but it IS recognized by false positive history because of the cwe matching
    # feature or BUG? false positive status is copied when dedupe says it's not a dupe and endpoints are mismatching
    self.assert_finding(finding_new, not_pk=22, duplicate=False, not_hash_code=finding_22.hash_code)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(finding_new.false_p, True)
# some extra tests
# hash_code currently is only created on finding creation and after that never changed. feature or BUG?
def test_hash_code_onetime(self):
    """hash_code is computed once at creation and never recomputed on later saves."""
    finding_new, finding_2 = self.copy_and_reset_finding(id=2)
    self.assertEqual(finding_new.hash_code, None)
    finding_new.save()
    self.assertTrue(finding_new.hash_code)  # True -> not None
    hash_code_at_creation = finding_new.hash_code
    finding_new.title = 'new_title'
    finding_new.cve = 999
    # both title and cve affect hash_code for ZAP scans, but not here because hash_code was already calculated
    finding_new.save()
    self.assertEqual(finding_new.hash_code, hash_code_at_creation)
    finding_new.save(dedupe_option=False)
    self.assertEqual(finding_new.hash_code, hash_code_at_creation)
    finding_new.save(dedupe_option=True)
    self.assertEqual(finding_new.hash_code, hash_code_at_creation)
def test_identical_legacy_dedupe_option_true_false(self):
    """save(dedupe_option=False) skips dedupe (and hash_code); a later dedupe save marks the duplicate."""
    # 24 is already a duplicate of 22 let's see what happens if we create an identical finding (but reset status)
    # expect: not marked as duplicate with dedupe_option=False
    finding_new, finding_24 = self.copy_and_reset_finding(id=24)
    finding_new.save(dedupe_option=False)
    self.assert_finding(finding_new, not_pk=24, duplicate=False, hash_code=None)
    # expect duplicate when saving with dedupe_option=True
    finding_new.save(dedupe_option=True)
    self.assert_finding(finding_new, not_pk=24, duplicate=True, duplicate_finding_id=finding_24.duplicate_finding.id, hash_code=finding_24.hash_code)
def test_duplicate_after_modification(self):
    """Reverting fields after the first save does not re-trigger dedupe (hash_code is never recomputed)."""
    # we copy a finding but change some important fields so it's no longer a duplicate
    # expect: not marked as duplicate with dedupe_option=False
    finding_new, finding_24 = self.copy_and_reset_finding(id=24)
    finding_new.title = 'new_title'
    finding_new.cve = 999
    finding_new.save(dedupe_option=True)
    self.assert_finding(finding_new, not_pk=24, duplicate=False, not_hash_code=None)
    # now when we change the title and cve back the same as finding_24, it should be marked as duplicate
    # however defect dojo does NOT recalculate the hash_code, so it will not mark this finding as duplicate. feature or BUG?
    finding_new.title = finding_24.title
    finding_new.cve = finding_24.cve
    finding_new.save(dedupe_option=True)
    self.assert_finding(finding_new, not_pk=24, duplicate=False, not_hash_code=None)
def test_case_sensitiveness_hash_code_computation(self):
    """Upper-casing the title yields a different hash_code, so no dedupe."""
    # hash_code calculation is case sensitive. feature or BUG?
    finding_new, finding_24 = self.copy_and_reset_finding(id=24)
    finding_new.title = finding_24.title.upper()
    finding_new.save(dedupe_option=True)
    self.assert_finding(finding_new, not_pk=24, duplicate=False, not_hash_code=finding_24.hash_code)
def test_title_case(self):
    """Guard test: Finding.save applies title casing, which feeds into hash_code computation."""
    # currently the finding.save method applies title casing to the title
    # 'absolutely great title' becomes 'Absolutely Great Title'
    # as this affects deduplication (hash_code computation) we provide a test case here
    # it will fail if someone removes title casing and force them to think about the implications
    # ideally we will switch to case-insensitive hash_code computation.
    # this could be a relatively small impact change as saving findings (currently) doesn't recompute the hash_code
    finding_new, finding_24 = self.copy_and_reset_finding(id=24)
    finding_new.title = 'the quick brown fox jumps over the lazy dog'
    finding_new.save(dedupe_option=True)
    self.assertEqual(finding_new.title, 'The Quick Brown Fox Jumps Over the Lazy Dog')
def test_hash_code_without_dedupe(self):
    """hash_code generation follows dedupe_option on save, even with dedupe globally disabled."""
    # if dedupe is disabled, hash_code should still be calculated
    self.enable_dedupe(enable=False)
    finding_new, finding_124 = self.copy_and_reset_finding(id=124)
    finding_new.save(dedupe_option=False)
    # save skips hash_code generation if dedupe_option==False
    self.assertFalse(finding_new.hash_code)
    finding_new.save(dedupe_option=True)
    self.assertTrue(finding_new.hash_code)
    finding_new, finding_124 = self.copy_and_reset_finding(id=124)
    finding_new.save()
    # by default hash_code should be generated
    self.assertTrue(finding_new.hash_code)
# utility methods
def log_product(self, product):
    """Debug-log a product plus all of its engagements and their tests (accepts a Product or its pk)."""
    prod = Product.objects.get(pk=product) if isinstance(product, int) else product
    logger.debug('product %i: %s', prod.id, prod.name)
    for engagement in prod.engagement_set.all():
        self.log_engagement(engagement)
        for current_test in engagement.test_set.all():
            self.log_test(current_test)
def log_engagement(self, eng):
    """Debug-log one engagement (accepts an Engagement or its pk)."""
    engagement = Engagement.objects.get(pk=eng) if isinstance(eng, int) else eng
    logger.debug('\tengagement %i: %s (dedupe_inside: %s)', engagement.id, engagement.name, engagement.deduplication_on_engagement)
def log_test(self, test):
    """Debug-log one test and all of its findings (accepts a Test or its pk)."""
    test = Test.objects.get(pk=test) if isinstance(test, int) else test
    logger.debug('\t\ttest %i: %s (algo=%s, dynamic=%s)', test.id, test, test.dedupe_algo, test.test_type.dynamic_tool)
    self.log_findings(test.finding_set.all())
def log_all_products(self):
    """Debug-log a summary for every product in the database."""
    for prod in Product.objects.all():
        self.log_summary(product=prod)
def log_findings(self, findings):
    """Debug-log a one-line summary per finding, then ALL endpoints and endpoint statuses in the database."""
    if not findings:
        logger.debug('\t\t' + 'no findings')
    else:
        logger.debug('\t\t' + 'findings:')
        for finding in findings:
            # one dense line per finding: id, title, severity, status flags, dedupe info, endpoint/note counts, uid
            logger.debug('\t\t\t{:4.4}'.format(str(finding.id)) + ': "' + '{:20.20}'.format(finding.title) + '": ' + '{:5.5}'.format(finding.severity) + ': act: ' + '{:5.5}'.format(str(finding.active)) +
                         ': ver: ' + '{:5.5}'.format(str(finding.verified)) + ': mit: ' + '{:5.5}'.format(str(finding.is_Mitigated)) +
                         ': dup: ' + '{:5.5}'.format(str(finding.duplicate)) + ': dup_id: ' +
                         ('{:4.4}'.format(str(finding.duplicate_finding.id)) if finding.duplicate_finding else 'None') + ': hash_code: ' + str(finding.hash_code) +
                         ': eps: ' + str(finding.endpoints.count()) + ": notes: " + str([n.id for n in finding.notes.all()]) +
                         ': uid: ' + '{:5.5}'.format(str(finding.unique_id_from_tool)) + (' fp' if finding.false_p else '')
                         )
    # NOTE(review): the endpoint dumps below appear to be global (all Endpoint / Endpoint_Status rows),
    # not filtered to the given findings — confirm that is intentional.
    logger.debug('\t\tendpoints')
    for ep in Endpoint.objects.all():
        logger.debug('\t\t\t' + str(ep.id) + ': ' + str(ep))
    logger.debug('\t\t' + 'endpoint statuses')
    for eps in Endpoint_Status.objects.all():
        logger.debug('\t\t\t' + str(eps.id) + ': ' + str(eps))
def log_summary(self, product=None, engagement=None, test=None):
    """Debug-log each scope that was supplied; with no arguments, log every product."""
    if product:
        self.log_product(product)
    if engagement:
        self.log_engagement(engagement)
    if test:
        self.log_test(test)
    if not (product or engagement or test):
        self.log_all_products()
def copy_and_reset_finding(self, id):
    """Return (unsaved copy of the finding with dedupe state reset, freshly reloaded original)."""
    copy = Finding.objects.get(id=id)
    # clearing the pk makes Django INSERT a new row on the next save()
    copy.pk = None
    copy.duplicate = False
    copy.duplicate_finding = None
    copy.active = True
    copy.hash_code = None
    # the original must be reloaded because 'copy' started life as that same instance
    return copy, Finding.objects.get(id=id)
def copy_and_reset_finding_add_endpoints(self, id, static=False, dynamic=True):
    """Copy a finding, clear its file_path/line, and attach two fresh endpoints; returns (new, original)."""
    new_finding, original = self.copy_and_reset_finding(id=id)
    # endpoints replace file_path/line as the location information
    new_finding.file_path = None
    new_finding.line = None
    new_finding.static_finding = static
    new_finding.dynamic_finding = dynamic
    # save without dedupe so hash_code is not computed before the endpoints exist
    new_finding.save(dedupe_option=False)
    product = new_finding.test.engagement.product
    for host in ("myhost.com", "myhost2.com"):
        endpoint = Endpoint(product=product, finding=new_finding, host=host, protocol="https")
        endpoint.save()
        new_finding.endpoints.add(endpoint)
    return new_finding, original
def copy_and_reset_test(self, id):
    """Return (unsaved copy of the test, freshly reloaded original)."""
    copy = Test.objects.get(id=id)
    # clearing the pk makes Django INSERT a new row on the next save()
    copy.pk = None
    return copy, Test.objects.get(id=id)
def copy_and_reset_engagement(self, id):
    """Return (unsaved copy of the engagement, reloaded original engagement)."""
    clone = Engagement.objects.get(id=id)
    clone.pk = None  # clearing the pk turns the next save into an insert
    # reload the original because `clone` started out as that very instance
    return clone, Engagement.objects.get(id=id)
def assert_finding(self, finding, not_pk=None, duplicate=False, duplicate_finding_id=None, hash_code=None, not_hash_code=None):
    """Assert the dedupe-related state of a single finding.

    Only the checks whose keyword argument is truthy are performed; the
    `duplicate` flag itself is always compared.
    """
    if not_pk:
        self.assertNotEqual(finding.pk, not_pk)
    self.assertEqual(finding.duplicate, duplicate)
    if not duplicate:
        # a non-duplicate must not reference another finding
        self.assertFalse(finding.duplicate_finding)  # False -> None
    if duplicate_finding_id:
        logger.debug('asserting that finding %i is a duplicate of %i', finding.id, duplicate_finding_id)
        self.assertTrue(finding.duplicate_finding)  # True -> not None
        self.assertEqual(finding.duplicate_finding.id, duplicate_finding_id)
    if hash_code:
        self.assertEqual(finding.hash_code, hash_code)
    if not_hash_code:
        self.assertNotEqual(finding.hash_code, not_hash_code)
def set_dedupe_inside_engagement(self, deduplication_on_engagement):
    """Toggle the deduplication_on_engagement flag on every engagement."""
    for eng in Engagement.objects.all():
        # fixed typo in the log message ("engagment"); %s stringifies, no str() needed
        logger.debug('setting deduplication_on_engagement to %s for %i', deduplication_on_engagement, eng.id)
        eng.deduplication_on_engagement = deduplication_on_engagement
        eng.save()
def create_new_test_and_engagment_from_finding(self, finding):
    """Clone the finding's engagement and test; return (new_test, new_engagement)."""
    new_engagement, _ = self.copy_and_reset_engagement(id=finding.test.engagement.id)
    new_engagement.save()
    new_test, _ = self.copy_and_reset_test(id=finding.test.id)
    new_test.engagement = new_engagement
    new_test.save()
    return new_test, new_engagement
def enable_dedupe(self, enable=True):
    """Set the global enable_deduplication system setting."""
    settings = System_Settings.objects.get()
    settings.enable_deduplication = enable
    settings.save()
def enable_false_positive_history(self, enable=True):
    """Set the global false_positive_history system setting."""
    settings = System_Settings.objects.get()
    settings.false_positive_history = enable
    settings.save()
| 54.668831 | 229 | 0.70622 | 10,807 | 75,771 | 4.710373 | 0.048672 | 0.082507 | 0.021216 | 0.027031 | 0.824516 | 0.812396 | 0.797073 | 0.78841 | 0.768647 | 0.754189 | 0 | 0.045991 | 0.213723 | 75,771 | 1,385 | 230 | 54.708303 | 0.80845 | 0.346742 | 0 | 0.701005 | 0 | 0 | 0.034488 | 0.001241 | 0 | 0 | 0 | 0.000722 | 0.129397 | 1 | 0.10804 | false | 0 | 0.006281 | 0 | 0.123116 | 0.003769 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc51f966c1a825b8a92d1b1105ed65ebfddf37f2 | 7,644 | py | Python | analyze_foldamers/tests/test_torsion_clustering.py | shirtsgroup/analyze_foldamers | 17a7b948d1d0d4fbfb1d84d58753289404fb99a9 | [
"MIT"
] | null | null | null | analyze_foldamers/tests/test_torsion_clustering.py | shirtsgroup/analyze_foldamers | 17a7b948d1d0d4fbfb1d84d58753289404fb99a9 | [
"MIT"
] | 33 | 2020-08-05T23:00:56.000Z | 2022-03-21T22:37:03.000Z | analyze_foldamers/tests/test_torsion_clustering.py | shirtsgroup/analyze_foldamers | 17a7b948d1d0d4fbfb1d84d58753289404fb99a9 | [
"MIT"
] | null | null | null | """
Unit and regression test for the analyze_foldamers package.
"""
# Import package, test suite, and other packages as needed
import analyze_foldamers
import pytest
import sys
import os
import pickle
from cg_openmm.cg_model.cgmodel import CGModel
from analyze_foldamers.ensembles.cluster_torsion import *
# Test fixtures live in a 'test_data' directory next to this file.
current_path = os.path.dirname(os.path.abspath(__file__))
data_path = os.path.join(current_path, 'test_data')
def test_cluster_torsions_kmedoids_pdb(tmpdir):
    """Test KMedoids torsion clustering on PDB trajectories."""
    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel (context manager avoids leaking the file handle)
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as f:
        cgmodel = pickle.load(f)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 2
    frame_end = -1

    # Run KMedoids clustering
    medoid_positions, medoid_torsions, cluster_size, cluster_rmsd, silhouette_avg = \
        cluster_torsions_KMedoids(
            pdb_file_list,
            cgmodel,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_distance_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    assert len(cluster_rmsd) == n_clusters
    assert os.path.isfile(f"{output_directory}/medoid_1.pdb")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/torsion_distances_hist.pdf")
def test_cluster_torsions_kmedoids_dcd(tmpdir):
    """Test KMedoids torsion clustering on DCD trajectories."""
    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel (context manager avoids leaking the file handle)
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as f:
        cgmodel = pickle.load(f)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = [f"{data_path}/replica_{i+1}.dcd" for i in range(number_replicas)]

    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 2
    frame_end = -1

    # Run KMedoids clustering
    medoid_positions, medoid_torsions, cluster_size, cluster_rmsd, silhouette_avg = \
        cluster_torsions_KMedoids(
            dcd_file_list,
            cgmodel,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_distance_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    assert len(cluster_rmsd) == n_clusters
    assert os.path.isfile(f"{output_directory}/medoid_1.dcd")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/torsion_distances_hist.pdf")
def test_cluster_torsions_dbscan_pdb(tmpdir):
    """Test DBSCAN torsion clustering on PDB trajectories."""
    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel (context manager avoids leaking the file handle)
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as f:
        cgmodel = pickle.load(f)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 3
    eps = 50
    frame_start = 10
    frame_stride = 2
    frame_end = -1

    # Run DBSCAN density-based clustering
    medoid_positions, medoid_torsions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg = \
        cluster_torsions_DBSCAN(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_distance_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )

    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/torsion_distances_hist.pdf")
def test_cluster_torsions_dbscan_dcd(tmpdir):
    """Test DBSCAN torsion clustering on DCD trajectories."""
    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel (context manager avoids leaking the file handle)
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as f:
        cgmodel = pickle.load(f)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = [f"{data_path}/replica_{i+1}.dcd" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 3
    eps = 50
    frame_start = 10
    frame_stride = 2
    frame_end = -1

    # Run DBSCAN density-based clustering
    medoid_positions, medoid_torsions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg = \
        cluster_torsions_DBSCAN(
            dcd_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_distance_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )

    assert os.path.isfile(f"{output_directory}/medoid_0.dcd")
    assert os.path.isfile(f"{output_directory}/torsion_distances_hist.pdf")
def test_cluster_torsions_dbscan_dcd_core_medoids(tmpdir):
    """Test DBSCAN torsion clustering with medoids restricted to core points."""
    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel (context manager avoids leaking the file handle)
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as f:
        cgmodel = pickle.load(f)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = [f"{data_path}/replica_{i+1}.dcd" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 3
    eps = 50
    frame_start = 10
    frame_stride = 2
    frame_end = -1

    # Run DBSCAN density-based clustering
    medoid_positions, medoid_torsions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg = \
        cluster_torsions_DBSCAN(
            dcd_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_distance_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=True,
        )

    assert os.path.isfile(f"{output_directory}/medoid_0.dcd")
    assert os.path.isfile(f"{output_directory}/torsion_distances_hist.pdf")
fca35152a48107690b36d0c5c9b86a01ed6566f7 | 10,700 | py | Python | card_live_dashboard/test/unit/model/test_RGIParser.py | apetkau/card-live-dashboard | 02a2f5a840bf2c1f66362f593e8319cc35a4b45b | [
"Apache-2.0"
] | null | null | null | card_live_dashboard/test/unit/model/test_RGIParser.py | apetkau/card-live-dashboard | 02a2f5a840bf2c1f66362f593e8319cc35a4b45b | [
"Apache-2.0"
] | null | null | null | card_live_dashboard/test/unit/model/test_RGIParser.py | apetkau/card-live-dashboard | 02a2f5a840bf2c1f66362f593e8319cc35a4b45b | [
"Apache-2.0"
] | null | null | null | import pandas as pd
import numpy as np
from card_live_dashboard.model.RGIParser import RGIParser
_RGI_COLUMNS = ['filename', 'rgi_main.Cut_Off', 'rgi_main.Drug Class', 'rgi_main.Best_Hit_ARO']


def _build_rgi_df(rows):
    """Build an RGI results frame indexed by filename from the given rows."""
    return pd.DataFrame(columns=_RGI_COLUMNS, data=rows).set_index('filename')


# Regular results: several hits with drug classes plus one file with none.
RGI_DF = _build_rgi_df([
    ['file1', 'Perfect', 'class1; class2', 'gene1'],
    ['file1', 'Strict', 'class1; class2; class3', 'gene2'],
    ['file2', 'Perfect', 'class1; class2; class4', 'gene1'],
    ['file2', 'Perfect', 'class5', 'gene1'],
    ['file2', 'Perfect', '', 'gene1'],
    ['file3', None, None, None],
])
RGI_PARSER = RGIParser(RGI_DF)

# Files with None values (and one empty drug-class string).
RGI_DF_NONE = _build_rgi_df([
    ['file1', None, '', None],
    ['file2', None, None, None],
])
RGI_PARSER_NONE = RGIParser(RGI_DF_NONE)

# Files mixing None and pandas NA values.
RGI_DF_NA = _build_rgi_df([
    ['file1', None, '', pd.NA],
    ['file2', None, pd.NA, pd.NA],
])
RGI_PARSER_NA = RGIParser(RGI_DF_NA)

# Drug class present only as empty strings.
RGI_DF_ONLY_EMPTY_STRING = _build_rgi_df([
    ['file1', None, '', pd.NA],
    ['file2', None, '', pd.NA],
])
RGI_PARSER_ONLY_EMPTY_STRING = RGIParser(RGI_DF_ONLY_EMPTY_STRING)

# Drug class present only as NA/NaN values.
RGI_DF_ONLY_NA = _build_rgi_df([
    ['file1', None, pd.NA, pd.NA],
    ['file2', None, np.nan, pd.NA],
])
RGI_PARSER_ONLY_NA = RGIParser(RGI_DF_ONLY_NA)

# Every value is a numpy NaN.
RGI_DF_ONLY_NUMPY_NAN = _build_rgi_df([
    ['file1', np.nan, np.nan, np.nan],
    ['file2', np.nan, np.nan, np.nan],
])
RGI_PARSER_ONLY_NUMPY_NAN = RGIParser(RGI_DF_ONLY_NUMPY_NAN)
def test_all_drugs():
    """all_drugs() returns the union of drug classes over every file."""
    assert RGI_PARSER.all_drugs() == {'class1', 'class2', 'class3', 'class4', 'class5'}
def test_all_drugs_only_none():
    """No drug classes are reported when every value is None."""
    assert RGI_PARSER_NONE.all_drugs() == set()
def test_all_drugs_only_na():
    """No drug classes are reported when values are pandas NA."""
    assert RGI_PARSER_NA.all_drugs() == set()
def test_all_drugs_only_empty_string():
    """Empty drug-class strings contribute no drug classes."""
    assert RGI_PARSER_ONLY_EMPTY_STRING.all_drugs() == set()
def test_all_drugs_only_na_values():
    """NA/NaN-only drug-class columns contribute no drug classes."""
    assert RGI_PARSER_ONLY_NA.all_drugs() == set()
def test_all_drugs_empty():
    """An RGI frame with no rows at all yields no drug classes."""
    empty_df = pd.DataFrame(
        columns=['filename', 'rgi_main.Cut_Off', 'rgi_main.Drug Class', 'rgi_main.Best_Hit_ARO'],
        data=[]
    ).set_index('filename')
    assert RGIParser(empty_df).all_drugs() == set()
def test_all_amr_genes():
    """all_amr_genes() returns the union of best-hit AROs over every file."""
    assert RGI_PARSER.all_amr_genes() == {'gene1', 'gene2'}
def test_all_amr_genes_only_none():
    """No AMR genes are reported when every value is None."""
    assert RGI_PARSER_NONE.all_amr_genes() == set()
def test_all_amr_genes_only_na():
    """No AMR genes are reported when values are pandas NA."""
    assert RGI_PARSER_NA.all_amr_genes() == set()
def test_expand_drug_class():
    """explode_column splits each drug-class string into one row per class."""
    exploded = RGI_PARSER.explode_column('rgi_main.Drug Class')
    assert len(exploded) == 11
    assert exploded.index.tolist() == ['file1'] * 5 + ['file2'] * 5 + ['file3']

    counts = exploded['rgi_main.Drug Class'].groupby('filename').value_counts()
    assert counts['file1']['class1; class2'] == 2
    assert counts['file1']['class1; class2; class3'] == 3
    assert counts['file2']['class1; class2; class4'] == 3
    assert counts['file2']['class5'] == 1
    assert 'file3' not in counts

    assert exploded['rgi_main.Drug Class_exploded'].dropna().tolist() == [
        'class1', 'class2', 'class1', 'class2', 'class3',
        'class1', 'class2', 'class4', 'class5']
    assert pd.isna(exploded.loc['file3', 'rgi_main.Drug Class_exploded'])
def test_expand_drug_class_none():
    """Exploding a None-valued frame keeps one NA row per file."""
    exploded = RGI_PARSER_NONE.explode_column('rgi_main.Drug Class')
    assert len(exploded) == 2
    assert exploded.index.tolist() == ['file1', 'file2']
    assert exploded.loc['file1', 'rgi_main.Drug Class'] == ''
    assert exploded['rgi_main.Drug Class_exploded'].dropna().tolist() == []
    for fname in ('file1', 'file2'):
        assert pd.isna(exploded.loc[fname, 'rgi_main.Drug Class_exploded'])
def test_expand_drug_class_na():
    """Exploding a frame with pandas NA values keeps one NA row per file."""
    exploded = RGI_PARSER_NA.explode_column('rgi_main.Drug Class')
    assert len(exploded) == 2
    assert exploded.index.tolist() == ['file1', 'file2']
    assert exploded.loc['file1', 'rgi_main.Drug Class'] == ''
    assert pd.isna(exploded.loc['file2', 'rgi_main.Drug Class'])
    assert exploded['rgi_main.Drug Class_exploded'].dropna().tolist() == []
    for fname in ('file1', 'file2'):
        assert pd.isna(exploded.loc[fname, 'rgi_main.Drug Class_exploded'])
def test_expand_drug_class_only_empty_string():
    """Exploding a frame of empty drug-class strings yields only NA rows."""
    exploded = RGI_PARSER_ONLY_EMPTY_STRING.explode_column('rgi_main.Drug Class')
    assert len(exploded) == 2
    assert exploded.index.tolist() == ['file1', 'file2']
    for fname in ('file1', 'file2'):
        assert exploded.loc[fname, 'rgi_main.Drug Class'] == ''
    assert exploded['rgi_main.Drug Class_exploded'].dropna().tolist() == []
    for fname in ('file1', 'file2'):
        assert pd.isna(exploded.loc[fname, 'rgi_main.Drug Class_exploded'])
def test_expand_drug_class_only_na():
    """Exploding a frame of NA drug classes yields only NA rows."""
    exploded = RGI_PARSER_ONLY_NA.explode_column('rgi_main.Drug Class')
    assert len(exploded) == 2
    assert exploded.index.tolist() == ['file1', 'file2']
    for fname in ('file1', 'file2'):
        assert pd.isna(exploded.loc[fname, 'rgi_main.Drug Class'])
    assert exploded['rgi_main.Drug Class_exploded'].dropna().tolist() == []
    for fname in ('file1', 'file2'):
        assert pd.isna(exploded.loc[fname, 'rgi_main.Drug Class_exploded'])
def test_expand_drug_class_only_numpy_nan():
    """Exploding a frame of numpy NaN drug classes yields only NA rows."""
    exploded = RGI_PARSER_ONLY_NUMPY_NAN.explode_column('rgi_main.Drug Class')
    assert len(exploded) == 2
    assert exploded.index.tolist() == ['file1', 'file2']
    for fname in ('file1', 'file2'):
        assert pd.isna(exploded.loc[fname, 'rgi_main.Drug Class'])
    assert exploded['rgi_main.Drug Class_exploded'].dropna().tolist() == []
    for fname in ('file1', 'file2'):
        assert pd.isna(exploded.loc[fname, 'rgi_main.Drug Class_exploded'])
def test_select_by_drugclass_single1():
    """Selecting on class1 keeps every row of the files that contain it."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class', elements=['class1'])
    # removed a leftover debug print of the selected drug classes
    assert len(new_parser.df_rgi) == 5
    assert new_parser.df_rgi.index.tolist() == ['file1', 'file1', 'file2', 'file2', 'file2']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2', 'class1; class2; class3',
        'class1; class2; class4', 'class5', '']
def test_select_by_drugclass_single2():
    """Selecting on class2 keeps every row of the files that contain it."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class', elements=['class2'])
    assert len(new_parser.df_rgi) == 5
    assert new_parser.df_rgi.index.tolist() == ['file1', 'file1', 'file2', 'file2', 'file2']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2', 'class1; class2; class3',
        'class1; class2; class4', 'class5', '']
def test_select_by_drugclass_single3():
    """Selecting on class3 keeps only file1's rows."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class', elements=['class3'])
    assert len(new_parser.df_rgi) == 2
    assert new_parser.df_rgi.index.tolist() == ['file1', 'file1']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2', 'class1; class2; class3']
def test_select_by_drugclass_single4():
    """Selecting on class4 keeps only file2's rows."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class', elements=['class4'])
    assert len(new_parser.df_rgi) == 3
    assert new_parser.df_rgi.index.tolist() == ['file2', 'file2', 'file2']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2; class4', 'class5', '']
def test_select_by_drugclass_multiple_1_2():
    """Selecting on class1 and class2 keeps both files' rows."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class', elements=['class1', 'class2'])
    assert len(new_parser.df_rgi) == 5
    assert new_parser.df_rgi.index.tolist() == ['file1', 'file1', 'file2', 'file2', 'file2']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2', 'class1; class2; class3',
        'class1; class2; class4', 'class5', '']
def test_select_by_drugclass_multiple_1_3():
    """Selecting on class1 and class3 keeps only file1's rows."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class', elements=['class1', 'class3'])
    assert len(new_parser.df_rgi) == 2
    assert new_parser.df_rgi.index.tolist() == ['file1', 'file1']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2', 'class1; class2; class3']
def test_select_by_drugclass_multiple_1_2_3():
    """Selecting on class1, class2 and class3 keeps only file1's rows."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class',
        elements=['class1', 'class2', 'class3'])
    assert len(new_parser.df_rgi) == 2
    assert new_parser.df_rgi.index.tolist() == ['file1', 'file1']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2', 'class1; class2; class3']
def test_select_by_drugclass_multiple_4_5():
    """Selecting on class4 and class5 keeps only file2's rows."""
    new_parser = RGI_PARSER.select_by_elements_in_column_split(
        type='file', column='rgi_main.Drug Class', elements=['class4', 'class5'])
    assert len(new_parser.df_rgi) == 3
    assert new_parser.df_rgi.index.tolist() == ['file2', 'file2', 'file2']
    assert new_parser.df_rgi['rgi_main.Drug Class'].tolist() == [
        'class1; class2; class4', 'class5', '']
5d9602117504c709dca0049c1531c6be3474999f | 208 | py | Python | django_rest_admin/rest_admin.py | inmagik/django-rest-admin | 61c0d1a993ebcf144352e0ee0f916d9e63c1ccf7 | [
"BSD-3-Clause"
] | 15 | 2015-11-13T00:22:11.000Z | 2020-02-04T12:07:05.000Z | django_rest_admin/rest_admin.py | inmagik/django-rest-admin | 61c0d1a993ebcf144352e0ee0f916d9e63c1ccf7 | [
"BSD-3-Clause"
] | null | null | null | django_rest_admin/rest_admin.py | inmagik/django-rest-admin | 61c0d1a993ebcf144352e0ee0f916d9e63c1ccf7 | [
"BSD-3-Clause"
] | 5 | 2015-11-13T11:23:19.000Z | 2019-08-06T18:43:58.000Z | from django_rest_admin.register import rest_admin
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
# Expose the project's user model and the built-in Group model through
# the REST admin endpoints.
rest_admin.register(get_user_model())
rest_admin.register(Group)
| 29.714286 | 49 | 0.860577 | 33 | 208 | 5.151515 | 0.393939 | 0.211765 | 0.3 | 0.247059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072115 | 208 | 6 | 50 | 34.666667 | 0.880829 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5ddb98a7e091e0d6c1aee4687ee34c4601c55841 | 73 | py | Python | wifi_wrapper/__init__.py | hrithik098/wifi_wrapper | dd438aff2adddfd0780b38257efc45099df536a0 | [
"MIT"
] | 1 | 2021-11-28T10:42:50.000Z | 2021-11-28T10:42:50.000Z | wifi_wrapper/__init__.py | hrithik098/wifi_wrapper | dd438aff2adddfd0780b38257efc45099df536a0 | [
"MIT"
] | null | null | null | wifi_wrapper/__init__.py | hrithik098/wifi_wrapper | dd438aff2adddfd0780b38257efc45099df536a0 | [
"MIT"
] | null | null | null | from wifi_wrapper.wifi import WiFi
import wifi_wrapper.subprocess_wrapper | 36.5 | 38 | 0.90411 | 11 | 73 | 5.727273 | 0.454545 | 0.349206 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068493 | 73 | 2 | 38 | 36.5 | 0.926471 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f8f5010d6d15f93f4ffd7a817fce705dc7b4e9ad | 154 | py | Python | statdepth/testing/__init__.py | braingeneers/functional_depth_methods | 40bbd91fb1e154e5f6b9f21e76e6fb79976dad38 | [
"MIT"
] | 1 | 2021-01-18T21:45:02.000Z | 2021-01-18T21:45:02.000Z | statdepth/testing/__init__.py | braingeneers/functional_depth_methods | 40bbd91fb1e154e5f6b9f21e76e6fb79976dad38 | [
"MIT"
] | 1 | 2021-03-22T16:03:20.000Z | 2021-03-22T16:03:20.000Z | statdepth/testing/__init__.py | braingeneers/functional_depth_methods | 40bbd91fb1e154e5f6b9f21e76e6fb79976dad38 | [
"MIT"
] | null | null | null | from ._generating import generate_noisy_pointcloud
from ._generating import generate_noisy_univariate
from ._generating import generate_noisy_multivariate | 51.333333 | 52 | 0.909091 | 18 | 154 | 7.277778 | 0.444444 | 0.320611 | 0.458015 | 0.641221 | 0.755725 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071429 | 154 | 3 | 52 | 51.333333 | 0.916084 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
5d10a8099e5342c633c5ee706a9c0e1ded87db15 | 5,522 | py | Python | tests/test_path.py | gbingersoll/pysvglib | 30c0c95f5beeacf6bfdc0adfab0eab47dab497a4 | [
"MIT"
] | 2 | 2021-01-05T12:51:10.000Z | 2021-07-20T07:27:30.000Z | tests/test_path.py | gbingersoll/pysvglib | 30c0c95f5beeacf6bfdc0adfab0eab47dab497a4 | [
"MIT"
] | null | null | null | tests/test_path.py | gbingersoll/pysvglib | 30c0c95f5beeacf6bfdc0adfab0eab47dab497a4 | [
"MIT"
] | null | null | null | from svg.path import Path
def test_Path_init_makes_path_tag_with_starting_point_and_additional_params():
    """The constructor emits a move to the start point plus extra attributes."""
    assert str(Path((5, 15), style="xyz")) == '<path d="M 5 15" style="xyz"/>\n'
def test_Path_move_to_adds_a_move_command_to_the_path():
    """move_to appends an absolute 'M' command."""
    path = Path((0, 1), style="xyz").move_to((2, 3))
    assert str(path) == '<path d="M 0 1 M 2 3" style="xyz"/>\n'
def test_Path_move_to_adds_a_relative_move_command_to_the_path():
    """move_to with relative=True appends a lowercase 'm' command."""
    path = Path((0, 1), style="xyz")
    path.move_to((2, 3), relative=True)
    assert str(path) == '<path d="M 0 1 m 2 3" style="xyz"/>\n'
def test_Path_line_to_adds_a_line_command_to_the_path():
    """line_to appends an absolute 'L' command."""
    path = Path((0, 1), style="xyz").line_to((2, 3))
    assert str(path) == '<path d="M 0 1 L 2 3" style="xyz"/>\n'
def test_Path_line_to_adds_a_relative_line_command_to_the_path():
    """line_to with relative=True appends a lowercase 'l' command."""
    path = Path((0, 1), style="xyz")
    path.line_to((2, 3), relative=True)
    assert str(path) == '<path d="M 0 1 l 2 3" style="xyz"/>\n'
def test_Path_h_line_to_adds_a_horizontal_line_command_to_the_path():
    """h_line appends an absolute 'H' command."""
    path = Path((0, 1), style="xyz").h_line(5)
    assert str(path) == '<path d="M 0 1 H 5" style="xyz"/>\n'
def test_Path_h_line_to_adds_a_relative_horizontal_line_command_to_the_path():
    """h_line with relative=True appends a lowercase 'h' command."""
    path = Path((0, 1), style="xyz").h_line(7, relative=True)
    assert str(path) == '<path d="M 0 1 h 7" style="xyz"/>\n'
def test_Path_v_line_to_adds_a_vertical_line_command_to_the_path():
    """v_line appends an absolute 'V' command."""
    path = Path((0, 1), style="xyz").v_line(3.11)
    assert str(path) == '<path d="M 0 1 V 3.11" style="xyz"/>\n'
def test_Path_v_line_to_adds_a_relative_vertical_line_command_to_the_path():
    """v_line with relative=True appends a lowercase 'v' command."""
    path = Path((0, 1), style="xyz").v_line(-1.2, relative=True)
    assert str(path) == '<path d="M 0 1 v -1.2" style="xyz"/>\n'
def test_Path_close_adds_a_close_path_command_to_the_path():
    """close() appends 'Z' after both stepwise and chained construction."""
    stepwise = Path((0, 1))
    stepwise.v_line(-1.2, relative=True)
    stepwise.h_line(2.5, relative=True)
    stepwise.close()
    assert str(stepwise) == '<path d="M 0 1 v -1.2 h 2.5 Z"/>\n'

    chained = Path((0, 2)).h_line(2.5, relative=True).close()
    assert str(chained) == '<path d="M 0 2 h 2.5 Z"/>\n'
def test_Path_c_bezier_adds_a_cubic_bezier_command_to_the_path():
    """c_bezier with two control points appends an absolute 'C' command."""
    path = Path((0, 1), style='xyz').c_bezier((1, 2), (3, 4), (5, 6))
    assert str(path) == '<path d="M 0 1 C 1 2, 3 4, 5 6" style="xyz"/>\n'
def test_Path_c_bezier_adds_a_relative_cubic_bezier_command_to_the_path():
    """c_bezier with relative=True appends a lowercase 'c' command."""
    path = Path((0, 1), style='xyz').c_bezier((1, 2), (3, 4), (5, 6), relative=True)
    assert str(path) == '<path d="M 0 1 c 1 2, 3 4, 5 6" style="xyz"/>\n'
def test_Path_c_bezier_adds_smooth_bezier_command_if_no_first_point():
    """Passing None as the first control point emits a smooth 'S' command."""
    path = Path((0, 1), style='xyz').c_bezier(None, (3, 4), (5, 6))
    assert str(path) == '<path d="M 0 1 S 3 4, 5 6" style="xyz"/>\n'
def test_Path_c_bezier_adds_relative_smooth_bezier_command_if_no_first_point():
    """None first control point plus relative=True emits a lowercase 's'."""
    path = Path((0, 1), style='xyz')
    path.c_bezier(None, (3, 4), (5, 6), relative=True)
    assert str(path) == '<path d="M 0 1 s 3 4, 5 6" style="xyz"/>\n'
def test_Path_q_bezier_adds_quadratic_bezier_command():
    """q_bezier with a control point appends an absolute 'Q' command."""
    path = Path((0, 1), style='xyz').q_bezier((3, 4), (5, 6))
    assert str(path) == '<path d="M 0 1 Q 3 4, 5 6" style="xyz"/>\n'
def test_Path_q_bezier_adds_relative_quadratic_bezier_command():
    """q_bezier with relative=True appends a lowercase 'q' command."""
    path = Path((0, 1), style='xyz').q_bezier((3, 4), (5, 6), relative=True)
    assert str(path) == '<path d="M 0 1 q 3 4, 5 6" style="xyz"/>\n'
def test_Path_q_bezier_adds_smooth_quadratic_bezier_command_if_no_ctrl_pt():
    """Passing None as the control point emits a smooth 'T' command."""
    path = Path((0, 1), style='xyz').q_bezier(None, (5, 6))
    assert str(path) == '<path d="M 0 1 T 5 6" style="xyz"/>\n'
def test_Path_q_bezier_adds_relative_smooth_quad_bezier_cmd_if_no_ctrl_pt():
    """None control point plus relative=True emits a lowercase 't'."""
    path = Path((0, 1), style='xyz')
    path.q_bezier(None, (5, 6), relative=True)
    assert str(path) == '<path d="M 0 1 t 5 6" style="xyz"/>\n'
def test_Path_arc_to_adds_arc_command_to_the_path():
    """arc_to emits 'A' with the large-arc and sweep flags rendered as 0/1."""
    for large_arc, sweep in ((False, False), (False, True), (True, False), (True, True)):
        path = Path((0, 1), style='xyz')
        path.arc_to((13, 14), -30, large_arc, sweep, (15, 16), relative=False)
        expected = (f'<path d="M 0 1 A 13 14 -30 {int(large_arc)} {int(sweep)} '
                    f'15 16" style="xyz"/>\n')
        assert str(path) == expected
def test_Path_arc_to_adds_relative_arc_command_to_the_path():
    """arc_to with relative=True emits a lowercase 'a' with 0/1 flags."""
    for large_arc, sweep in ((False, False), (False, True), (True, False), (True, True)):
        path = Path((0, 1), style='xyz')
        path.arc_to((13, 14), -30, large_arc, sweep, (15, 16), relative=True)
        expected = (f'<path d="M 0 1 a 13 14 -30 {int(large_arc)} {int(sweep)} '
                    f'15 16" style="xyz"/>\n')
        assert str(path) == expected
| 39.442857 | 79 | 0.62586 | 1,078 | 5,522 | 2.978664 | 0.064935 | 0.166926 | 0.109312 | 0.142946 | 0.922765 | 0.91062 | 0.876985 | 0.872314 | 0.847088 | 0.817191 | 0 | 0.086726 | 0.181456 | 5,522 | 139 | 80 | 39.726619 | 0.623673 | 0 | 0 | 0.193878 | 0 | 0.102041 | 0.221297 | 0 | 0 | 0 | 0 | 0 | 0.27551 | 1 | 0.204082 | false | 0 | 0.010204 | 0 | 0.214286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5d11442a6a99aec395bd6d5e137676b36130fe5f | 9,086 | py | Python | tests/test_train.py | PonteIneptique/kraken | 0f82bbb9717eb2e5b9ffb6db6e842a1c1008c7f7 | [
"Apache-2.0"
] | null | null | null | tests/test_train.py | PonteIneptique/kraken | 0f82bbb9717eb2e5b9ffb6db6e842a1c1008c7f7 | [
"Apache-2.0"
] | null | null | null | tests/test_train.py | PonteIneptique/kraken | 0f82bbb9717eb2e5b9ffb6db6e842a1c1008c7f7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import unittest
import json
import kraken
from pytest import raises
from pathlib import Path
from kraken.lib import xml
from kraken.lib.train import KrakenTrainer, RecognitionModel, SegmentationModel
from kraken.lib.exceptions import KrakenInputException
# Test fixtures live in a 'resources' directory next to this file.
thisfile = Path(__file__).resolve().parent
resources = thisfile / 'resources'
class TestKrakenTrainer(unittest.TestCase):
"""
Tests for KrakenTrainer class
"""
def setUp(self):
    """Load the shared fixtures from the resources directory."""
    # XML ground truth file and its parsed representation.
    self.xml = resources / '170025120000003,0074.xml'
    self.bls = xml.parse_page(self.xml)
    # A single box-style line image and a small pretrained model.
    self.box_lines = [resources / '000236.png']
    self.model = resources / 'model_small.mlmodel'
def test_krakentrainer_rec_box_load_fail(self):
    """
    Tests that the proper exception is raised when loading model not fitting the box dataset.
    """
    module = RecognitionModel(format_type='path',
                              model=self.model,
                              training_data=self.box_lines,
                              evaluation_data=self.box_lines,
                              resize='fail')
    with raises(KrakenInputException):
        module.setup()
def test_krakentrainer_rec_bl_load_fail(self):
    """
    Tests that the proper exception is raised when loading model not fitting the dataset.
    """
    module = RecognitionModel(format_type='xml',
                              model=self.model,
                              training_data=[self.xml],
                              evaluation_data=[self.xml],
                              resize='fail')
    with raises(KrakenInputException):
        module.setup()
def test_krakentrainer_rec_box_load_add(self):
    """
    Tests that adaptation works in add mode.
    """
    module = RecognitionModel(format_type='path',
                              model=self.model,
                              training_data=self.box_lines,
                              evaluation_data=self.box_lines,
                              resize='add')
    module.setup()
    self.assertEqual(module.nn.seg_type, 'bbox')
    self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.GroundTruthDataset)
    trainer = KrakenTrainer(max_steps=1)  # instantiated only; training is not run here
    # expected size of the final output layer after 'add' resizing
    self.assertEqual(module.nn.named_spec[-1].split("c")[-1], '19')
def test_krakentrainer_rec_box_load_both(self):
"""
Tests that adaptation works in both mode.
"""
training_data = self.box_lines
evaluation_data = self.box_lines
module = RecognitionModel(format_type='path',
model=self.model,
training_data=training_data,
evaluation_data=evaluation_data,
resize='both')
module.setup()
self.assertEqual(module.nn.seg_type, 'bbox')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.GroundTruthDataset)
trainer = KrakenTrainer(max_steps=1)
self.assertEqual(module.nn.named_spec[-1].split("c")[-1], '16')
def test_krakentrainer_rec_box_append(self):
"""
Tests that appending new layers onto a loaded model works.
"""
training_data = self.box_lines
evaluation_data = self.box_lines
module = RecognitionModel(format_type='path',
model=self.model,
append=1,
spec='[Cr4,4,32]',
training_data=training_data,
evaluation_data=evaluation_data)
module.setup()
self.assertEqual(module.nn.seg_type, 'bbox')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.GroundTruthDataset)
self.assertTrue(module.nn.spec.startswith('[1,48,0,1 Cr{C_0}4,2,1,4,2 Cr{C_1}4,4,32 O{O_2}'))
trainer = KrakenTrainer(max_steps=1)
def test_krakentrainer_rec_bl_load(self):
training_data = [self.xml]
evaluation_data = [self.xml]
module = RecognitionModel(format_type='xml',
model=self.model,
training_data=training_data,
evaluation_data=evaluation_data,
resize='fail')
with raises(KrakenInputException):
module.setup()
def test_krakentrainer_rec_bl_load_add(self):
training_data = [self.xml]
evaluation_data = [self.xml]
module = RecognitionModel(format_type='xml',
model=self.model,
training_data=training_data,
evaluation_data=evaluation_data,
resize='add')
module.setup()
self.assertEqual(module.nn.seg_type, 'baselines')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.PolygonGTDataset)
trainer = KrakenTrainer(max_steps=1)
self.assertEqual(module.nn.named_spec[-1].split("c")[-1], '60')
def test_krakentrainer_rec_bl_load_both(self):
training_data = [self.xml]
evaluation_data = [self.xml]
module = RecognitionModel(format_type='xml',
model=self.model,
training_data=training_data,
evaluation_data=evaluation_data,
resize='both')
module.setup()
self.assertEqual(module.nn.seg_type, 'baselines')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.PolygonGTDataset)
trainer = KrakenTrainer(max_steps=1)
self.assertEqual(module.nn.named_spec[-1].split("c")[-1], '60')
def test_krakentrainer_rec_bl_append(self):
training_data = [self.xml]
evaluation_data = [self.xml]
module = RecognitionModel(format_type='xml',
model=self.model,
append=1,
spec='[Cr4,4,32]',
training_data=training_data,
evaluation_data=evaluation_data)
module.setup()
self.assertEqual(module.nn.seg_type, 'baselines')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.PolygonGTDataset)
self.assertTrue(module.nn.spec.startswith('[1,48,0,1 Cr{C_0}4,2,1,4,2 Cr{C_1}4,4,32 O{O_2}'))
trainer = KrakenTrainer(max_steps=1)
def test_krakentrainer_rec_box_path(self):
"""
Tests recognition trainer constructor with legacy path training data.
"""
training_data = self.box_lines
evaluation_data = self.box_lines
module = RecognitionModel(format_type='path',
training_data=training_data,
evaluation_data=evaluation_data)
module.setup()
self.assertEqual(module.nn.seg_type, 'bbox')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.GroundTruthDataset)
trainer = KrakenTrainer(max_steps=1)
def test_krakentrainer_rec_bl_xml(self):
"""
Tests recognition trainer constructor with XML training data.
"""
training_data = [self.xml]
evaluation_data = [self.xml]
module = RecognitionModel(format_type='xml',
training_data=training_data,
evaluation_data=evaluation_data)
module.setup()
self.assertEqual(module.nn.seg_type, 'baselines')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.PolygonGTDataset)
self.assertEqual(len(module.train_set.dataset), 44)
self.assertEqual(len(module.val_set.dataset), 44)
trainer = KrakenTrainer(max_steps=1)
def test_krakentrainer_rec_bl_dict(self):
"""
Tests recognition trainer constructor with dictionary style training data.
"""
training_data = [{'image': resources / 'bw.png', 'text': 'foo', 'baseline': [[10, 10], [300, 10]], 'boundary': [[10, 5], [300, 5], [300, 15], [10, 15]]}]
evaluation_data = [{'image': resources / 'bw.png', 'text': 'foo', 'baseline': [[10, 10], [300, 10]], 'boundary': [[10, 5], [300, 5], [300, 15], [10, 15]]}]
module = RecognitionModel(format_type=None,
training_data=training_data,
evaluation_data=evaluation_data)
module.setup()
self.assertEqual(module.nn.seg_type, 'baselines')
self.assertIsInstance(module.train_set.dataset, kraken.lib.dataset.PolygonGTDataset)
trainer = KrakenTrainer(max_steps=1)
| 44.980198 | 163 | 0.577922 | 930 | 9,086 | 5.437634 | 0.141935 | 0.092545 | 0.085426 | 0.071188 | 0.833894 | 0.822029 | 0.770417 | 0.770417 | 0.770417 | 0.770417 | 0 | 0.024243 | 0.323575 | 9,086 | 201 | 164 | 45.20398 | 0.798568 | 0.053599 | 0 | 0.778481 | 0 | 0.012658 | 0.045239 | 0.00285 | 0 | 0 | 0 | 0 | 0.164557 | 1 | 0.082278 | false | 0 | 0.050633 | 0 | 0.139241 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5d1f1015833b9b555d60fdbfdd8b97d7b75f270d | 7,050 | gyp | Python | third_party/android_protobuf/android_protobuf.gyp | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2019-11-28T10:46:52.000Z | 2019-11-28T10:46:52.000Z | third_party/android_protobuf/android_protobuf.gyp | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | third_party/android_protobuf/android_protobuf.gyp | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2 | 2015-03-27T11:15:39.000Z | 2016-08-17T14:19:56.000Z | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# GYP build definitions for the Android fork of protobuf: a Java "nano"
# runtime library and a host-side protoc binary with nano/micro support.
{
  'conditions': [
    ['OS=="android"', {
      'targets': [
        {
          # Java runtime library for the protobuf nano profile.
          'target_name': 'protobuf_nano_javalib',
          'type' : 'none',
          'variables': {
            # Using empty dir and additionalk_src_dirs since the nano package
            # does not have a src/ subfolder.
            'java_in_dir': '../../build/android/empty',
            'additional_src_dirs': [ 'src/java/src/main/java/com/google/protobuf/nano' ],
          },
          'includes': [ '../../build/java.gypi' ],
        },
        {
          # This proto compiler supports the nano profile, but should only be used for Android.
          'target_name': 'android_protoc',
          'type': 'executable',
          'variables': {
            'chromium_code': 0,
          },
          # Built for the host, not the Android target.
          'toolsets': [ 'host' ],
          'sources': [
            # Core protobuf runtime.
            'src/src/google/protobuf/descriptor.cc',
            'src/src/google/protobuf/descriptor.pb.cc',
            'src/src/google/protobuf/descriptor_database.cc',
            'src/src/google/protobuf/dynamic_message.cc',
            'src/src/google/protobuf/extension_set.cc',
            'src/src/google/protobuf/extension_set_heavy.cc',
            'src/src/google/protobuf/generated_message_reflection.cc',
            'src/src/google/protobuf/generated_message_util.cc',
            'src/src/google/protobuf/message.cc',
            'src/src/google/protobuf/message_lite.cc',
            'src/src/google/protobuf/reflection_ops.cc',
            'src/src/google/protobuf/repeated_field.cc',
            'src/src/google/protobuf/service.cc',
            'src/src/google/protobuf/text_format.cc',
            'src/src/google/protobuf/unknown_field_set.cc',
            'src/src/google/protobuf/wire_format.cc',
            'src/src/google/protobuf/wire_format_lite.cc',
            # Compiler front end and plumbing.
            'src/src/google/protobuf/compiler/code_generator.cc',
            'src/src/google/protobuf/compiler/command_line_interface.cc',
            'src/src/google/protobuf/compiler/importer.cc',
            'src/src/google/protobuf/compiler/main.cc',
            'src/src/google/protobuf/compiler/parser.cc',
            'src/src/google/protobuf/compiler/plugin.cc',
            'src/src/google/protobuf/compiler/plugin.pb.cc',
            'src/src/google/protobuf/compiler/subprocess.cc',
            'src/src/google/protobuf/compiler/zip_writer.cc',
            # C++ code generator.
            'src/src/google/protobuf/compiler/cpp/cpp_enum.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_enum_field.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_extension.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_field.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_file.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_generator.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_helpers.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_message.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_message_field.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_service.cc',
            'src/src/google/protobuf/compiler/cpp/cpp_string_field.cc',
            # Java code generator.
            'src/src/google/protobuf/compiler/java/java_enum.cc',
            'src/src/google/protobuf/compiler/java/java_enum_field.cc',
            'src/src/google/protobuf/compiler/java/java_extension.cc',
            'src/src/google/protobuf/compiler/java/java_field.cc',
            'src/src/google/protobuf/compiler/java/java_file.cc',
            'src/src/google/protobuf/compiler/java/java_generator.cc',
            'src/src/google/protobuf/compiler/java/java_helpers.cc',
            'src/src/google/protobuf/compiler/java/java_message.cc',
            'src/src/google/protobuf/compiler/java/java_message_field.cc',
            'src/src/google/protobuf/compiler/java/java_primitive_field.cc',
            'src/src/google/protobuf/compiler/java/java_service.cc',
            # Java micro-profile code generator.
            'src/src/google/protobuf/compiler/javamicro/javamicro_enum.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_enum_field.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_field.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_file.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_generator.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_helpers.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_message.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_message_field.cc',
            'src/src/google/protobuf/compiler/javamicro/javamicro_primitive_field.cc',
            # Java nano-profile code generator.
            'src/src/google/protobuf/compiler/javanano/javanano_enum.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_enum_field.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_extension.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_field.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_file.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_generator.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_helpers.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_message.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_message_field.cc',
            'src/src/google/protobuf/compiler/javanano/javanano_primitive_field.cc',
            # Python code generator.
            'src/src/google/protobuf/compiler/python/python_generator.cc',
            # I/O primitives.
            'src/src/google/protobuf/io/coded_stream.cc',
            'src/src/google/protobuf/io/gzip_stream.cc',
            'src/src/google/protobuf/io/printer.cc',
            'src/src/google/protobuf/io/tokenizer.cc',
            'src/src/google/protobuf/io/zero_copy_stream.cc',
            'src/src/google/protobuf/io/zero_copy_stream_impl.cc',
            'src/src/google/protobuf/io/zero_copy_stream_impl_lite.cc',
            # Portability stubs.
            'src/src/google/protobuf/stubs/common.cc',
            'src/src/google/protobuf/stubs/hash.cc',
            'src/src/google/protobuf/stubs/once.cc',
            'src/src/google/protobuf/stubs/structurally_valid.cc',
            'src/src/google/protobuf/stubs/strutil.cc',
            'src/src/google/protobuf/stubs/substitute.cc',
          ],
          'include_dirs': [
            'src/android',
            'src/src',
          ],
          'cflags': [
            '-Wno-null-conversion',
            '-Wno-tautological-undefined-compare',
          ],
          'defines': [
            # This macro must be defined to suppress the use
            # of dynamic_cast<>, which requires RTTI.
            'GOOGLE_PROTOBUF_NO_RTTI',
            'GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER',
          ],
          'dependencies': [
            # gzip_stream.cc requires zlib.
            '../zlib/zlib.gyp:zlib',
          ],
        },
      ],
    }],
  ],
}
| 52.61194 | 95 | 0.62766 | 842 | 7,050 | 5.123515 | 0.182898 | 0.275846 | 0.228095 | 0.380158 | 0.771905 | 0.754752 | 0.603384 | 0.487483 | 0.356977 | 0.019471 | 0 | 0.000925 | 0.233617 | 7,050 | 133 | 96 | 53.007519 | 0.79752 | 0.059858 | 0 | 0.072581 | 0 | 0 | 0.701919 | 0.663998 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.008065 | 0 | 0.008065 | 0.008065 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5d391e5d34ea71d4486e7b2e26e053fd473de79d | 122 | py | Python | flask/config.py | hacktx/prime2016 | 5b04129328a34a57a47b359c4773f24008744497 | [
"MIT",
"Unlicense"
] | null | null | null | flask/config.py | hacktx/prime2016 | 5b04129328a34a57a47b359c4773f24008744497 | [
"MIT",
"Unlicense"
] | null | null | null | flask/config.py | hacktx/prime2016 | 5b04129328a34a57a47b359c4773f24008744497 | [
"MIT",
"Unlicense"
] | null | null | null | import os
GITHUB_CLIENT_ID = os.environ['GITHUB_CLIENT_ID']
GITHUB_CLIENT_SECRET = os.environ['GITHUB_CLIENT_SECRET']
| 24.4 | 57 | 0.803279 | 18 | 122 | 5 | 0.388889 | 0.533333 | 0.311111 | 0.466667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07377 | 122 | 4 | 58 | 30.5 | 0.79646 | 0 | 0 | 0 | 0 | 0 | 0.295082 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.333333 | null | null | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
5d44acaf9fabfea2527ae895b072a63029edd1c4 | 131 | py | Python | faker/providers/user_agent/en_US/__init__.py | jacksmith15/faker | bc5dda1983e4d055aa2698ccf0806a462cb8370e | [
"MIT"
] | 12,077 | 2015-01-01T18:30:07.000Z | 2022-03-31T23:22:01.000Z | faker/providers/user_agent/en_US/__init__.py | jacksmith15/faker | bc5dda1983e4d055aa2698ccf0806a462cb8370e | [
"MIT"
] | 1,306 | 2015-01-03T05:18:55.000Z | 2022-03-31T02:43:04.000Z | faker/providers/user_agent/en_US/__init__.py | jacksmith15/faker | bc5dda1983e4d055aa2698ccf0806a462cb8370e | [
"MIT"
] | 1,855 | 2015-01-08T14:20:10.000Z | 2022-03-25T17:23:32.000Z | from .. import Provider as UserAgentProvider # pragma: no cover
class Provider(UserAgentProvider):  # pragma: no cover
    """en_US user-agent provider.

    Inherits everything unchanged from the default-locale
    UserAgentProvider; this subclass only anchors the locale module.
    """
    pass
| 21.833333 | 64 | 0.740458 | 15 | 131 | 6.466667 | 0.666667 | 0.474227 | 0.515464 | 0.618557 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.19084 | 131 | 5 | 65 | 26.2 | 0.915094 | 0.251908 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 9 |
5d55f8e93bea1439b5b5b29688bf433a2a08d46a | 17,488 | py | Python | deepcell_retinamask/image_generators_test.py | vanvalenlab/deepcell-retinamask | c922d4d836e881270da8b43c420c60d365883639 | [
"Apache-2.0"
] | null | null | null | deepcell_retinamask/image_generators_test.py | vanvalenlab/deepcell-retinamask | c922d4d836e881270da8b43c420c60d365883639 | [
"Apache-2.0"
] | 2 | 2021-11-26T11:18:44.000Z | 2022-01-21T11:29:41.000Z | deepcell_retinamask/image_generators_test.py | vanvalenlab/deepcell-retinamask | c922d4d836e881270da8b43c420c60d365883639 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016-2021 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/deepcell-retinamask/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for RetinaMask data generators"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.keras import backend as K
from tensorflow.keras.preprocessing.image import array_to_img
from tensorflow.keras.preprocessing.image import img_to_array
from tensorflow.python.platform import test
from deepcell_retinamask import image_generators
def _generate_test_images(img_w=21, img_h=21):
    """Return [rgb_images, gray_images]: 8 random RGB and 8 random
    grayscale PIL images of size (img_w, img_h) each, sharing per-pair
    random bias and variance maps."""
    rgb_images = []
    gray_images = []
    for _ in range(8):
        bias = np.random.rand(img_w, img_h, 1) * 64
        variance = np.random.rand(img_w, img_h, 1) * (255 - 64)
        for channels, bucket in ((3, rgb_images), (1, gray_images)):
            imarray = np.random.rand(img_w, img_h, channels) * variance + bias
            bucket.append(array_to_img(imarray, scale=False))
    return [rgb_images, gray_images]
class TestRetinaNetDataGenerator(test.TestCase):
    """Tests for the RetinaNetGenerator image data generator."""

    def test_retinanet_data_generator(self):
        """Flow channels_last data through RetinaNetGenerator and check
        the structure of the generated inputs and targets."""
        for test_images in _generate_test_images(21, 21):
            img_list = []
            for im in test_images:
                img_list.append(img_to_array(im)[None, ...])
            images = np.vstack(img_list)
            generator = image_generators.RetinaNetGenerator(
                featurewise_center=True,
                samplewise_center=True,
                featurewise_std_normalization=True,
                samplewise_std_normalization=True,
                zca_whitening=True,
                rotation_range=90.,
                width_shift_range=0.1,
                height_shift_range=0.1,
                shear_range=0.5,
                zoom_range=0.2,
                channel_shift_range=1.,
                brightness_range=(1, 5),
                fill_mode='nearest',
                cval=0.5,
                horizontal_flip=True,
                vertical_flip=True)
            num_classes = np.random.randint(1, 3)
            # Basic test before fit
            train_dict = {
                'X': np.random.random((8, 10, 10, 3)),
                'y': np.random.random((8, 10, 10, 1)),
            }
            generator.flow(train_dict, num_classes=num_classes)
            # Temp dir to save generated images
            temp_dir = self.get_temp_dir()
            # Fit
            generator.fit(images, augment=True, seed=1)
            y_shape = tuple(list(images.shape)[:-1] + [1])
            train_dict['X'] = images
            train_dict['y'] = np.random.randint(0, 9, size=y_shape)
            for x, y in generator.flow(
                    train_dict,
                    num_classes=num_classes,
                    include_bbox=True,
                    include_masks=True,
                    save_to_dir=temp_dir,
                    shuffle=True):
                self.assertIsInstance(x, dict)
                self.assertEqual('input' in x, True)
                self.assertEqual('boxes_input' in x, True)
                self.assertEqual(x['input'].shape[1:], images.shape[1:])
                self.assertIsInstance(y, dict)
                self.assertEqual('regression' in y, True)
                self.assertEqual('classification' in y, True)
                r = y['regression']
                l = y['classification']
                # Regression and classification targets share all but the
                # last dimension.
                self.assertEqual(r.shape[:-1], l.shape[:-1])
                self.assertEqual(r.shape[-1], 5)
                # presumably the extra column is a background/anchor-state
                # class — confirm against the generator implementation.
                self.assertEqual(l.shape[-1], num_classes + 1)
                # Only one batch is needed; the generator is infinite.
                break

    def test_retinanet_data_generator_channels_first(self):
        """Same as test_retinanet_data_generator but with channels_first
        ordering (channel axis moved to position 1)."""
        for test_images in _generate_test_images(21, 21):
            img_list = []
            for im in test_images:
                img_list.append(img_to_array(im)[None, ...])
            images = np.vstack(img_list)
            images = np.rollaxis(images, 3, 1)
            generator = image_generators.RetinaNetGenerator(
                featurewise_center=True,
                samplewise_center=True,
                featurewise_std_normalization=True,
                samplewise_std_normalization=True,
                zca_whitening=True,
                rotation_range=90.,
                width_shift_range=0.1,
                height_shift_range=0.1,
                shear_range=0.5,
                zoom_range=0.2,
                channel_shift_range=1.,
                # brightness_range=(1, 5),  # TODO: `channels_first` conflict
                fill_mode='nearest',
                cval=0.5,
                horizontal_flip=True,
                vertical_flip=True,
                data_format='channels_first')
            num_classes = np.random.randint(1, 3)
            # Basic test before fit
            train_dict = {
                'X': np.random.random((8, 3, 10, 10)),
                'y': np.random.random((8, 1, 10, 10)),
            }
            generator.flow(train_dict, num_classes=num_classes)
            # Temp dir to save generated images
            temp_dir = self.get_temp_dir()
            # Fit
            generator.fit(images, augment=True, seed=1)
            # Single label channel in axis 1 for channels_first.
            y_shape = tuple([images.shape[0], 1] + list(images.shape)[2:])
            train_dict['X'] = images
            train_dict['y'] = np.random.randint(0, 9, size=y_shape)
            for x, y in generator.flow(
                    train_dict,
                    num_classes=num_classes,
                    include_bbox=True,
                    include_masks=True,
                    save_to_dir=temp_dir,
                    shuffle=True):
                self.assertIsInstance(x, dict)
                self.assertEqual('input' in x, True)
                self.assertEqual('boxes_input' in x, True)
                self.assertEqual(x['input'].shape[1:], images.shape[1:])
                self.assertIsInstance(y, dict)
                self.assertEqual('regression' in y, True)
                self.assertEqual('classification' in y, True)
                r = y['regression']
                l = y['classification']
                self.assertEqual(r.shape[:-1], l.shape[:-1])
                self.assertEqual(r.shape[-1], 5)
                self.assertEqual(l.shape[-1], num_classes + 1)
                break

    def test_retinanet_data_generator_invalid_data(self):
        """Verify that invalid inputs raise the appropriate errors."""
        generator = image_generators.RetinaNetGenerator(
            featurewise_center=True,
            samplewise_center=True,
            featurewise_std_normalization=True,
            samplewise_std_normalization=True,
            zca_whitening=True,
            data_format='channels_last')
        # Test fit with invalid data
        with self.assertRaises(ValueError):
            x = np.random.random((3, 10, 10))
            generator.fit(x)
        # Test flow with invalid dimensions
        with self.assertRaises(ValueError):
            train_dict = {
                'X': np.random.random((8, 10, 10)),
                'y': np.random.random((8, 10, 10))
            }
            generator.flow(train_dict)
        # Test flow with non-matching batches
        with self.assertRaises(Exception):
            train_dict = {
                'X': np.random.random((8, 10, 10, 1)),
                'y': np.random.random((7, 10, 10, 1))
            }
            generator.flow(train_dict)
        # Invalid number of channels: will work but raise a warning
        generator.fit(np.random.random((8, 10, 10, 5)))
        with self.assertRaises(ValueError):
            generator = image_generators.RetinaNetGenerator(
                data_format='unknown')
        generator = image_generators.RetinaNetGenerator(
            zoom_range=(2, 2))
        with self.assertRaises(ValueError):
            generator = image_generators.RetinaNetGenerator(
                zoom_range=(2, 2, 2))
class TestRetinaMovieDataGenerator(test.TestCase):
    """Tests for the RetinaMovieDataGenerator (temporal image stacks)."""

    def test_retinamovie_data_generator(self):
        """Flow channels_last movie data through RetinaMovieDataGenerator
        and check the structure of the generated inputs and targets."""
        frames = 7
        frames_per_batch = 5
        for test_images in _generate_test_images(21, 21):
            img_list = []
            for im in test_images:
                # Replicate each still image across `frames` time steps.
                frame_list = []
                for _ in range(frames):
                    frame_list.append(img_to_array(im)[None, ...])
                img_stack = np.vstack(frame_list)
                img_list.append(img_stack)
            images = np.vstack(img_list)
            # Reshape flat frame stack to (batch, frames, H, W, C).
            batches = images.shape[0] // frames
            images = np.reshape(images, tuple([batches, frames] +
                                              list(images.shape[1:])))
            generator = image_generators.RetinaMovieDataGenerator(
                featurewise_center=True,
                samplewise_center=True,
                featurewise_std_normalization=True,
                samplewise_std_normalization=True,
                zca_whitening=True,
                rotation_range=90.,
                width_shift_range=0.1,
                height_shift_range=0.1,
                shear_range=0.5,
                zoom_range=0.2,
                channel_shift_range=1.,
                brightness_range=(1, 5),
                fill_mode='nearest',
                cval=0.5,
                horizontal_flip=True,
                vertical_flip=True)
            num_classes = np.random.randint(1, 3)
            # Basic test before fit
            train_dict = {
                'X': np.random.random((8, 11, 10, 10, 3)),
                'y': np.random.random((8, 11, 10, 10, 1)),
            }
            generator.flow(train_dict, num_classes=num_classes)
            # Temp dir to save generated images
            temp_dir = self.get_temp_dir()
            # Fit
            # generator.fit(images, augment=True, seed=1)
            y_shape = tuple(list(images.shape)[:-1] + [1])
            train_dict['X'] = images
            train_dict['y'] = np.random.randint(0, 9, size=y_shape)
            for x, y in generator.flow(
                    train_dict,
                    frames_per_batch=frames_per_batch,
                    num_classes=num_classes,
                    include_bbox=True,
                    include_masks=True,
                    save_to_dir=temp_dir,
                    shuffle=True):
                # The time axis (1) is cropped to frames_per_batch.
                expected = list(images.shape)
                expected[1] = frames_per_batch
                expected = tuple(expected)
                self.assertIsInstance(x, dict)
                self.assertEqual('input' in x, True)
                self.assertEqual('boxes_input' in x, True)
                self.assertEqual(x['input'].shape[1:], expected[1:])
                self.assertIsInstance(y, dict)
                self.assertEqual('regression' in y, True)
                self.assertEqual('classification' in y, True)
                r = y['regression']
                l = y['classification']
                self.assertEqual(r.shape[:-1], l.shape[:-1])
                self.assertEqual(r.shape[-1], 5)
                self.assertEqual(l.shape[-1], num_classes + 1)
                break

    def test_retinamovie_data_generator_channels_first(self):
        """Same as test_retinamovie_data_generator but with channels_first
        ordering (channel axis moved to position 1)."""
        frames = 7
        frames_per_batch = 5
        for test_images in _generate_test_images(21, 21):
            img_list = []
            for im in test_images:
                frame_list = []
                for _ in range(frames):
                    frame_list.append(img_to_array(im)[None, ...])
                img_stack = np.vstack(frame_list)
                img_list.append(img_stack)
            images = np.vstack(img_list)
            batch_count = images.shape[0] // frames
            images = np.reshape(images, tuple([batch_count, frames] +
                                              list(images.shape[1:])))
            images = np.rollaxis(images, 4, 1)
            generator = image_generators.RetinaMovieDataGenerator(
                featurewise_center=True,
                samplewise_center=True,
                featurewise_std_normalization=True,
                samplewise_std_normalization=True,
                zca_whitening=True,
                rotation_range=90.,
                width_shift_range=0.1,
                height_shift_range=0.1,
                shear_range=0.5,
                zoom_range=0.2,
                channel_shift_range=1.,
                # brightness_range=(1, 5),  # TODO: `channels_first` conflict
                fill_mode='nearest',
                cval=0.5,
                horizontal_flip=True,
                vertical_flip=True,
                data_format='channels_first')
            num_classes = np.random.randint(1, 3)
            # Basic test before fit
            train_dict = {
                'X': np.random.random((8, 3, 11, 10, 10)),
                'y': np.random.random((8, 1, 11, 10, 10)),
            }
            generator.flow(train_dict, num_classes=num_classes)
            # Temp dir to save generated images
            temp_dir = self.get_temp_dir()
            # Fit
            # generator.fit(images, augment=True, seed=1)
            y_shape = tuple([images.shape[0], 1] + list(images.shape)[2:])
            train_dict['X'] = images
            train_dict['y'] = np.random.randint(0, 9, size=y_shape)
            for x, y in generator.flow(
                    train_dict,
                    num_classes=num_classes,
                    include_bbox=True,
                    include_masks=True,
                    frames_per_batch=frames_per_batch,
                    save_to_dir=temp_dir,
                    shuffle=True):
                # For channels_first the time axis is 2.
                expected = list(images.shape)
                expected[2] = frames_per_batch
                expected = tuple(expected)
                self.assertIsInstance(x, dict)
                self.assertEqual('input' in x, True)
                self.assertEqual('boxes_input' in x, True)
                self.assertEqual(x['input'].shape[1:], expected[1:])
                self.assertIsInstance(y, dict)
                self.assertEqual('regression' in y, True)
                self.assertEqual('classification' in y, True)
                r = y['regression']
                l = y['classification']
                self.assertEqual(r.shape[:-1], l.shape[:-1])
                self.assertEqual(r.shape[-1], 5)
                self.assertEqual(l.shape[-1], num_classes + 1)
                break

    def test_retinamovie_data_generator_invalid_data(self):
        """Verify that invalid inputs raise the appropriate errors."""
        generator = image_generators.RetinaMovieDataGenerator(
            featurewise_center=True,
            samplewise_center=True,
            featurewise_std_normalization=True,
            samplewise_std_normalization=True,
            zca_whitening=True,
            data_format='channels_last')
        # Test fit with invalid data
        with self.assertRaises(ValueError):
            x = np.random.random((3, 10, 10))
            generator.fit(x)
        # Test flow with invalid dimensions
        with self.assertRaises(ValueError):
            train_dict = {
                'X': np.random.random((8, 10, 10, 1)),
                'y': np.random.random((8, 10, 10, 1))
            }
            generator.flow(train_dict)
        # Test flow with non-matching batches
        with self.assertRaises(Exception):
            train_dict = {
                'X': np.random.random((8, 11, 10, 10, 1)),
                'y': np.random.random((7, 11, 10, 10, 1))
            }
            generator.flow(train_dict)
        # Test flow with bigger frames_per_batch than frames
        with self.assertRaises(Exception):
            train_dict = {
                'X': np.random.random((8, 11, 10, 10, 1)),
                'y': np.random.random((8, 11, 10, 10, 1))
            }
            generator.flow(train_dict, frames_per_batch=31)
        # Invalid number of channels: will work but raise a warning
        generator.fit(np.random.random((8, 3, 10, 10, 5)))
        # NOTE(review): the remaining checks construct MovieDataGenerator,
        # not RetinaMovieDataGenerator — possibly intentional reuse of the
        # base class's validation; confirm.
        with self.assertRaises(ValueError):
            generator = image_generators.MovieDataGenerator(
                data_format='unknown')
        generator = image_generators.MovieDataGenerator(
            zoom_range=(2, 2))
        with self.assertRaises(ValueError):
            generator = image_generators.MovieDataGenerator(
                zoom_range=(2, 2, 2))
| 39.387387 | 80 | 0.548204 | 1,955 | 17,488 | 4.717647 | 0.13913 | 0.029491 | 0.033395 | 0.029275 | 0.833894 | 0.81969 | 0.802125 | 0.793885 | 0.755611 | 0.736745 | 0 | 0.030356 | 0.350126 | 17,488 | 443 | 81 | 39.476298 | 0.78117 | 0.117338 | 0 | 0.815476 | 0 | 0 | 0.025879 | 0 | 0 | 0 | 0 | 0.002257 | 0.151786 | 1 | 0.020833 | false | 0 | 0.026786 | 0 | 0.056548 | 0.002976 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0706da42994ec489874e1767ef990c947f8d7515 | 9,640 | bzl | Python | compatibility/versions.bzl | daravep/daml | c33140cf88f80b3f212fd2a58b85d33e2e3f9711 | [
"Apache-2.0"
] | null | null | null | compatibility/versions.bzl | daravep/daml | c33140cf88f80b3f212fd2a58b85d33e2e3f9711 | [
"Apache-2.0"
] | null | null | null | compatibility/versions.bzl | daravep/daml | c33140cf88f80b3f212fd2a58b85d33e2e3f9711 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
# This file is autogenerated and should not be modified manually.
# Update versions/UpdateVersions.hs instead.
# SDK versions exercised by the compatibility test matrix.
# NOTE(review): "0.0.0" appears to stand for the locally built HEAD
# version — confirm in versions/UpdateVersions.hs.
sdk_versions = [
    "1.0.0",
    "1.0.1",
    "1.1.1",
    "1.2.0",
    "1.3.0",
    "1.4.0",
    "1.5.0",
    "1.6.0",
    "1.6.1",
    "1.7.0",
    "1.8.0",
    "1.9.0-snapshot.20201215.5907.0.a6ed34c5",
    "0.0.0",
]
# Platform (ledger) versions exercised by the compatibility test matrix.
platform_versions = [
    "1.0.0",
    "1.0.1",
    "1.1.1",
    "1.2.0",
    "1.3.0",
    "1.4.0",
    "1.5.0",
    "1.6.0",
    "1.6.1",
    "1.7.0",
    "1.8.0",
    "1.9.0-snapshot.20201215.5907.0.a6ed34c5",
    "0.0.0",
]
# Stable releases only (no snapshot entries).
stable_versions = [
    "1.0.0",
    "1.0.1",
    "1.1.1",
    "1.2.0",
    "1.3.0",
    "1.4.0",
    "1.5.0",
    "1.6.0",
    "1.6.1",
    "1.7.0",
    "1.8.0",
    "0.0.0",
]
# Most recent stable release.
latest_stable_version = "1.8.0"
version_sha256s = {
"1.0.0": {
"linux": "ee7e2f50394d44fb3490068de64d37f4f87534e802717bd7e07e8767df2e4e05",
"macos": "feb2086a9a01048300270c71eb212c8541cdec1082f541408250d6124bc307a8",
"windows": "2028efe1f505c1994e1abc41c0fb5181669cd46834818aa8276d04b0fb6eb034",
"test_tool": "cf66efafd9490e1256e825f377b208b8ae90151f56e411b596fbaaef91353e14",
"daml_types": "f85e5dd7ef1c5733826c1c79e316d2733344ac0da67f0d381ba70fc83a64fc78",
"daml_ledger": "5aab9a6cbdc987fc4279481152ff65bda503425f7e338c5123237b283aae44d6",
"daml_react": "30ab9db8a20df6cbfed7c1b42a45ca0ea55af6e150c013105d46c94afa5f9a46",
},
"1.0.1": {
"linux": "9cff04c29bb28503b41dcde310a2f3307984b1d976f8bccfb38268672e730c8f",
"macos": "a7e094e2d8766c852a247e0601b7c062c435b0d91f9ab256bbf4fcb40971ee36",
"windows": "41ecd44f3ea7c2a64a7f677f36b3f26dabaa5de913bc57bd680ea8f40f00ff0b",
"test_tool": "038de725b74f128fc0cb6f3ce8eef7d62da9527d0cbf25b93b7c1623bbb413c9",
"daml_types": "c50d5f37dbb42f45ae1f4f4013a72006ae7bbd531c68c363b54212a3458c5b6e",
"daml_ledger": "12fc3ef723171162128fb5951dec5452f75b1a3de7facf85a2b0126f46de159b",
"daml_react": "dc4cbf95f22cc0300af1f450be316bd55ebbc2816e9806231b13edce85bea44c",
},
"1.1.1": {
"linux": "1e396287b7462147d182fd2c536a5ee03163e3efeefc989d368b1dfc01a40a97",
"macos": "f49dd3f5198d3392b81bd64067f697bc0d18612fe9599a50580d57d1dd9c721c",
"windows": "c2ec9b51f7d4e44309d73f1ee9eb4919eac5c80452782b97d76251a0c283f596",
"test_tool": "f5b5024d7173e7f56b1d2ca57c8ee0b4a107208cd9d15f27f9ab1737ecfc33a4",
"daml_types": "142e7094081a95f202e0a5eaf72a15df724c2b9aa5e83bd22e4c544705de967e",
"daml_ledger": "15041d0be63f1781b9d036481093f49d56071078a4e826a862de921b5a806d93",
"daml_react": "3dabcccd1d4c13637cc096c401606f777f90368a4057dc7220269f8101aa1888",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.2.0": {
"linux": "3920012a013d2b162f1328addaaf6c8f322f0473c1f681146fa748039f838a77",
"macos": "2f0e39f864b288de7a8b55996916452c3226669054423ee921660eb3a3467b9f",
"windows": "0df31d5dda5a23ad48c0201135c0611881bdc49d7f5ecbf31fce24d2129e9626",
"test_tool": "7c41476ec2fff62b0b588f07cd7496b5fde8fcdb440a48d539041ac3df7141d6",
"daml_types": "339591a0213d33d55f73fc10f33edd8e7f3d1f6a7b560a0abddb5510e5fe9c26",
"daml_ledger": "873452dce84fab87b456b1da1d9d5bc2a74cf4c6f3c6872e409a1b4c3dd170f1",
"daml_react": "bb58c2c51befe75f7f50a4ccf90bd9ed2593cdfff6ac1c9a98e0a80f5dbf82f7",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.3.0": {
"linux": "556975da1fa4ac2f4c44825a1518f4ee01e6ba2de3f6dcc70ec24f9d13f2689a",
"macos": "43cb5b1c98115e1ddb7a4fe5ab629a2090cbbf177a5715d533dca76824552f9d",
"windows": "a2318e6b95a718028b65d1e84ee6aadcd1252ae78e0fce335ac2f0a903051eee",
"test_tool": "5fe64257452bece53d88fcb4d9dbe4f472f7f7e12c4a571ac39796ce47e8527f",
"daml_types": "41cdc3d0a9448869470054d3d9d0ea71c40309893e94474074369d1709a65676",
"daml_ledger": "acd6499ea37c5cfe069a9461dd1ab425293df166eed8085ee913e80e9a188510",
"daml_react": "a192c3b51faacaf95efc250442f7c6be4ae5ad227cf07f661f49c06548302b67",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.4.0": {
"linux": "28883d87c6b6780e6d13ef88ed4b6131135b2293520c2da143094744c112646f",
"macos": "2be95a05a6c7f67de1cc1cfa605d6d518c32b63c9893dfaa2558d3e9a7dfc70b",
"windows": "f8449378fb572f93ba4c3ef7716e69f5ddbdc60a9b7b1a4aeab2a867954bcac4",
"test_tool": "04aa064be948a9a1e88456c9fe0eaafe9d1e6280930039a5570b61df42c00129",
"daml_types": "cc47d4576c85eac5840c8733bd702304c282bb656603a6c621ea701d106e9ede",
"daml_ledger": "f112eac6597a0787008cf995894ab9bbe8e5f0e39a699e22f02c36080d88b12d",
"daml_react": "d7e50c4d94424e3f25df716105d00c4eb945f2709282a548ae93f250e306fc9c",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.5.0": {
"linux": "a9f92e5e773c8984e1ccf305dc96e254a1c73a1427626e3fd315066d6f4545e4",
"macos": "0e8e571d2a50c4e0a305e2c98c510a2b2272583a0c0bc48df7050b7196e827de",
"windows": "2eec0144ffedbdad069dfc49187daf72e94f12b2d27b66443b63d6244967dedd",
"test_tool": "539260ae20afbcc67f50e359a31a4a7f808d0b5fec6417b8cd6c73eefba4b13c",
"daml_types": "389fdf6aa51c7610986327cf2e9e02fe7bb91a5845692b4db32c73c67355f5d5",
"daml_ledger": "6fb560a8a482b6ddef30eef66e1e55e964fd7b9749ac5e9eceb3a98c51722d1d",
"daml_react": "2d7572e009fa10b1ca430e28a9d18cd60b6299ecb75f4a000967fb1322c2edb2",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.6.0": {
"linux": "03366e1303344ae9d2df1aafd8c58c66a5f262ca5a55d54fc272b4890a6300d6",
"macos": "e15f332a2042bc2ae27f90e42f489a6581d9c4082b01e7eefa2606a2f65648e2",
"windows": "c42635403c48f7a21b210a950c81538d11167a1479986699ff05f1ee95b2c04e",
"test_tool": "ed51e0fb8ca53035ac260c1c14c802d4d7b600cad4e395986955a6a0eec03975",
"daml_types": "4ca3d3dfb3c5c868b8fb72cff5fd98cb322688676f01e87040fc7c914fb65836",
"daml_ledger": "fbfafbc1d6406efe1bfa4f3a9344d374036876c21056888bcdeec927bf64aa77",
"daml_react": "48cfb5e7a1a8310d573f4a709ae98bd014528b5da3083d1a204a4e74254b7dec",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.6.1": {
"linux": "4785d2bd0bbc6a6b28a6324b3d7a173062cf34ee8740d03fc29cfcf04a056ee0",
"macos": "71a56cf51926406398ff3f59a3244ba6efff15dd75291892c5ed6313c36a0cf5",
"windows": "a8cc1100c8d8b49e93ea8124942593de92d4c2340b5027017b13a35ca403e7b4",
"test_tool": "4a7f1a69aff368bd387071b80375a22abd3f5156b9a7e1b4b30b2eaa3372dc2c",
"daml_types": "06b4d1ffd3ddc6f6508a3cfedc432d41b489f1ceb3555da5804b52ade155b947",
"daml_ledger": "2562effa524e90db71711d8b9a7c19a4e12f82de1adf434d213854a0290699c5",
"daml_react": "06b16112e8bfdbfd1f78585f9b4dfbcbe532965a698eaad2196d830345a31ca4",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.7.0": {
"linux": "e45ee32de11470ecaa5853bf9877ba73a641671859416c76beaf2896c3fa8cb7",
"macos": "df3dd4ed5c3916c53173f89315e61df63e9dec07f94f6c57c6842a4e6a8b4a3d",
"windows": "0eb92ae7a3a6951f3acbcf1e0fe8d1edf32169a07639a1aad41ddc44e842b772",
"test_tool": "743a94f133200c70cb9434ce9821e28dbc4ad925eb15ad013f0b314cd948c9e4",
"daml_types": "630ccee496a02aa26ab392d6960632ee512d0af24cf01d46a89b438a8da732c8",
"daml_ledger": "2ce8fb6afd7b9517ee8c7fb2d8810d6677ffecfb697ba122849a3d873219c726",
"daml_react": "de8916eaaa5c569c11b2fd3e27c23edfc1ac20bf2cab5f99793d4ef007f839a6",
"create_daml_app_patch": "4f299feff2b0ca08b24e59bd67aa0c851c9a83e7ca15fd868186583aeda35e89",
},
"1.8.0": {
"linux": "0c6272dd024687fc6e50c977de65bce10a0050d07d2a0a21386c802e0301e35b",
"macos": "cb61eb1868a272472ab1250c9a8d977c56446a91b43a1e246911a59ec5e19530",
"windows": "5d6603de5aa65ea91fbd3969e781721636865feb995ca48b9144599f8af2f820",
"test_tool": "003720d24ab77b78b1eb5247b1834e63050d6f2384d057f2dd81de25b0d38df8",
"daml_types": "47c0c94d56de42589dcffbf0b623ab39b24aa97182423cac599e2ed4671d1dfb",
"daml_ledger": "76805f2dc9e790c10e0c569ab25c5f149fe26cc9236d7f744cb130b7fd320811",
"daml_react": "9684f2eba565a877d3154c2acf2f07d454dbacaf2104b74f5af5a194cc295c8f",
"create_daml_app_patch": "4f299feff2b0ca08b24e59bd67aa0c851c9a83e7ca15fd868186583aeda35e89",
},
"1.9.0-snapshot.20201215.5907.0.a6ed34c5": {
"linux": "d681487ff836413f1bafb83e7c493da90cc62e6ad31f63ba8b62e76c57db3890",
"macos": "213bbde12f51de9ef7de92afddf9f8b6f061da638ed060fe1ea0b109a4214bee",
"windows": "157f467e209062e41aab3b1eff92e8ae65564072087075b017fa0b469417008f",
"test_tool": "b145349b8a986e2fd5add32de8deb2a61ad6712ed4ca7199b8386e48da9f6304",
"daml_types": "bff9999c460b8ae68884dc7333796c2002b644a545cb371ce189794589bc6e89",
"daml_ledger": "77bda9aafc2615f3ce004e2bb8370d76af1776d114bdf0fc0b68eef6c2779d9b",
"daml_react": "2499bd054bba11a196936a3cfb6a58cfff5d5b30166d7d01fc474bf6fd8a8219",
"create_daml_app_patch": "4f299feff2b0ca08b24e59bd67aa0c851c9a83e7ca15fd868186583aeda35e89",
},
}
| 56.705882 | 100 | 0.770228 | 480 | 9,640 | 15.29375 | 0.3125 | 0.007356 | 0.004087 | 0.02452 | 0.143713 | 0.132543 | 0.053535 | 0.03065 | 0.026291 | 0.026291 | 0 | 0.48625 | 0.136203 | 9,640 | 169 | 101 | 57.04142 | 0.39534 | 0.024689 | 0 | 0.290909 | 1 | 0 | 0.773332 | 0.675003 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
070747d931ce9ea27161947927189411a8620f99 | 208 | py | Python | lib/bx/align/tools/__init__.py | mr-c/bx-python | 0b2b766eee008d1f7a2814be4ddd2c5dc3787537 | [
"MIT"
] | 122 | 2015-07-01T12:00:22.000Z | 2022-03-02T09:27:35.000Z | lib/bx/align/tools/__init__.py | mr-c/bx-python | 0b2b766eee008d1f7a2814be4ddd2c5dc3787537 | [
"MIT"
] | 64 | 2015-11-06T21:03:18.000Z | 2022-03-24T00:55:27.000Z | lib/bx/align/tools/__init__.py | mr-c/bx-python | 0b2b766eee008d1f7a2814be4ddd2c5dc3787537 | [
"MIT"
] | 60 | 2015-10-05T19:19:36.000Z | 2021-11-19T20:53:54.000Z | """
Various utilities for working with `bx.align.Alignment` objects.
"""
from .chop import * # noqa: F40
from .fuse import * # noqa: F40
from .thread import * # noqa: F40
from .tile import * # noqa: F40
| 23.111111 | 64 | 0.668269 | 29 | 208 | 4.793103 | 0.586207 | 0.28777 | 0.374101 | 0.366906 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.048193 | 0.201923 | 208 | 8 | 65 | 26 | 0.789157 | 0.504808 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
0736c11fd6aec45422ff0adbc3a82758cf695416 | 42,282 | py | Python | mpf/core/segment_mappings.py | pmansukhani/mpf | 0979965d24bcaba9423b43581c6a18b847b1b900 | [
"MIT"
] | null | null | null | mpf/core/segment_mappings.py | pmansukhani/mpf | 0979965d24bcaba9423b43581c6a18b847b1b900 | [
"MIT"
] | null | null | null | mpf/core/segment_mappings.py | pmansukhani/mpf | 0979965d24bcaba9423b43581c6a18b847b1b900 | [
"MIT"
] | null | null | null | """Generic mappings for segment displays.
Those mappings are based on David Madison's awesome mappings: https://github.com/dmadison/LED-Segment-ASCII.
You can use convert_segments.py (based on https://github.com/dmadison/LED-Segment-ASCII/issues/2) to recreate them.
BCD were created by us.
"""
class TextToSegmentMapper:

    """Helper to map text to segments."""

    @classmethod
    def map_text_to_segments(cls, text, display_width, segment_mapping, embed_dots=True):
        """Map text to a list of segments.

        Text is aligned to the right.
        Optionally, it can embed dots into segments.
        """
        result = []
        index = 0
        length = len(text)
        while index < length:
            mapping = segment_mapping.get(ord(text[index]), segment_mapping[None])
            index += 1
            if embed_dots and not mapping.dp:
                # Dot embedding: a "." immediately after this char lights its
                # dp segment instead of taking a display position of its own.
                upcoming = text[index] if index < length else " "
                if upcoming == ".":
                    mapping = mapping.copy_with_dp_on()
                    index += 1
            result.append(mapping)

        if len(result) > display_width:
            # Too long for the display: keep only the rightmost segments.
            result = result[-display_width:]
        while len(result) < display_width:
            # Left-pad with spaces so the text stays right aligned.
            result.insert(0, segment_mapping.get(ord(" "), segment_mapping[None]))
        return result
class Segment:

    """Mapping for a segment."""

    __slots__ = ["dp", "char"]

    def __init__(self, dp, char):
        """Initialise segment."""
        self.dp = dp
        self.char = char

    @staticmethod
    def _attribute_dict(obj):
        # Collect every non-dunder, non-callable attribute via reflection,
        # so subclasses that add extra slots are covered automatically.
        return {name: getattr(obj, name) for name in dir(obj)
                if not name.startswith("__") and not callable(getattr(obj, name))}

    def __repr__(self):
        """Return str representation."""
        pairs = ["{}={}".format(name, value)
                 for name, value in self._attribute_dict(self).items()]
        return "<" + " ".join(pairs) + ">"

    def copy_with_dp_on(self):
        """Return a copy of the segment with dp on."""
        values = self._attribute_dict(self)
        values['dp'] = 1
        return self.__class__(**values)

    def __eq__(self, other):
        """Compare to segments."""
        return self._attribute_dict(self) == self._attribute_dict(other)
class BcdSegments(Segment):

    """Mapping for BCD segments with dot."""

    __slots__ = ["x3", "x2", "x1", "x0"]

    # pylint: disable-msg=too-many-arguments
    def __init__(self, dp, x3, x2, x1, x0, char):
        """Create segment entry."""
        super().__init__(dp, char)
        self.x3 = x3
        self.x2 = x2
        self.x1 = x1
        self.x0 = x0

    def get_dpx4x3x2x1_encoding(self) -> bytes:
        """Return segment in dpx3x2x1x0 order."""
        value = self.dp << 7
        value |= self.x3 << 3
        value |= self.x2 << 2
        value |= self.x1 << 1
        value |= self.x0
        return bytes([value])

    def get_x4x3x2x1_encoding(self) -> bytes:
        """Return segment in x3x2x1x0 order."""
        return bytes([self.x3 << 3 | self.x2 << 2 | self.x1 << 1 | self.x0])
# ASCII code point -> BCD bit pattern for that character.
# Digits 0-9 and hex letters A-F/a-f are mappable; 33 ("!") doubles as
# "1 with dot" and 63 ("?") as "2 with dot".  The ``None`` entry is the
# fallback used by TextToSegmentMapper for unmappable characters.
bcd_segments = {
    None: BcdSegments(dp=0, x3=0, x2=0, x1=0, x0=0, char="not mappable char"),
    33: BcdSegments(dp=1, x3=0, x2=0, x1=0, x0=1, char="!"),  # 1 with dot
    48: BcdSegments(dp=0, x3=0, x2=0, x1=0, x0=0, char="0"),
    49: BcdSegments(dp=0, x3=0, x2=0, x1=0, x0=1, char="1"),
    50: BcdSegments(dp=0, x3=0, x2=0, x1=1, x0=0, char="2"),
    51: BcdSegments(dp=0, x3=0, x2=0, x1=1, x0=1, char="3"),
    52: BcdSegments(dp=0, x3=0, x2=1, x1=0, x0=0, char="4"),
    53: BcdSegments(dp=0, x3=0, x2=1, x1=0, x0=1, char="5"),
    54: BcdSegments(dp=0, x3=0, x2=1, x1=1, x0=0, char="6"),
    55: BcdSegments(dp=0, x3=0, x2=1, x1=1, x0=1, char="7"),
    56: BcdSegments(dp=0, x3=1, x2=0, x1=0, x0=0, char="8"),
    57: BcdSegments(dp=0, x3=1, x2=0, x1=0, x0=1, char="9"),
    63: BcdSegments(dp=1, x3=0, x2=0, x1=1, x0=0, char="?"),  # 2 with dot
    65: BcdSegments(dp=0, x3=1, x2=0, x1=1, x0=0, char="A"),
    66: BcdSegments(dp=0, x3=1, x2=0, x1=1, x0=1, char="B"),
    67: BcdSegments(dp=0, x3=1, x2=1, x1=0, x0=0, char="C"),
    68: BcdSegments(dp=0, x3=1, x2=1, x1=0, x0=1, char="D"),
    69: BcdSegments(dp=0, x3=1, x2=1, x1=1, x0=0, char="E"),
    70: BcdSegments(dp=0, x3=1, x2=1, x1=1, x0=1, char="F"),
    97: BcdSegments(dp=0, x3=1, x2=0, x1=1, x0=0, char="a"),
    98: BcdSegments(dp=0, x3=1, x2=0, x1=1, x0=1, char="b"),
    99: BcdSegments(dp=0, x3=1, x2=1, x1=0, x0=0, char="c"),
    100: BcdSegments(dp=0, x3=1, x2=1, x1=0, x0=1, char="d"),
    101: BcdSegments(dp=0, x3=1, x2=1, x1=1, x0=0, char="e"),
    102: BcdSegments(dp=0, x3=1, x2=1, x1=1, x0=1, char="f"),
}
class SevenSegments(Segment):

    """Mapping for seven segments.

    See segment order here: https://github.com/dmadison/LED-Segment-ASCII/blob/master/README.md.
    """

    __slots__ = ["g", "f", "e", "d", "c", "b", "a"]

    # pylint: disable-msg=too-many-arguments
    def __init__(self, dp, g, f, e, d, c, b, a, char):
        """Create segment entry."""
        super().__init__(dp, char)
        self.g = g
        self.f = f
        self.e = e
        self.d = d
        self.c = c
        self.b = b
        self.a = a

    @staticmethod
    def _pack(bits):
        """OR each flag into its bit position, most significant bit first."""
        value = 0
        for position, bit in enumerate(reversed(bits)):
            value |= bit << position
        return value

    def get_gfedcba_encoding(self) -> bytes:
        """Return segment in gfedcba order."""
        return bytes([self._pack([self.g, self.f, self.e, self.d, self.c, self.b, self.a])])

    def get_dpgfedcba_encoding(self) -> bytes:
        """Return segment in dp gfedcba order."""
        return bytes([self._pack([self.dp, self.g, self.f, self.e, self.d, self.c, self.b, self.a])])

    def get_dpgfeabcd_encoding(self) -> bytes:
        """Return segment in dp gfeabcd order."""
        return bytes([self._pack([self.dp, self.g, self.f, self.e, self.a, self.b, self.c, self.d])])
# ASCII code point (32-127) -> seven-segment pattern for that character.
# The ``None`` entry is the fallback used by TextToSegmentMapper for
# characters with no mapping.
seven_segments = {
    None: SevenSegments(dp=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="not mappable char"),
    32: SevenSegments(dp=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="(space)"),
    33: SevenSegments(dp=1, g=0, f=0, e=0, d=0, c=1, b=1, a=0, char="!"),
    34: SevenSegments(dp=0, g=0, f=1, e=0, d=0, c=0, b=1, a=0, char="\""),
    35: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=1, b=1, a=0, char="#"),
    36: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=0, a=1, char="$"),
    37: SevenSegments(dp=1, g=1, f=0, e=1, d=0, c=0, b=1, a=0, char="%"),
    38: SevenSegments(dp=0, g=1, f=0, e=0, d=0, c=1, b=1, a=0, char="&"),
    39: SevenSegments(dp=0, g=0, f=1, e=0, d=0, c=0, b=0, a=0, char="'"),
    40: SevenSegments(dp=0, g=0, f=1, e=0, d=1, c=0, b=0, a=1, char="("),
    41: SevenSegments(dp=0, g=0, f=0, e=0, d=1, c=0, b=1, a=1, char=")"),
    42: SevenSegments(dp=0, g=0, f=1, e=0, d=0, c=0, b=0, a=1, char="*"),
    43: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=0, b=0, a=0, char="+"),
    44: SevenSegments(dp=0, g=0, f=0, e=1, d=0, c=0, b=0, a=0, char=","),
    45: SevenSegments(dp=0, g=1, f=0, e=0, d=0, c=0, b=0, a=0, char="-"),
    46: SevenSegments(dp=1, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="."),
    47: SevenSegments(dp=0, g=1, f=0, e=1, d=0, c=0, b=1, a=0, char="/"),
    48: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=1, b=1, a=1, char="0"),
    49: SevenSegments(dp=0, g=0, f=0, e=0, d=0, c=1, b=1, a=0, char="1"),
    50: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=0, b=1, a=1, char="2"),
    51: SevenSegments(dp=0, g=1, f=0, e=0, d=1, c=1, b=1, a=1, char="3"),
    52: SevenSegments(dp=0, g=1, f=1, e=0, d=0, c=1, b=1, a=0, char="4"),
    53: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=0, a=1, char="5"),
    54: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=1, b=0, a=1, char="6"),
    55: SevenSegments(dp=0, g=0, f=0, e=0, d=0, c=1, b=1, a=1, char="7"),
    56: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=1, b=1, a=1, char="8"),
    57: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=1, a=1, char="9"),
    58: SevenSegments(dp=0, g=0, f=0, e=0, d=1, c=0, b=0, a=1, char=":"),
    59: SevenSegments(dp=0, g=0, f=0, e=0, d=1, c=1, b=0, a=1, char=";"),
    60: SevenSegments(dp=0, g=1, f=1, e=0, d=0, c=0, b=0, a=1, char="<"),
    61: SevenSegments(dp=0, g=1, f=0, e=0, d=1, c=0, b=0, a=0, char="="),
    62: SevenSegments(dp=0, g=1, f=0, e=0, d=0, c=0, b=1, a=1, char=">"),
    63: SevenSegments(dp=1, g=1, f=0, e=1, d=0, c=0, b=1, a=1, char="?"),
    64: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=1, b=1, a=1, char="@"),
    65: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=1, b=1, a=1, char="A"),
    66: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=1, b=0, a=0, char="B"),
    67: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=0, b=0, a=1, char="C"),
    68: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=1, b=1, a=0, char="D"),
    69: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=0, b=0, a=1, char="E"),
    70: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=0, b=0, a=1, char="F"),
    71: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=1, b=0, a=1, char="G"),
    72: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=1, b=1, a=0, char="H"),
    73: SevenSegments(dp=0, g=0, f=1, e=1, d=0, c=0, b=0, a=0, char="I"),
    74: SevenSegments(dp=0, g=0, f=0, e=1, d=1, c=1, b=1, a=0, char="J"),
    75: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=1, b=0, a=1, char="K"),
    76: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=0, b=0, a=0, char="L"),
    77: SevenSegments(dp=0, g=0, f=0, e=1, d=0, c=1, b=0, a=1, char="M"),
    78: SevenSegments(dp=0, g=0, f=1, e=1, d=0, c=1, b=1, a=1, char="N"),
    79: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=1, b=1, a=1, char="O"),
    80: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=0, b=1, a=1, char="P"),
    81: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=0, b=1, a=1, char="Q"),
    82: SevenSegments(dp=0, g=0, f=1, e=1, d=0, c=0, b=1, a=1, char="R"),
    83: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=0, a=1, char="S"),
    84: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=0, b=0, a=0, char="T"),
    85: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=1, b=1, a=0, char="U"),
    86: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=1, b=1, a=0, char="V"),
    87: SevenSegments(dp=0, g=0, f=1, e=0, d=1, c=0, b=1, a=0, char="W"),
    88: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=1, b=1, a=0, char="X"),
    89: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=1, a=0, char="Y"),
    90: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=0, b=1, a=1, char="Z"),
    91: SevenSegments(dp=0, g=0, f=1, e=1, d=1, c=0, b=0, a=1, char="["),
    # NOTE(review): code point 92 is the backslash character, but the char
    # label below is a double quote - possible typo to confirm.
    92: SevenSegments(dp=0, g=1, f=1, e=0, d=0, c=1, b=0, a=0, char="\""),
    93: SevenSegments(dp=0, g=0, f=0, e=0, d=1, c=1, b=1, a=1, char="]"),
    94: SevenSegments(dp=0, g=0, f=1, e=0, d=0, c=0, b=1, a=1, char="^"),
    95: SevenSegments(dp=0, g=0, f=0, e=0, d=1, c=0, b=0, a=0, char="_"),
    96: SevenSegments(dp=0, g=0, f=0, e=0, d=0, c=0, b=1, a=0, char="`"),
    97: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=1, b=1, a=1, char="a"),
    98: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=1, b=0, a=0, char="b"),
    99: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=0, b=0, a=0, char="c"),
    100: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=1, b=1, a=0, char="d"),
    101: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=0, b=1, a=1, char="e"),
    102: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=0, b=0, a=1, char="f"),
    103: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=1, a=1, char="g"),
    104: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=1, b=0, a=0, char="h"),
    105: SevenSegments(dp=0, g=0, f=0, e=1, d=0, c=0, b=0, a=0, char="i"),
    106: SevenSegments(dp=0, g=0, f=0, e=0, d=1, c=1, b=0, a=0, char="j"),
    107: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=1, b=0, a=1, char="k"),
    108: SevenSegments(dp=0, g=0, f=1, e=1, d=0, c=0, b=0, a=0, char="l"),
    109: SevenSegments(dp=0, g=0, f=0, e=1, d=0, c=1, b=0, a=0, char="m"),
    110: SevenSegments(dp=0, g=1, f=0, e=1, d=0, c=1, b=0, a=0, char="n"),
    111: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=1, b=0, a=0, char="o"),
    112: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=0, b=1, a=1, char="p"),
    113: SevenSegments(dp=0, g=1, f=1, e=0, d=0, c=1, b=1, a=1, char="q"),
    114: SevenSegments(dp=0, g=1, f=0, e=1, d=0, c=0, b=0, a=0, char="r"),
    115: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=0, a=1, char="s"),
    116: SevenSegments(dp=0, g=1, f=1, e=1, d=1, c=0, b=0, a=0, char="t"),
    117: SevenSegments(dp=0, g=0, f=0, e=1, d=1, c=1, b=0, a=0, char="u"),
    118: SevenSegments(dp=0, g=0, f=0, e=1, d=1, c=1, b=0, a=0, char="v"),
    119: SevenSegments(dp=0, g=0, f=0, e=1, d=0, c=1, b=0, a=0, char="w"),
    120: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=1, b=1, a=0, char="x"),
    121: SevenSegments(dp=0, g=1, f=1, e=0, d=1, c=1, b=1, a=0, char="y"),
    122: SevenSegments(dp=0, g=1, f=0, e=1, d=1, c=0, b=1, a=1, char="z"),
    123: SevenSegments(dp=0, g=1, f=0, e=0, d=0, c=1, b=1, a=0, char="{"),
    124: SevenSegments(dp=0, g=0, f=1, e=1, d=0, c=0, b=0, a=0, char="|"),
    125: SevenSegments(dp=0, g=1, f=1, e=1, d=0, c=0, b=0, a=0, char="}"),
    126: SevenSegments(dp=0, g=0, f=0, e=0, d=0, c=0, b=0, a=1, char="~"),
    127: SevenSegments(dp=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="(del)"),
}
# pylint: disable-msg=too-many-instance-attributes
class FourteenSegments(Segment):

    """Mapping for fourteen segments.

    See segment order here: https://github.com/dmadison/LED-Segment-ASCII/blob/master/README.md.
    """

    __slots__ = ["l", "m", "n", "k", "j", "h", "g2", "g1", "f", "e", "d", "c", "b", "a"]

    # pylint: disable-msg=too-many-arguments
    # pylint: disable-msg=too-many-locals
    def __init__(self, dp, l, m, n, k, j, h, g2, g1, f, e, d, c, b, a, char):
        """Create segment entry."""
        super().__init__(dp, char)
        self.l = l  # noqa: E741
        self.m = m
        self.n = n
        self.k = k
        self.j = j
        self.h = h
        self.g2 = g2
        self.g1 = g1
        self.f = f
        self.e = e
        self.d = d
        self.c = c
        self.b = b
        self.a = a

    @staticmethod
    def _pack(bits):
        """OR each flag into its bit position, most significant bit first."""
        value = 0
        for position, bit in enumerate(reversed(bits)):
            value |= bit << position
        return value

    def get_pinmame_encoding(self) -> bytes:
        """Return segment in pinmame order."""
        low = self._pack([self.g1, self.f, self.e, self.d, self.c, self.b, self.a])
        high = self._pack([self.dp, self.l, self.m, self.n, self.g2, self.k, self.j, self.h])
        return bytes([low, high])

    def get_apc_encoding(self) -> bytes:
        """Return segment in d, c, b, a, e, f, g, comma + j, h, m, k, p, r, dp, n order."""
        first = self._pack([self.dp, self.g1, self.f, self.e, self.a, self.b, self.c, self.d])
        second = self._pack([self.l, self.dp, self.n, self.m, self.k, self.g2, self.h, self.j])
        return bytes([first, second])
# ASCII code point (32-127) -> fourteen-segment pattern for that character.
# The ``None`` entry is the fallback used by TextToSegmentMapper for
# characters with no mapping.
fourteen_segments = {
    None: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0,  # noqa: E741
                           char="not mappable char"),
    32: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="(space)"),  # noqa: E741
    33: FourteenSegments(dp=1, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=1, b=1, a=0, char="!"),  # noqa: E741
    34: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=1, a=0, char="\""),  # noqa: E741
    35: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=1, g1=1, f=0, e=0, d=1, c=1, b=1, a=0, char="#"),  # noqa: E741
    36: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=1, g1=1, f=1, e=0, d=1, c=1, b=0, a=1, char="$"),  # noqa: E741
    37: FourteenSegments(dp=0, l=1, m=1, n=1, k=1, j=1, h=1, g2=1, g1=1, f=1, e=0, d=0, c=1, b=0, a=0, char="%"),  # noqa: E741
    38: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=1, h=1, g2=0, g1=1, f=0, e=1, d=1, c=0, b=0, a=1, char="&"),  # noqa: E741
    39: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="'"),  # noqa: E741
    40: FourteenSegments(dp=0, l=1, m=0, n=0, k=1, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="("),  # noqa: E741
    41: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=1, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char=")"),  # noqa: E741
    42: FourteenSegments(dp=0, l=1, m=1, n=1, k=1, j=1, h=1, g2=1, g1=1, f=0, e=0, d=0, c=0, b=0, a=0, char="*"),  # noqa: E741
    43: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=1, g1=1, f=0, e=0, d=0, c=0, b=0, a=0, char="+"),  # noqa: E741
    44: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char=","),  # noqa: E741
    45: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=0, e=0, d=0, c=0, b=0, a=0, char="-"),  # noqa: E741
    46: FourteenSegments(dp=1, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="."),  # noqa: E741
    47: FourteenSegments(dp=0, l=0, m=0, n=1, k=1, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="/"),  # noqa: E741
    48: FourteenSegments(dp=0, l=0, m=0, n=1, k=1, j=0, h=0, g2=0, g1=0, f=1, e=1, d=1, c=1, b=1, a=1, char="0"),  # noqa: E741
    49: FourteenSegments(dp=0, l=0, m=0, n=0, k=1, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=1, b=1, a=0, char="1"),  # noqa: E741
    50: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=0, e=1, d=1, c=0, b=1, a=1, char="2"),  # noqa: E741
    51: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=0, f=0, e=0, d=1, c=1, b=1, a=1, char="3"),  # noqa: E741
    52: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=0, d=0, c=1, b=1, a=0, char="4"),  # noqa: E741
    53: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=0, h=0, g2=0, g1=1, f=1, e=0, d=1, c=0, b=0, a=1, char="5"),  # noqa: E741
    54: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=1, d=1, c=1, b=0, a=1, char="6"),  # noqa: E741
    55: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=1, b=1, a=1, char="7"),  # noqa: E741
    56: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=1, d=1, c=1, b=1, a=1, char="8"),  # noqa: E741
    57: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=0, d=1, c=1, b=1, a=1, char="9"),  # noqa: E741
    58: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char=":"),  # noqa: E741
    59: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char=";"),  # noqa: E741
    60: FourteenSegments(dp=0, l=1, m=0, n=0, k=1, j=0, h=0, g2=0, g1=1, f=0, e=0, d=0, c=0, b=0, a=0, char="<"),  # noqa: E741
    61: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=0, e=0, d=1, c=0, b=0, a=0, char="="),  # noqa: E741
    62: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=1, g2=1, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char=">"),  # noqa: E741
    63: FourteenSegments(dp=1, l=0, m=1, n=0, k=0, j=0, h=0, g2=1, g1=0, f=0, e=0, d=0, c=0, b=1, a=1, char="?"),  # noqa: E741
    64: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=1, h=0, g2=1, g1=0, f=1, e=1, d=1, c=0, b=1, a=1, char="@"),  # noqa: E741
    65: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=1, d=0, c=1, b=1, a=1, char="A"),  # noqa: E741
    66: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=1, g1=0, f=0, e=0, d=1, c=1, b=1, a=1, char="B"),  # noqa: E741
    67: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=1, c=0, b=0, a=1, char="C"),  # noqa: E741
    68: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=1, c=1, b=1, a=1, char="D"),  # noqa: E741
    69: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=1, f=1, e=1, d=1, c=0, b=0, a=1, char="E"),  # noqa: E741
    70: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=1, f=1, e=1, d=0, c=0, b=0, a=1, char="F"),  # noqa: E741
    71: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=0, f=1, e=1, d=1, c=1, b=0, a=1, char="G"),  # noqa: E741
    72: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=1, d=0, c=1, b=1, a=0, char="H"),  # noqa: E741
    73: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=1, c=0, b=0, a=1, char="I"),  # noqa: E741
    74: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=1, d=1, c=1, b=1, a=0, char="J"),  # noqa: E741
    75: FourteenSegments(dp=0, l=1, m=0, n=0, k=1, j=0, h=0, g2=0, g1=1, f=1, e=1, d=0, c=0, b=0, a=0, char="K"),  # noqa: E741
    76: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=1, c=0, b=0, a=0, char="L"),  # noqa: E741
    77: FourteenSegments(dp=0, l=0, m=0, n=0, k=1, j=0, h=1, g2=0, g1=0, f=1, e=1, d=0, c=1, b=1, a=0, char="M"),  # noqa: E741
    78: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=0, h=1, g2=0, g1=0, f=1, e=1, d=0, c=1, b=1, a=0, char="N"),  # noqa: E741
    79: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=1, c=1, b=1, a=1, char="O"),  # noqa: E741
    80: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=1, d=0, c=0, b=1, a=1, char="P"),  # noqa: E741
    81: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=1, c=1, b=1, a=1, char="Q"),  # noqa: E741
    82: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=1, d=0, c=0, b=1, a=1, char="R"),  # noqa: E741
    83: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=0, d=1, c=1, b=0, a=1, char="S"),  # noqa: E741
    84: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=1, char="T"),  # noqa: E741
    85: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=1, c=1, b=1, a=0, char="U"),  # noqa: E741
    86: FourteenSegments(dp=0, l=0, m=0, n=1, k=1, j=0, h=0, g2=0, g1=0, f=1, e=1, d=0, c=0, b=0, a=0, char="V"),  # noqa: E741
    87: FourteenSegments(dp=0, l=1, m=0, n=1, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=0, c=1, b=1, a=0, char="W"),  # noqa: E741
    88: FourteenSegments(dp=0, l=1, m=0, n=1, k=1, j=0, h=1, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="X"),  # noqa: E741
    89: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=1, e=0, d=1, c=1, b=1, a=0, char="Y"),  # noqa: E741
    90: FourteenSegments(dp=0, l=0, m=0, n=1, k=1, j=0, h=0, g2=0, g1=0, f=0, e=0, d=1, c=0, b=0, a=1, char="Z"),  # noqa: E741
    91: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=1, c=0, b=0, a=1, char="["),  # noqa: E741
    # NOTE(review): code point 92 is the backslash character, but the char
    # label below is a double quote - possible typo to confirm.
    92: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=0, h=1, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="\""),  # noqa: E741
    93: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=1, c=1, b=1, a=1, char="]"),  # noqa: E741
    94: FourteenSegments(dp=0, l=1, m=0, n=1, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="^"),  # noqa: E741
    95: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=1, c=0, b=0, a=0, char="_"),  # noqa: E741
    96: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=1, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="`"),  # noqa: E741
    97: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=0, h=0, g2=0, g1=1, f=0, e=1, d=1, c=0, b=0, a=0, char="a"),  # noqa: E741
    98: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=0, h=0, g2=0, g1=1, f=1, e=1, d=1, c=0, b=0, a=0, char="b"),  # noqa: E741
    99: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=0, e=1, d=1, c=0, b=0, a=0, char="c"),  # noqa: E741
    100: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=0, g2=1, g1=0, f=0, e=0, d=1, c=1, b=1, a=0, char="d"),  # noqa: E741
    101: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=0, g2=0, g1=1, f=0, e=1, d=1, c=0, b=0, a=0, char="e"),  # noqa: E741
    102: FourteenSegments(dp=0, l=0, m=1, n=0, k=1, j=0, h=0, g2=1, g1=1, f=0, e=0, d=0, c=0, b=0, a=0, char="f"),  # noqa: E741
    103: FourteenSegments(dp=0, l=0, m=0, n=0, k=1, j=0, h=0, g2=1, g1=0, f=0, e=0, d=1, c=1, b=1, a=0, char="g"),  # noqa: E741
    104: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=0, h=0, g2=0, g1=1, f=1, e=1, d=0, c=0, b=0, a=0, char="h"),  # noqa: E741
    105: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="i"),  # noqa: E741
    106: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=1, h=0, g2=0, g1=0, f=0, e=1, d=0, c=0, b=0, a=0, char="j"),  # noqa: E741
    107: FourteenSegments(dp=0, l=1, m=1, n=0, k=1, j=1, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="k"),  # noqa: E741
    108: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=1, e=1, d=0, c=0, b=0, a=0, char="l"),  # noqa: E741
    109: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=0, h=0, g2=1, g1=1, f=0, e=1, d=0, c=1, b=0, a=0, char="m"),  # noqa: E741
    110: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=0, h=0, g2=0, g1=1, f=0, e=1, d=0, c=0, b=0, a=0, char="n"),  # noqa: E741
    111: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=1, g1=1, f=0, e=1, d=1, c=1, b=0, a=0, char="o"),  # noqa: E741
    112: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=1, g2=0, g1=1, f=1, e=1, d=0, c=0, b=0, a=0, char="p"),  # noqa: E741
    113: FourteenSegments(dp=0, l=0, m=0, n=0, k=1, j=0, h=0, g2=1, g1=0, f=0, e=0, d=0, c=1, b=1, a=0, char="q"),  # noqa: E741
    114: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=1, f=0, e=1, d=0, c=0, b=0, a=0, char="r"),  # noqa: E741
    115: FourteenSegments(dp=0, l=1, m=0, n=0, k=0, j=0, h=0, g2=1, g1=0, f=0, e=0, d=1, c=0, b=0, a=0, char="s"),  # noqa: E741
    116: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=1, f=1, e=1, d=1, c=0, b=0, a=0, char="t"),  # noqa: E741
    117: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=1, d=1, c=1, b=0, a=0, char="u"),  # noqa: E741
    118: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=0, g2=0, g1=0, f=0, e=1, d=0, c=0, b=0, a=0, char="v"),  # noqa: E741
    119: FourteenSegments(dp=0, l=1, m=0, n=1, k=0, j=0, h=0, g2=0, g1=0, f=0, e=1, d=0, c=1, b=0, a=0, char="w"),  # noqa: E741
    120: FourteenSegments(dp=0, l=1, m=0, n=1, k=1, j=0, h=1, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="x"),  # noqa: E741
    121: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=1, h=0, g2=1, g1=0, f=0, e=0, d=1, c=1, b=1, a=0, char="y"),  # noqa: E741
    122: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=0, g2=0, g1=1, f=0, e=0, d=1, c=0, b=0, a=0, char="z"),  # noqa: E741
    123: FourteenSegments(dp=0, l=0, m=0, n=1, k=0, j=0, h=1, g2=0, g1=1, f=0, e=0, d=1, c=0, b=0, a=1, char="{"),  # noqa: E741
    124: FourteenSegments(dp=0, l=0, m=1, n=0, k=0, j=1, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="|"),  # noqa: E741
    125: FourteenSegments(dp=0, l=1, m=0, n=0, k=1, j=0, h=0, g2=1, g1=0, f=0, e=0, d=1, c=0, b=0, a=1, char="}"),  # noqa: E741
    126: FourteenSegments(dp=0, l=0, m=0, n=1, k=1, j=0, h=0, g2=1, g1=1, f=0, e=0, d=0, c=0, b=0, a=0, char="~"),  # noqa: E741
    127: FourteenSegments(dp=0, l=0, m=0, n=0, k=0, j=0, h=0, g2=0, g1=0, f=0, e=0, d=0, c=0, b=0, a=0, char="(del)"),  # noqa: E741
}
# pylint: disable-msg=too-many-instance-attributes
class SixteenSegments(Segment):

    """Mapping for sixteen segments.

    See segment order here: https://github.com/dmadison/LED-Segment-ASCII/blob/master/README.md.
    """

    __slots__ = ["u", "t", "s", "r", "p", "m", "n", "k", "h", "g", "f", "e", "d", "c", "b", "a"]

    # pylint: disable-msg=too-many-arguments
    # pylint: disable-msg=too-many-locals
    def __init__(self, dp, u, t, s, r, p, m, n, k, h, g, f, e, d, c, b, a, char):
        """Create segment entry."""
        super().__init__(dp, char)
        # Store one 0/1 flag per segment; argument order matches __slots__.
        for name, value in zip(SixteenSegments.__slots__,
                               (u, t, s, r, p, m, n, k, h, g, f, e, d, c, b, a)):
            setattr(self, name, value)
# Map ASCII code point -> sixteen-segment pattern.  Key None is the fallback
# entry for characters that have no mapping; flags are 1 per lit segment and
# ``char`` is only a human-readable label for the entry.
sixteen_segments = {
    None: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                          char="not mappable char"),
    32: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                        char="(space)"),
    33: SixteenSegments(dp=1, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=1, c=1, b=0, a=0, char="!"),
    34: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=1, b=0, a=0,
                        char="\""),
    35: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=1, n=0, m=1, k=0, h=0, g=0, f=1, e=1, d=1, c=1, b=0, a=0, char="#"),
    36: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=1, n=0, m=1, k=0, h=1, g=0, f=1, e=1, d=1, c=0, b=1, a=1, char="$"),
    37: SixteenSegments(dp=0, u=1, t=1, s=1, r=0, p=1, n=1, m=1, k=0, h=1, g=0, f=0, e=1, d=1, c=0, b=0, a=1, char="%"),
    38: SixteenSegments(dp=0, u=1, t=0, s=0, r=1, p=0, n=0, m=1, k=1, h=0, g=1, f=1, e=1, d=0, c=0, b=0, a=1, char="&"),
    39: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="'"),
    40: SixteenSegments(dp=0, u=0, t=0, s=0, r=1, p=0, n=1, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="("),
    41: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=0, m=0, k=1, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char=")"),
    42: SixteenSegments(dp=0, u=1, t=1, s=1, r=1, p=1, n=1, m=1, k=1, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="*"),
    43: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=1, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="+"),
    44: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char=","),
    45: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="-"),
    46: SixteenSegments(dp=1, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="."),
    47: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=1, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="/"),
    48: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=1, m=0, k=0, h=1, g=1, f=1, e=1, d=1, c=1, b=1, a=1, char="0"),
    49: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=1, m=0, k=0, h=0, g=0, f=0, e=0, d=1, c=1, b=0, a=0, char="1"),
    50: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=0, g=1, f=1, e=1, d=0, c=1, b=1, a=1, char="2"),
    51: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=0, g=0, f=1, e=1, d=1, c=1, b=1, a=1, char="3"),
    52: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=0, f=0, e=0, d=1, c=1, b=0, a=0, char="4"),
    53: SixteenSegments(dp=0, u=1, t=0, s=0, r=1, p=0, n=0, m=0, k=0, h=1, g=0, f=1, e=1, d=0, c=0, b=1, a=1, char="5"),
    54: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=1, c=0, b=1, a=1, char="6"),
    55: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=1, c=1, b=1, a=1, char="7"),
    56: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=1, c=1, b=1, a=1, char="8"),
    57: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=0, f=1, e=1, d=1, c=1, b=1, a=1, char="9"),
    58: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char=":"),
    59: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char=";"),
    60: SixteenSegments(dp=0, u=1, t=0, s=0, r=1, p=0, n=1, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="<"),
    61: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=0, g=0, f=1, e=1, d=0, c=0, b=0, a=0, char="="),
    62: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=1, n=0, m=0, k=1, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char=">"),
    63: SixteenSegments(dp=1, u=0, t=0, s=1, r=0, p=1, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=1, b=1, a=1, char="?"),
    64: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=1, n=0, m=1, k=0, h=1, g=1, f=1, e=1, d=0, c=1, b=1, a=1, char="@"),
    65: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=1, c=1, b=1, a=1, char="A"),
    66: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=1, n=0, m=1, k=0, h=0, g=0, f=1, e=1, d=1, c=1, b=1, a=1, char="B"),
    67: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=0, c=0, b=1, a=1, char="C"),
    68: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=1, e=1, d=1, c=1, b=1, a=1, char="D"),
    69: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=0, c=0, b=1, a=1, char="E"),
    70: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=0, c=0, b=1, a=1, char="F"),
    71: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=1, c=0, b=1, a=1, char="G"),
    72: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=1, c=1, b=0, a=0, char="H"),
    73: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=1, e=1, d=0, c=0, b=1, a=1, char="I"),
    74: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=1, e=1, d=1, c=1, b=0, a=0, char="J"),
    75: SixteenSegments(dp=0, u=1, t=0, s=0, r=1, p=0, n=1, m=0, k=0, h=1, g=1, f=0, e=0, d=0, c=0, b=0, a=0, char="K"),
    76: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=0, c=0, b=0, a=0, char="L"),
    77: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=1, m=0, k=1, h=1, g=1, f=0, e=0, d=1, c=1, b=0, a=0, char="M"),
    78: SixteenSegments(dp=0, u=0, t=0, s=0, r=1, p=0, n=0, m=0, k=1, h=1, g=1, f=0, e=0, d=1, c=1, b=0, a=0, char="N"),
    79: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=1, c=1, b=1, a=1, char="O"),
    80: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=0, c=1, b=1, a=1, char="P"),
    81: SixteenSegments(dp=0, u=0, t=0, s=0, r=1, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=1, c=1, b=1, a=1, char="Q"),
    82: SixteenSegments(dp=0, u=1, t=0, s=0, r=1, p=1, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=0, c=1, b=1, a=1, char="R"),
    83: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=0, f=1, e=1, d=1, c=0, b=1, a=1, char="S"),
    84: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=1, a=1, char="T"),
    85: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=1, d=1, c=1, b=0, a=0, char="U"),
    86: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=1, m=0, k=0, h=1, g=1, f=0, e=0, d=0, c=0, b=0, a=0, char="V"),
    87: SixteenSegments(dp=0, u=0, t=1, s=0, r=1, p=0, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=1, c=1, b=0, a=0, char="W"),
    88: SixteenSegments(dp=0, u=0, t=1, s=0, r=1, p=0, n=1, m=0, k=1, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="X"),
    89: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=1, n=0, m=0, k=0, h=1, g=0, f=1, e=1, d=1, c=1, b=0, a=0, char="Y"),
    90: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=1, m=0, k=0, h=0, g=0, f=1, e=1, d=0, c=0, b=1, a=1, char="Z"),
    91: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=1, d=0, c=0, b=1, a=0, char="["),
    # Segments r+k form the top-left-to-bottom-right diagonal, mirroring "/"
    # (code 47, segments t+n); the label was previously a copy-paste of code
    # 34's double quote and has been corrected to a backslash.
    92: SixteenSegments(dp=0, u=0, t=0, s=0, r=1, p=0, n=0, m=0, k=1, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                        char="\\"),
    93: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=1, e=0, d=0, c=0, b=0, a=1, char="]"),
    94: SixteenSegments(dp=0, u=0, t=1, s=0, r=1, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="^"),
    95: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=1, e=1, d=0, c=0, b=0, a=0, char="_"),
    96: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=1, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0, char="`"),
    97: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=1, e=1, d=0, c=0, b=0, a=0, char="a"),
    98: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=0, d=0, c=0, b=0, a=0, char="b"),
    99: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=1, e=0, d=0, c=0, b=0, a=0, char="c"),
    100: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=1, n=0, m=0, k=0, h=0, g=0, f=0, e=1, d=1, c=1, b=0, a=0,
                         char="d"),
    101: SixteenSegments(dp=0, u=1, t=1, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=1, e=0, d=0, c=0, b=0, a=0,
                         char="e"),
    102: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=1, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=1, a=0,
                         char="f"),
    103: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=1, g=0, f=1, e=0, d=0, c=0, b=0, a=1,
                         char="g"),
    104: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="h"),
    105: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="i"),
    106: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=1, f=1, e=0, d=0, c=0, b=0, a=0,
                         char="j"),
    107: SixteenSegments(dp=0, u=0, t=0, s=1, r=1, p=0, n=1, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="k"),
    108: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="l"),
    109: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=1, n=0, m=0, k=0, h=0, g=1, f=0, e=0, d=1, c=0, b=0, a=0,
                         char="m"),
    110: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="n"),
    111: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=1, e=0, d=0, c=0, b=0, a=0,
                         char="o"),
    112: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=0, n=0, m=1, k=0, h=1, g=1, f=0, e=0, d=0, c=0, b=0, a=1,
                         char="p"),
    113: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=1, g=0, f=0, e=0, d=0, c=0, b=0, a=1,
                         char="q"),
    114: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="r"),
    115: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=1, g=0, f=1, e=0, d=0, c=0, b=0, a=1,
                         char="s"),
    116: SixteenSegments(dp=0, u=1, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=1, g=1, f=1, e=0, d=0, c=0, b=0, a=0,
                         char="t"),
    117: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=1, e=0, d=0, c=0, b=0, a=0,
                         char="u"),
    118: SixteenSegments(dp=0, u=0, t=1, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=1, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="v"),
    119: SixteenSegments(dp=0, u=0, t=1, s=0, r=1, p=0, n=0, m=0, k=0, h=0, g=1, f=0, e=0, d=1, c=0, b=0, a=0,
                         char="w"),
    120: SixteenSegments(dp=0, u=0, t=1, s=0, r=1, p=0, n=1, m=0, k=1, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="x"),
    121: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=1, n=0, m=1, k=0, h=0, g=0, f=0, e=1, d=1, c=1, b=0, a=0,
                         char="y"),
    122: SixteenSegments(dp=0, u=1, t=1, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=1, e=0, d=0, c=0, b=0, a=0,
                         char="z"),
    123: SixteenSegments(dp=0, u=1, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=1, d=0, c=0, b=1, a=0,
                         char="{"),
    124: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=0, n=0, m=1, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="|"),
    125: SixteenSegments(dp=0, u=0, t=0, s=1, r=0, p=1, n=0, m=1, k=0, h=0, g=0, f=1, e=0, d=0, c=0, b=0, a=1,
                         char="}"),
    126: SixteenSegments(dp=0, u=1, t=1, s=0, r=0, p=1, n=1, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="~"),
    127: SixteenSegments(dp=0, u=0, t=0, s=0, r=0, p=0, n=0, m=0, k=0, h=0, g=0, f=0, e=0, d=0, c=0, b=0, a=0,
                         char="(del)"),
}
class AsciiSegment(Segment):
    """Segment entry backed directly by an ASCII code point."""
    __slots__ = ["ascii_value"]
    def __init__(self, dp, ascii_value, char):
        """Store the ASCII code alongside the shared dp/char state."""
        super().__init__(dp, char)
        self.ascii_value = ascii_value
    def get_ascii_encoding(self):
        """Return the plain single-byte ASCII encoding."""
        return bytes([self.ascii_value])
    def get_ascii_with_dp_encoding(self):
        """Return the single-byte encoding, with bit 7 set when dp is lit."""
        if self.dp:
            return bytes([self.ascii_value + 128])
        return bytes([self.ascii_value])
# Map ASCII code point -> passthrough AsciiSegment entry; key None is the
# fallback for characters that have no mapping (rendered as a space).
ascii_segments = {
    None: AsciiSegment(dp=0, ascii_value=ord(" "), char=" ")
}
# Every 7-bit code maps to itself.  A comprehension (rather than a module-level
# for-loop) avoids leaking the loop variable into the module namespace.
ascii_segments.update(
    {code: AsciiSegment(dp=0, ascii_value=code, char=chr(code)) for code in range(128)}
)
| 68.528363 | 132 | 0.489026 | 10,510 | 42,282 | 1.952046 | 0.027973 | 0.044746 | 0.026906 | 0.027491 | 0.860109 | 0.840271 | 0.825746 | 0.807906 | 0.796647 | 0.791334 | 0 | 0.167946 | 0.22998 | 42,282 | 616 | 133 | 68.63961 | 0.462191 | 0.071756 | 0 | 0.086869 | 0 | 0.442424 | 0.012761 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038384 | false | 0 | 0 | 0 | 0.090909 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.